-
Notifications
You must be signed in to change notification settings - Fork 44
/
multiple-pause-resume.sh
executable file
·206 lines (177 loc) · 7.57 KB
/
multiple-pause-resume.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
#!/bin/bash
set -e
##
## Case Name: Run multiple pipeline for pause resume
## Preconditions:
## N/A
## Description:
##    pick up multiple pipelines to do pause/resume
## fake pause/resume with expect
##    expect sleeps for the sleep time, then mocks spacebar keypresses ' ' to
##    trigger the resume action
## Case step:
## 1. run 1st pipeline
##    2. pick up any other pipeline
## 3. use expect to fake pause/resume in each pipeline
## 4. go through with tplg file
## Expect result:
## no errors occur for either process
##
# Resolve the test-suite top directory relative to this script so it can be
# invoked from any working directory.
TOPDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. && pwd)

# shellcheck source=case-lib/lib.sh
source "$TOPDIR"/case-lib/lib.sh

# Register command-line options with the case-lib option parser.
# OPT_NAME/OPT_DESC/OPT_HAS_ARG/OPT_VAL are declared by case-lib/lib.sh.
# -t: topology file to test, defaults to the TPLG environment variable
OPT_NAME['t']='tplg' OPT_DESC['t']="tplg file, default value is env TPLG: $TPLG"
OPT_HAS_ARG['t']=1 OPT_VAL['t']="$TPLG"
# -l: number of outer test iterations
OPT_NAME['l']='loop' OPT_DESC['l']='loop count'
OPT_HAS_ARG['l']=1 OPT_VAL['l']=1
# -c: how many pipelines to run concurrently in each combination
OPT_NAME['c']='count' OPT_DESC['c']='combine test pipeline count'
OPT_HAS_ARG['c']=1 OPT_VAL['c']=2
# -r: number of pause/resume cycles per pipeline run
OPT_NAME['r']='repeat' OPT_DESC['r']='pause resume repeat count'
OPT_HAS_ARG['r']=1 OPT_VAL['r']=5
# pause/resume interval will be a random value bounded by the min and max values below
OPT_NAME['i']='min' OPT_DESC['i']='pause/resume transition min value, unit is ms'
OPT_HAS_ARG['i']=1 OPT_VAL['i']='20'
OPT_NAME['a']='max' OPT_DESC['a']='pause/resume transition max value, unit is ms'
OPT_HAS_ARG['a']=1 OPT_VAL['a']='50'
# -s: flag (no argument) enabling sof-logger trace collection
OPT_NAME['s']='sof-logger' OPT_DESC['s']="Open sof-logger trace the data will store at $LOG_ROOT"
OPT_HAS_ARG['s']=0 OPT_VAL['s']=1

func_opt_parse_option "$@"
repeat_count=${OPT_VAL['r']}
loop_count=${OPT_VAL['l']}

# configure random pause/resume interval range (milliseconds)
rnd_min=${OPT_VAL['i']}
rnd_max=${OPT_VAL['a']}
rnd_range=$((rnd_max - rnd_min))

start_test

# Abort early on a degenerate range. Use an explicit 'if' so 'exit 2' is
# always reached: the former 'test && dlogw && exit 2' chain would skip
# the exit whenever dlogw itself returned non-zero.
if [[ "$rnd_range" -le 0 ]]; then
    dlogw "Error random range scope [ min:$rnd_min - max:$rnd_max ]"
    exit 2
fi

tplg=${OPT_VAL['t']}
# Export every pipeline type from the topology except the Amplifier
# Reference PCM, which is not suitable for this test.
func_pipeline_export "$tplg" "type:any & ~pcm:Amplifier Reference"

logger_disabled || func_lib_start_log_collect
# We know it's failing and it's not going to get fixed: stop polluting
# test results. Note https://github.com/thesofproject/sof/issues/9135
# is NOT a problem with pause/resume; it is a problem with multiple
# pipelines. This code is duplicated in multiple-pipeline.sh
if [[ "$MODEL" == *NOCODEC* ]]; then
    is_firmware_file_zephyr ||
        skip_test 'Known pipeline_comp_reset() bug sof#9135'
fi
declare -a pipeline_idx_lst
declare -a cmd_idx_lst
declare -a file_idx_lst

# Flatten all exported pipelines into three parallel lists: pipeline
# index, ALSA command, and data file. A "both" pipeline contributes two
# entries (one playback, one capture) sharing the same pipeline index.
for i in $(seq 0 $((PIPELINE_COUNT - 1)))
do
    pipeline_idx_lst+=("$i")
    type=$(func_pipeline_parse_value "$i" type)
    case "$type" in
        playback)
            cmd_idx_lst+=("aplay")
            file_idx_lst+=("/dev/zero")
            ;;
        capture)
            cmd_idx_lst+=("arecord")
            file_idx_lst+=("/dev/null")
            ;;
        both)
            cmd_idx_lst+=("aplay")
            file_idx_lst+=("/dev/zero")
            # both includes playback & capture, so duplicate the index
            pipeline_idx_lst+=("$i")
            cmd_idx_lst+=("arecord")
            file_idx_lst+=("/dev/null")
            ;;
        *)
            # Fixed typo in error message: "Unknow" -> "Unknown"
            die "Unknown pipeline type: $type"
            ;;
    esac
done
# Concurrency per combination is the smaller of the number of pipeline
# entries found in the topology and the requested count (-c).
# Explicit if/else instead of 'cond && a || b' (the latter runs 'b' too
# when 'a' fails — ShellCheck SC2015).
if [[ ${#pipeline_idx_lst[@]} -gt ${OPT_VAL['c']} ]]; then
    max_count=${OPT_VAL['c']}
else
    max_count=${#pipeline_idx_lst[@]}
fi
if [[ "$max_count" -eq 1 ]]; then
    dlogw "pipeline count is 1, don't need to run this case"
    exit 2
fi

# create combination list: sof-combinatoric.py prints one comma-separated
# index combination per line
declare -a pipeline_combine_lst
for i in $(sof-combinatoric.py -n "${#pipeline_idx_lst[@]}" -p "$max_count")
do
    # convert combine string to combine element by replacing commas with spaces for the for loop below
    pipeline_combine_str="${i//,/ }"
    pipeline_combine_lst+=("$pipeline_combine_str")
done

# This can get pretty big and make the test last a very long time and timeout,
# especially in NOCODEC configurations. See #706 discussion. So, log that value.
declare -p pipeline_combine_lst
if [[ ${#pipeline_combine_lst[@]} -eq 0 ]]; then
    dlogw "pipeline combine is empty"
    exit 2
fi
# Launch one apause.exp pause/resume session in the background for the
# list slot given in $1 (an index into the three parallel lists built
# above). The caller collects the PID via $!.
func_pause_resume_pipeline()
{
    local slot=$1
    local idx=${pipeline_idx_lst[$slot]}
    local cmd=${cmd_idx_lst[$slot]}
    local file=${file_idx_lst[$slot]}

    # Pull the ALSA parameters for this pipeline from the topology.
    # Declarations are kept separate from the command substitutions so a
    # parse failure is not masked by 'local' (and still trips 'set -e').
    local channel rate fmt dev pcm type
    channel=$(func_pipeline_parse_value "$idx" channel)
    rate=$(func_pipeline_parse_value "$idx" rate)
    fmt=$(func_pipeline_parse_value "$idx" fmt)
    dev=$(func_pipeline_parse_value "$idx" dev)
    pcm=$(func_pipeline_parse_value "$idx" pcm)
    type=$(func_pipeline_parse_value "$idx" type)

    # expect is tcl language script
    #   expr rand(): produces random numbers between 0 and 1
    #   after ms: Ms must be an integer giving a time in milliseconds.
    #     The command sleeps for ms milliseconds and then returns.
    local shortname="cmd$idx $cmd $pcm"
    local -a alsa_args=(-D "$dev" -r "$rate" -c "$channel" -f "$fmt" -vv -i "$file")

    dlogi "Starting: apause.exp $cmd -D $dev -r $rate -c $channel -f $fmt -vv -i $file &"
    "$TOPDIR"/case-lib/apause.exp "$shortname" "$repeat_count" "$rnd_min" "$rnd_range" \
        "$cmd" "${alsa_args[@]}" &
}
# Upper bound (seconds) on waiting for the background sessions to drain:
# to prevent infinite loop, 5 second per a repeat is plenty
max_wait_time=$((5 * repeat_count))

for i in $(seq 1 "$loop_count")
do
    dlogi "===== Loop count( $i / $loop_count ) ====="
    # set up checkpoint for each iteration
    setup_kernel_check_point
    for pipeline_combine_str in "${pipeline_combine_lst[@]}"
    do
        # PIDs of the apause.exp sessions started for this combination
        unset pid_lst
        declare -a pid_lst
        for idx in $pipeline_combine_str
        do
            func_pause_resume_pipeline "$idx"
            pid_lst=("${pid_lst[@]}" $!)
            # Stagger a bit to avoid preambles interleaved with each other.
            # It's very far from perfect but it helps a little bit.
            sleep 0.1
        done
        # wait for aplay/arecord finished
        dlogi "wait for expect/aplay/arecord process finished"
        # Poll once a second until no expect/aplay/arecord process is
        # left system-wide, giving up after max_wait_time seconds.
        iwait=$max_wait_time
        while [ $iwait -gt 0 ]
        do
            iwait=$((iwait - 1))
            sleep 1s
            [[ ! "$(pidof expect aplay arecord)" ]] && break
        done
        # Catch timeout after the wait loop
        echo
        if [ "$(pidof expect aplay arecord)" ]; then
            dloge "Still have expect process not finished after wait for $max_wait_time"
            # list expect/aplay/arecord processes
            pgrep -a -f expect || true
            pgrep -a -f aplay || true
            pgrep -a -f arecord || true
            # kill aplay/arecord
            sof-process-kill.sh ||
                dlogw "Kill process catch error"
            exit 1
        fi
        # now check for all expect quit status
        # dump the pipeline combine, because pause resume will have too many operation log
        for idx in $pipeline_combine_str
        do
            pipeline_index=${pipeline_idx_lst[$idx]}
            pcm=$(func_pipeline_parse_value "$pipeline_index" pcm)
            dlogi "pipeline: $pcm with ${cmd_idx_lst[$idx]}"
        done
        dlogi "Check expect exit status"
        # Reap each background session; a non-zero status fails the test.
        for pid in "${pid_lst[@]}"
        do
            wait "$pid" || {
                # Dump kernel messages since the checkpoint before dying;
                # '|| true' keeps the log check from masking the real failure.
                sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || true
                die "pause resume PID $pid had non-zero exit status"
            }
        done
    done
    # check kernel log for each iteration to catch issues
    sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || die "Caught error in kernel log"
done