blob: 92d15154ba795a4a88e4e6e4a1dab487f08b81cf (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
|
#!/bin/bash
# perf script task-analyzer tests
# SPDX-License-Identifier: GPL-2.0

# Scratch directory for per-test output files. Abort immediately if it
# cannot be created: with an empty $tmpdir every test would try to write
# to "/perf.out".
tmpdir=$(mktemp -d /tmp/perf-script-task-analyzer-XXXXX) || exit 1
err=0
# set PERF_EXEC_PATH to find scripts in the source directory
perfdir=$(dirname "$0")/../..
if [ -e "$perfdir/scripts/python/Perf-Trace-Util" ]; then
	export PERF_EXEC_PATH=$perfdir
fi
cleanup() {
	# Remove the artifacts the tests drop into the current directory,
	# then the scratch directory itself.
	rm -f perf.data perf.data.old csv csvsummary
	rm -rf "$tmpdir"
	# Drop the traps so the normal exit path keeps its own exit status.
	trap - exit term int
}
# Trap handler: tidy up and force a failing exit code. cleanup() itself
# removes the traps, so this cannot recurse.
trap_cleanup() {
	cleanup
	exit 1
}
# Fire on any exit and on SIGTERM/SIGINT; the normal end-of-script paths
# call cleanup first, which un-registers these traps before exiting.
trap trap_cleanup exit term int
report() {
	# $1: exit status (0 == pass), $2: test description, $3: error details.
	# A failure also latches the script-wide $err flag.
	local status="$1" desc="$2" details="$3"
	if [ "$status" != 0 ]; then
		echo "FAIL: \"$desc\" Error message: \"$details\""
		err=1
		return
	fi
	echo "PASS: \"$desc\""
}
check_exec_0() {
	# Capture $? right away: at function entry it still holds the exit
	# status of the command run just before this function was called.
	local rc=$?
	if [ "$rc" != 0 ]; then
		report 1 "invocation of $1 command failed"
	fi
}
# Report pass/fail for test $3 depending on whether string $1 occurs in
# file $2. '--' protects against search strings beginning with '-'.
find_str_or_fail() {
	if grep -q -- "$1" "$2"; then
		report 0 "$3"
	else
		report 1 "$3" "Failed to find required string:'${1}'."
	fi
}
# check if perf is compiled with libtraceevent support
skip_no_probe_record_support() {
	# Return 2 (the perf-test "skip" code) when the build options report
	# HAVE_LIBTRACEEVENT as OFF, 0 otherwise.
	if perf version --build-options | grep -q " OFF .* HAVE_LIBTRACEEVENT"; then
		return 2
	fi
	return 0
}
prepare_perf_data() {
	# Record system-wide sched_switch events; 1s should be sufficient to
	# catch at least some switches.
	perf record -e sched:sched_switch -a -- sleep 1 > /dev/null 2>&1
	# The recording only counts as successful if perf.data was written.
	if [ -e "perf.data" ]; then
		return 0
	fi
	printf "FAIL: perf record failed to create \"perf.data\" \n"
	return 1
}
# check standard invocation with no arguments
test_basic() {
	# The default table must at least contain the "Comm" column header.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer > "$out"
	check_exec_0 "perf script report task-analyzer"
	find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
test_ns_rename(){
	# Nanosecond timestamps combined with tid-based comm renaming.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --ns --rename-comms-by-tids 0:random > "$out"
	check_exec_0 "perf script report task-analyzer --ns --rename-comms-by-tids 0:random"
	find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
test_ms_filtertasks_highlight(){
	# Millisecond timestamps plus task filtering and highlighting.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --ms --filter-tasks perf --highlight-tasks perf \
		> "$out"
	check_exec_0 "perf script report task-analyzer --ms --filter-tasks perf --highlight-tasks perf"
	find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
test_extended_times_timelimit_limittasks() {
	# --extended-times adds the "Out-Out" column; exercise it together
	# with --time-limit and --limit-to-tasks.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --extended-times --time-limit :99999 \
		--limit-to-tasks perf > "$out"
	check_exec_0 "perf script report task-analyzer --extended-times --time-limit :99999 --limit-to-tasks perf"
	find_str_or_fail "Out-Out" "$out" "${FUNCNAME[0]}"
}
test_summary() {
	# --summary must append a "Summary" section to the normal output.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --summary > "$out"
	check_exec_0 "perf script report task-analyzer --summary"
	find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
test_summaryextended() {
	# --summary-extended must include the "Inter Task Times" section.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --summary-extended > "$out"
	check_exec_0 "perf script report task-analyzer --summary-extended"
	find_str_or_fail "Inter Task Times" "$out" "${FUNCNAME[0]}"
}
test_summaryonly() {
	# --summary-only must still produce the "Summary" section.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --summary-only > "$out"
	check_exec_0 "perf script report task-analyzer --summary-only"
	find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
test_extended_times_summary_ns() {
	# Combined options: both the "Out-Out" column (extended times) and the
	# "Summary" section must be present.
	local out="$tmpdir/perf.out"
	perf script report task-analyzer --extended-times --summary --ns > "$out"
	check_exec_0 "perf script report task-analyzer --extended-times --summary --ns"
	find_str_or_fail "Out-Out" "$out" "${FUNCNAME[0]}"
	find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
# CSV mode writes to the file named by --csv (here "csv" in the CWD);
# the header row must use ';' separators.
test_csv() {
	perf script report task-analyzer --csv csv > /dev/null
	check_exec_0 "perf script report task-analyzer --csv csv"
	find_str_or_fail "Comm;" csv "${FUNCNAME[0]}"
}
# CSV mode with --extended-times: the "Out-Out" column must appear in the
# ';'-separated header of the "csv" output file.
test_csv_extended_times() {
	perf script report task-analyzer --csv csv --extended-times > /dev/null
	check_exec_0 "perf script report task-analyzer --csv csv --extended-times"
	find_str_or_fail "Out-Out;" csv "${FUNCNAME[0]}"
}
# Summary-as-CSV mode writes to the file named by --csv-summary (here
# "csvsummary" in the CWD); the header row must use ';' separators.
test_csvsummary() {
	perf script report task-analyzer --csv-summary csvsummary > /dev/null
	check_exec_0 "perf script report task-analyzer --csv-summary csvsummary"
	find_str_or_fail "Comm;" csvsummary "${FUNCNAME[0]}"
}
test_csvsummary_extended() {
	# Extended summary in CSV form: the "Out-Out" column must appear in
	# the "csvsummary" output file.
	perf script report task-analyzer --csv-summary csvsummary --summary-extended >/dev/null
	check_exec_0 "perf script report task-analyzer --csv-summary csvsummary --summary-extended"
	find_str_or_fail "Out-Out;" csvsummary "${FUNCNAME[0]}"
}
# Skip the whole file (perf-test skip code 2) when perf was built without
# libtraceevent support.
skip_no_probe_record_support
err=$?
if [ $err -ne 0 ]; then
	echo "WARN: Skipping tests. No libtraceevent support"
	cleanup
	exit $err
fi
# Bail out if recording failed: without a perf.data file every test below
# would fail with confusing secondary errors. prepare_perf_data already
# printed its own FAIL message.
if ! prepare_perf_data; then
	cleanup
	exit 1
fi
test_basic
test_ns_rename
test_ms_filtertasks_highlight
test_extended_times_timelimit_limittasks
test_summary
test_summaryextended
test_summaryonly
test_extended_times_summary_ns
test_csv
test_csvsummary
test_csv_extended_times
test_csvsummary_extended
cleanup
exit $err
|