1#!/bin/sh
2# SPDX-License-Identifier: GPL-2.0-only
3
4# ftracetest - Ftrace test shell scripts
5#
6# Copyright (C) Hitachi Ltd., 2014
7#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
8#
9
usage() { # errno [message]
# Print an optional message, the option summary, and exit with $1.
# The message is quoted so multi-word/globbing text survives intact.
[ -n "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Display the test logs on console (cannot use with -v)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--stop-fail Stop the test when a failure is detected"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs on the <dir>"
echo "		            If <dir> is -, all logs output in console only"
exit $1
}
25
26# default error
27err_ret=1
28
29# kselftest skip code is 4
30err_skip=4
31
32# cgroup RT scheduling prevents chrt commands from succeeding, which
33# induces failures in test wakeup tests.  Disable for the duration of
34# the tests.
35
36readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us
37
38sched_rt_runtime_orig=$(cat $sched_rt_runtime)
39
setup() {
  # Allow unlimited RT runtime (-1) so chrt-based wakeup tests can run;
  # cleanup() restores the value saved in sched_rt_runtime_orig.
  echo -1 > $sched_rt_runtime
}
43
cleanup() {
  # Restore the RT runtime limit captured at script startup.
  echo $sched_rt_runtime_orig > $sched_rt_runtime
}
47
errexit() { # message
  # Report a fatal error on stderr, undo setup() changes, and abort with
  # err_ret (which may have been switched to the kselftest skip code).
  printf 'Error: %s\n' "$1" >&2
  cleanup
  exit $err_ret
}
53
# Ensuring user privilege
# The tests write to tracefs and /proc sysctls, so root is mandatory.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

# Relax RT throttling for the whole run; cleanup() undoes this.
setup
60
61# Utilities
absdir() { # file_path
  # Print the absolute directory containing $1.  All expansions are
  # quoted so paths containing whitespace resolve correctly.
  (cd "$(dirname -- "$1")"; pwd)
}

abspath() { # file_path
  # Print the absolute path of $1 (directory part canonicalized).
  echo "$(absdir "$1")/$(basename -- "$1")"
}
69
find_testcases() { #directory
  # Print all *.tc files under $1, sorted.  The unquoted command
  # substitution is intentional: it joins find's lines into one
  # space-separated list, matching how TEST_CASES is consumed.
  echo $(find "$1" -name '*.tc' | sort)
}
73
parse_opts() { # opts
  # Parse command-line arguments, setting the global KEEP_LOG, VERBOSE,
  # DEBUG, STOP_FAILURE, UNSUPPORTED_RESULT, LOG_DIR and TEST_CASES
  # variables.  Non-option arguments are testcase files (*.tc) or
  # directories that are scanned for testcases.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
        # usage() takes the exit code first; without the leading "1" it
        # would execute "exit <message>".
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      # VERBOSE=-1 is the "console only" sentinel checked by run_test.
      if [ $VERBOSE -ne 0 ]; then
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  # Explicit testcases replace the default "run everything" list.
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
142
# Parameters
# Locate the tracefs mount point; fall back to debugfs, mounting one of
# them ourselves when neither is mounted yet.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	else
	    # Neither filesystem is configured: exit with the kselftest
	    # SKIP code rather than a failure.
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
170
# Default parameters: run every testcase under test.d, log under a
# timestamped directory next to this script.
TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
STOP_FAILURE=0
# Parse command-line options; "$@" (not $*) keeps arguments containing
# whitespace — e.g. testcase paths with spaces — as single words.
parse_opts "$@"

[ $DEBUG -ne 0 ] && set -x
184
# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
# "--logdir -" means console-only: LOG_FILE stays empty and prlog/catlog
# skip file output entirely.
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi
199
# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout exists and number of colors is eight or more, use them.
# The values are literal "\033[..m" strings; prlog's printf format
# expansion turns them into real escape sequences.
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
214
strip_esc() {
  # Filter (stdin -> stdout): drop ANSI color/erase sequences so the log
  # file gets plain text.  NOTE(review): the bracket expression [m|K]
  # also matches a literal '|' terminator — presumably harmless, confirm.
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}
219
prlog() { # [-n] messages
  # Print to stdout and, when logging to a file, append the same text
  # (escape sequences stripped) to $LOG_FILE.  The message is used as the
  # printf format on purpose: it expands the "\033[..m" color strings.
  newline="\n"
  case "$1" in
  -n)
    newline=
    shift
    ;;
  esac
  printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  # Dump a per-test log to stdout; when a log file is in use, also append
  # a color-stripped copy to $LOG_FILE.
  cat $1
  [ "$LOG_FILE" ] && strip_esc < $1 >> $LOG_FILE
}
233prlog "=== Ftrace unit tests ==="
234
235
236# Testcase management
237# Test result codes - Dejagnu extended code
238PASS=0	# The test succeeded.
239FAIL=1	# The test failed, but was expected to succeed.
240UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
241UNTESTED=3    # The test was not run, currently just a placeholder.
242UNSUPPORTED=4 # The test failed because of lack of feature.
243XFAIL=5	# The test failed, and was expected to fail.
244
245# Accumulations
246PASSED_CASES=
247FAILED_CASES=
248UNRESOLVED_CASES=
249UNTESTED_CASES=
250UNSUPPORTED_CASES=
251XFAILED_CASES=
252UNDEFINED_CASES=
253TOTAL_RESULT=0
254
255INSTANCE=
256CASENO=0
257
testcase() { # testfile
  # Announce a testcase: bump the global counter and print
  # "[N](instance)description" using the "# description:" header line of
  # the testcase file.  "$1" is quoted so paths with spaces work.
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" "$1" | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}
263
test_on_instance() { # testfile
  # True (exit 0) when the testcase declares "# flags: instance", i.e. it
  # must also be run inside a created ftrace instance directory.
  grep -q "^#[ \t]*flags:.*instance" "$1"
}
267
eval_result() { # sigval
  # Report one test's result code: print the colored verdict, append the
  # case number to the matching *_CASES list, and return 0 when the
  # result is acceptable (non-zero flips TOTAL_RESULT in run_test).
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug.. something happened.
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      # 0 normally; 1 when --fail-unsupported was given.
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_red}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
307
# Signal handling for result codes
#
# Each testcase runs in a subshell (see __run_test), so it cannot set
# variables in this shell directly.  The exit_* helpers below instead
# send this (parent) shell a realtime signal encoding the result code,
# and the traps record it in SIG_RESULT.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

exit_pass () {
  # No signal needed: run_test presets SIG_RESULT=0 (PASS).
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  # Failure is detected from the subshell's non-zero exit status in
  # __run_test, which raises SIG_FAIL on our behalf.
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
350
__run_test() { # testfile
  # Source the testcase in a subshell inside $TRACING_DIR: set -e aborts
  # on the first failing command, set -x traces commands, and
  # initialize_ftrace (from test.d/functions) resets tracing state first.
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  # Any non-zero exit from the subshell counts as FAIL; report it to the
  # parent shell's trap via the realtime signal.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
356
# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  # Per-case log file; with console-only logging, write to stdout instead.
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  # Private scratch directory for the testcase, removed afterwards.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  # Preset to PASS; the signal traps overwrite it with the actual result.
  SIG_RESULT=0
  # Route output according to verbosity (-1 = --console).
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    # NOTE(review): "2>&1" here applies to tee, so the testcase's stderr
    # (including the set -x trace) bypasses the log file — confirm whether
    # "__run_test $1 2>&1 | tee" was intended.
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    # Unexpected result: show the log at -v/-vv and remember the failure
    # for the final exit status.
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
391
# load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  # --stop-fail aborts the whole run on the first unexpected result.
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
403
# Test on instance loop
# Re-run every testcase flagged "# flags: instance" inside a freshly
# created ftrace instance directory, removing the instance afterwards.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

# Final per-category summary.
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# Restore the RT runtime sysctl changed by setup().
cleanup

# if no error, return 0
exit $TOTAL_RESULT
433