1#!/bin/sh
2
3# ftracetest - Ftrace test shell scripts
4#
5# Copyright (C) Hitachi Ltd., 2014
6#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
7#
8# Released under the terms of the GPL v2.
9
usage() { # errno [message]
# Print an optional error message and the option summary, then exit
# with the given status code ($1).  $2 is quoted so messages containing
# spaces or glob characters are printed verbatim.
[ "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs on the <dir>"
exit "$1"
}
23
errexit() { # message
  # Report a fatal error on stderr and terminate the script with status 1.
  printf 'Error: %s\n' "$1" >&2
  exit 1
}
28
# Ensuring user privilege
# The ftrace control files under tracefs/debugfs are root-only, so bail
# out early when not running as root.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi
33
34# Utilities
absdir() { # file_path
  # Print the absolute path of the directory containing $1.
  # Runs in a subshell so the caller's working directory is untouched;
  # quoting keeps paths with spaces intact, and "&&" prevents printing
  # the wrong (current) directory when the cd fails.
  (cd "$(dirname "$1")" && pwd)
}
38
abspath() { # file_path
  # Print the absolute path of $1 (directory part resolved via absdir).
  # Quoted expansions keep paths containing spaces in one piece.
  echo "$(absdir "$1")/$(basename "$1")"
}
42
find_testcases() { # directory
  # List every *.tc testcase file under $1, sorted.  The command
  # substitution is intentionally unquoted so the sorted newline list
  # collapses to a single space-separated line, as callers expect.
  echo $(find "$1" -name \*.tc | sort)
}
46
parse_opts() { # opts
  # Parse command line arguments into the global configuration:
  # KEEP_LOG, VERBOSE, DEBUG, UNSUPPORTED_RESULT, LOG_DIR and
  # TEST_CASES.  Bare arguments are testcase files (*.tc) or
  # directories to scan for testcases.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv)
      VERBOSE=$((VERBOSE + 1))
      # -vv is a shorthand for -v -v, so it counts twice.
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      # Require the directory argument; "shift 2" would fail without it.
      [ "$2" ] || usage 1 "-l/--logdir requires a directory"
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES $(abspath "$1")"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=$(abspath "$1")
        OPT_TEST_CASES="$OPT_TEST_CASES $(find_testcases "$OPT_TEST_DIR")"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  # Only override the default testcase list when some were given.
  if [ "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
100
# Parameters
# Prefer the tracing directory under a mounted debugfs; otherwise fall
# back to a directly mounted tracefs.
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
    TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
    TRACING_DIR=$DEBUGFS_DIR/tracing
fi

# Defaults below; parse_opts() may override LOG_DIR, TEST_CASES and the flags.
TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
# Parse command-line options
# Pass "$@" rather than $* so that arguments containing whitespace
# (e.g. quoted testcase paths) are forwarded intact.
parse_opts "$@"

# -d/--debug: trace every shell command from here on.
[ $DEBUG -ne 0 ] && set -x
121
# Verify parameters
# Without a usable tracing directory none of the tests can run.
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
# Each run writes into its own timestamped directory (LOG_DIR above).
LOG_FILE=$LOG_DIR/ftracetest.log
mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
date > $LOG_FILE
prlog() { # messages
  # Print the messages to stdout while appending a copy to $LOG_FILE.
  # NOTE: callers deliberately pass echo options through (e.g.
  # "prlog -n ..."), so this must stay `echo "$@"` -- a printf-based
  # variant would print the "-n" literally.
  echo "$@" | tee -a $LOG_FILE
}
catlog() { #file
  # Dump the file $1 to stdout while appending a copy to $LOG_FILE.
  # Redirecting into tee avoids the needless extra `cat` process, and
  # quoting keeps file names with spaces intact.
  tee -a "$LOG_FILE" < "$1"
}
137prlog "=== Ftrace unit tests ==="
138
139
140# Testcase management
141# Test result codes - Dejagnu extended code
142PASS=0	# The test succeeded.
143FAIL=1	# The test failed, but was expected to succeed.
144UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
145UNTESTED=3    # The test was not run, currently just a placeholder.
146UNSUPPORTED=4 # The test failed because of lack of feature.
147XFAIL=5	# The test failed, and was expected to fail.
148
149# Accumulations
150PASSED_CASES=
151FAILED_CASES=
152UNRESOLVED_CASES=
153UNTESTED_CASES=
154UNSUPPORTED_CASES=
155XFAILED_CASES=
156UNDEFINED_CASES=
157TOTAL_RESULT=0
158
159INSTANCE=
160CASENO=0
testcase() { # testfile
  # Announce the next testcase: bump the case counter and print the
  # testcase's "# description:" line (everything after the colon).
  # "$1" is quoted so testcase paths with spaces still work.
  CASENO=$((CASENO+1))
  desc=$(grep "^#[ \t]*description:" "$1" | cut -f2 -d:)
  prlog -n "[$CASENO]$INSTANCE$desc"
}
166
test_on_instance() { # testfile
  # Succeed (return 0) when the testcase declares "# flags: instance",
  # i.e. it requests an extra run inside a new ftrace instance.
  grep -q "^#[ \t]*flags:.*instance" "$1"
}
170
eval_result() { # sigval
  # Record the result code $1 in the matching accumulator, print its
  # label, and return 0 for acceptable outcomes or 1 for ones that
  # indicate a bug in the kernel or the test itself.
  if [ "$1" = "$PASS" ]; then
    prlog "	[PASS]"
    PASSED_CASES="$PASSED_CASES $CASENO"
    return 0
  elif [ "$1" = "$FAIL" ]; then
    prlog "	[FAIL]"
    FAILED_CASES="$FAILED_CASES $CASENO"
    return 1 # this is a bug.
  elif [ "$1" = "$UNRESOLVED" ]; then
    prlog "	[UNRESOLVED]"
    UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
    return 1 # this is a kind of bug.. something happened.
  elif [ "$1" = "$UNTESTED" ]; then
    prlog "	[UNTESTED]"
    UNTESTED_CASES="$UNTESTED_CASES $CASENO"
    return 0
  elif [ "$1" = "$UNSUPPORTED" ]; then
    prlog "	[UNSUPPORTED]"
    UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
    return $UNSUPPORTED_RESULT # depends on use case
  elif [ "$1" = "$XFAIL" ]; then
    prlog "	[XFAIL]"
    XFAILED_CASES="$XFAILED_CASES $CASENO"
    return 0
  else
    prlog "	[UNDEFINED]"
    UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
    return 1 # this must be a test bug
  fi
}
210
# Signal handling for result codes
# A testcase reports its result by sending a realtime signal
# (SIG_BASE + result code) to this main shell (SIG_PID = $$); the
# matching trap records the code in SIG_RESULT, which run_test reads
# after the testcase finishes.  The exit_* helpers are called from
# inside the testcase subshell.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

SIG_FAIL=$((SIG_BASE + FAIL))
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
# Report an UNRESOLVED result to the main shell and stop the testcase.
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
# Report an UNTESTED result to the main shell and stop the testcase.
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
# Report an UNSUPPORTED result to the main shell and stop the testcase.
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
# Report an XFAIL (expected failure) result and stop the testcase.
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
246
__run_test() { # testfile
  # setup PID and PPID, $$ is not updated.
  # Run the testcase in a subshell: cwd set to the tracing directory,
  # errexit (-e) so any failing command aborts it, xtrace (-x) so the
  # log records every command, ftrace state reset first, then the
  # testcase file is sourced.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  # Any non-zero subshell status is reported as a FAIL signal to the
  # main shell (the testcase may already have signalled another result).
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
252
# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  local testlog=`mktemp $LOG_DIR/${testname}-log.XXXXXX`
  # Give each testcase its own private temporary directory.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  testcase $1
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -ge 2 ]; then
    # -vv: stream the test output to stdout while also logging it.
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  # SIG_RESULT was updated by the signal traps while __run_test ran.
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 ] && rm $testlog
  else
    # Unexpected outcome: keep the log, optionally show it, and mark
    # the whole run as failed.
    [ $VERBOSE -ge 1 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
276
# load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
done

# Test on instance loop
# Re-run every testcase flagged "# flags: instance" inside a freshly
# created ftrace instance directory, with TRACING_DIR pointed at it.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
done

# Summary: count the accumulated case numbers per result category.
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# if no error, return 0
exit $TOTAL_RESULT
307