#!/bin/sh

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
# Released under the terms of the GPL v2.

usage() { # errno [message]
[ ! -z "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Write test output directly to the console (not usable with -v)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--stop-fail Stop the tests at the first failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs in the <dir>"
echo "		            If <dir> is -, all logs are written to the console only"
exit $1
}
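
# Typical invocations (the testcase paths below are only illustrative):
#   ./ftracetest                      # run every *.tc testcase under ./test.d
#   ./ftracetest -v test.d/somedir/   # run one testcase directory, verbosely
#   ./ftracetest -l - somecase.tc     # run a single testcase, logging to the console only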

errexit() { # message
  echo "Error: $1" 1>&2
  exit 1
}

# Ensuring user privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { # directory
  echo `find $1 -name \*.tc | sort`
}
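
# For reference: absdir/abspath canonicalize a path against the current working
# directory, and find_testcases prints a sorted, space-separated list of every
# *.tc file below the given directory.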

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
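
# Note: any testcase file or directory given on the command line replaces the
# default set (every *.tc under $TEST_DIR); options and testcase arguments may
# be mixed freely.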

# Parameters
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
    TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
    TRACING_DIR=$DEBUGFS_DIR/tracing
fi
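
# On typical systems debugfs is mounted at /sys/kernel/debug and tracefs at
# /sys/kernel/tracing, so TRACING_DIR usually resolves to one of those (or to
# /sys/kernel/debug/tracing); the actual mount points come from /proc/mounts.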

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi
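
# When --logdir is given '-', LOG_FILE stays empty and per-test output goes to
# stdout instead of per-case log files (see run_test below).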

prlog() { # messages
  [ -z "$LOG_FILE" ] && echo "$@" || echo "$@" | tee -a $LOG_FILE
}
catlog() { # file
  [ -z "$LOG_FILE" ] && cat $1 || cat $1 | tee -a $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended codes
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results (e.g. it was interrupted).
UNTESTED=3    # The test was not run; currently just a placeholder.
UNSUPPORTED=4 # The test failed because a required feature is missing.
XFAIL=5	# The test failed, and was expected to fail.
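
# A testcase reports PASS simply by exiting 0; the other results are reported
# through the exit_* helpers defined below (any non-zero exit counts as FAIL).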

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}
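
# Each *.tc file is expected to carry a header comment like
# "# description: <one-line summary>"; that summary is what gets printed here.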

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
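
# A testcase opts in to the per-instance rerun (see the instance loop near the
# end of this script) with a header comment like "# flags: instance".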

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[PASS]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[FAIL]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[UNRESOLVED]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug... something happened.
    ;;
    $UNTESTED)
      prlog "	[UNTESTED]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[UNSUPPORTED]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[XFAIL]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[UNDEFINED]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$
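
# Result reporting scheme: each result code maps to the realtime signal
# SIG_BASE + code. A testcase (running in a subshell) reports a result either
# by its exit status or by calling an exit_* helper that sends the matching
# signal to this parent shell; the traps below record it in SIG_RESULT, which
# is later passed to eval_result.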

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # The testcase runs in a subshell where $$ is not updated, so read the
  # subshell's real PID from /proc/self/stat for testcases that need it.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
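
# The subshell enables 'set -e' and 'set -x', resets ftrace state with
# initialize_ftrace (loaded from $TEST_DIR/functions below), then sources the
# testcase; any non-zero exit is converted into a FAIL signal for the parent.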

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions
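
# $TEST_DIR/functions provides the shared helpers used by the testcases and by
# this script itself (e.g. initialize_ftrace, called in __run_test above and
# for the final cleanup below).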

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done

# Instance test loop: rerun flagged tests in a temporary tracing instance
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# if no error, return 0
exit $TOTAL_RESULT