#!/bin/sh

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
# Released under the terms of the GPL v2.

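# Typical invocations (illustrative examples only):
#   ./ftracetest                 # run every *.tc testcase under ./test.d
#   ./ftracetest -vv test.d/foo  # run one directory, showing all output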
usage() { # errno [message]
[ "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs in the <dir>"
exit $1
}

errexit() { # message
  echo "Error: $1" 1>&2
  exit 1
}

# Ensure root privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { #directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv)
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
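# Locate the ftrace directory: prefer <debugfs>/tracing if debugfs is
# mounted, otherwise look for a standalone tracefs mount (e.g. the
# /sys/kernel/tracing mount point on newer kernels).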
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
    TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
    TRACING_DIR=$DEBUGFS_DIR/tracing
fi

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
LOG_FILE=$LOG_DIR/ftracetest.log
mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
date > $LOG_FILE
prlog() { # messages
  echo "$@" | tee -a $LOG_FILE
}
catlog() { #file
  cat $1 | tee -a $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - DejaGnu extended codes
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because a required feature is missing.
XFAIL=5	# The test failed, and was expected to fail.
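# Testcases report anything other than PASS through the exit_* helpers
# defined below (exit_unresolved, exit_untested, exit_unsupported,
# exit_xfail); a plain non-zero exit status is treated as FAIL.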

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[PASS]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[FAIL]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[UNRESOLVED]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug: something unexpected happened.
    ;;
    $UNTESTED)
      prlog "	[UNTESTED]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[UNSUPPORTED]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return 1 # this is not a bug, but the result should be reported.
    ;;
    $XFAIL)
      prlog "	[XFAIL]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[UNDEFINED]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$
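# Each testcase runs in a subshell (see __run_test below), so it cannot set
# variables in this shell directly. Instead, each non-PASS result gets its
# own realtime signal (SIG_BASE + result code); the exit_* helpers send that
# signal to the top-level shell ($SIG_PID), and the matching trap records
# the result code in SIG_RESULT.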

SIG_FAIL=$((SIG_BASE + FAIL))
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

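# Run a testcase in a subshell: the test script is sourced with set -e, so
# any failing command aborts it, and a non-zero subshell status is reported
# back as FAIL by signalling the top-level shell.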
__run_test() { # testfile
  # Set up PID for the testcase; $$ is not updated in a subshell, so read
  # the real PID from /proc/self/stat.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  local testlog=`mktemp $LOG_DIR/${testname}-log.XXXXXX`
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  testcase $1
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -ge 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 ] && rm $testlog
  else
    [ $VERBOSE -ge 1 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions
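# test.d/functions is expected to provide shared helpers such as
# initialize_ftrace, which __run_test calls before sourcing each testcase.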

# Main loop
for t in $TEST_CASES; do
  run_test $t
done

# Test on instance loop
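# Testcases that declare "# flags: instance" in their header are run a
# second time inside a temporary ftrace instance created under
# $TRACING_DIR/instances, with TRACING_DIR pointing at that instance.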
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
done

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# if no error, return 0
exit $TOTAL_RESULT