1#!/bin/sh
2
3# ftracetest - Ftrace test shell scripts
4#
5# Copyright (C) Hitachi Ltd., 2014
6#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
7#
8# Released under the terms of the GPL v2.
9
usage() { # errno [message]
# Print an optional message plus the usage text, then exit with the given
# status.  "$2" is quoted so messages containing spaces/globs print intact.
[ "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-d|--debug Debug mode (trace all shell commands)"
exit "$1"
}
21
errexit() { # message
  # Report a fatal error on stderr and terminate the whole script.
  printf 'Error: %s\n' "$1" >&2
  exit 1
}
26
# Ensuring user privilege
# The ftrace control files under debugfs/tracefs are writable only by
# root, so abort early unless the effective uid is 0.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi
31
32# Utilities
absdir() { # file_path
  # Print the absolute directory containing $1.
  # Expansions are quoted so paths with whitespace work; the subshell keeps
  # the 'cd' from leaking into the caller, and '&&' suppresses a misleading
  # 'pwd' of the current directory when the 'cd' itself fails.
  (cd "$(dirname "$1")" && pwd)
}
36
abspath() { # file_path
  # Print the absolute path of $1: its directory resolved via absdir()
  # plus its basename.  Quoted so whitespace in paths survives.
  echo "$(absdir "$1")/$(basename "$1")"
}
40
find_testcases() { #directory
  # Emit every *.tc file under $1, sorted, as one space-separated list.
  # The command substitution is deliberately left unquoted at the 'echo'
  # so find's newlines collapse into spaces for list accumulation.
  echo $(find "$1" -name \*.tc | sort)
}
44
parse_opts() { # opts
  # Parse command-line arguments, collecting testcase files/directories.
  # Sets the globals KEEP_LOG, VERBOSE, DEBUG and, when any testcases were
  # named explicitly, replaces TEST_CASES; exits via usage() on bad input.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv)
      VERBOSE=$((VERBOSE + 1))
      # Use '=' (not the bashism '==') so this stays valid under /bin/sh
      # implementations such as dash; -vv bumps verbosity twice.
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
90
# Parameters
# Locate the ftrace control directory: prefer <debugfs>/tracing when a
# debugfs mount exists in /proc/mounts, otherwise fall back to the first
# standalone tracefs mount point.
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
    TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
    TRACING_DIR=$DEBUGFS_DIR/tracing
fi
98
99TOP_DIR=`absdir $0`
100TEST_DIR=$TOP_DIR/test.d
101TEST_CASES=`find_testcases $TEST_DIR`
102LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
103KEEP_LOG=0
104DEBUG=0
105VERBOSE=0
106# Parse command-line options
107parse_opts $*
108
109[ $DEBUG -ne 0 ] && set -x
110
# Verify parameters
# Neither debugfs nor tracefs yielded a usable tracing directory; without
# it no test can run, so bail out.
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi
115
116# Preparing logs
117LOG_FILE=$LOG_DIR/ftracetest.log
118mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
119date > $LOG_FILE
prlog() { # messages
  # Print a message to stdout while appending it to the run's log file.
  # "$LOG_FILE" is quoted so a log path containing spaces cannot split.
  echo "$@" | tee -a "$LOG_FILE"
}
catlog() { #file
  # Copy a file's contents to stdout while appending them to the log file.
  # Redirection replaces the useless 'cat | tee' pipeline; expansions are
  # quoted so whitespace in either path is safe.
  tee -a "$LOG_FILE" < "$1"
}
126prlog "=== Ftrace unit tests ==="
127
128
129# Testcase management
130# Test result codes - Dejagnu extended code
131PASS=0	# The test succeeded.
132FAIL=1	# The test failed, but was expected to succeed.
133UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
134UNTESTED=3    # The test was not run, currently just a placeholder.
135UNSUPPORTED=4 # The test failed because of lack of feature.
136XFAIL=5	# The test failed, and was expected to fail.
137
138# Accumulations
139PASSED_CASES=
140FAILED_CASES=
141UNRESOLVED_CASES=
142UNTESTED_CASES=
143UNSUPPORTED_CASES=
144XFAILED_CASES=
145UNDEFINED_CASES=
146TOTAL_RESULT=0
147
148CASENO=0
testcase() { # testfile
  # Announce the next test: bump the case counter and print the testcase's
  # own "# description:" header line via prlog (no trailing newline, so the
  # result tag printed later lands on the same line).
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" "$1" | cut -f2 -d:`
  prlog -n "[$CASENO]$desc"
}
154
eval_result() { # sigval
  # Translate a test's result code into a report line plus bookkeeping:
  # append the case number to the matching *_CASES list and return 0 when
  # the outcome is acceptable, 1 when it must count against the run.
  case $1 in
    $PASS)
      prlog "	[PASS]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[FAIL]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[UNRESOLVED]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug.. something happened.
    ;;
    $UNTESTED)
      prlog "	[UNTESTED]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[UNSUPPORTED]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return 1 # this is not a bug, but the result should be reported.
    ;;
    $XFAIL)
      prlog "	[XFAIL]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      # Result code matched none of the defined Dejagnu codes.
      prlog "	[UNDEFINED]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
194
# Signal handling for result codes
# Testcases execute in a subshell (see __run_test), so they cannot set a
# variable in this process directly.  Instead each non-default outcome is
# reported by sending a realtime signal numbered SIG_BASE + <result code>
# to the main shell (SIG_PID); the traps below record it in SIG_RESULT.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

SIG_FAIL=$((SIG_BASE + FAIL))
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
# Called from a testcase to report an UNRESOLVED result and stop the test.
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
# Called from a testcase to report an UNTESTED result and stop the test.
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
# Called from a testcase when a required kernel feature is missing.
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
# Called from a testcase to report an expected failure (XFAIL).
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
230
__run_test() { # testfile
  # Source the testcase in a subshell rooted at the tracing directory,
  # with errexit/xtrace enabled and ftrace reset via initialize_ftrace
  # (provided by the sourced $TEST_DIR/functions helper).
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  # A non-zero subshell status that was not already signalled counts as FAIL.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
236
237# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  local testlog=`mktemp $LOG_DIR/${testname}-log.XXXXXX`
  # Private scratch directory for the testcase, removed when it finishes.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  testcase $1
  echo "execute: "$1 > $testlog
  # Default to PASS; a signal trap may overwrite this while the test runs.
  SIG_RESULT=0
  if [ $VERBOSE -ge 2 ]; then
    # -vv: mirror the test's output to stdout as well as the log.
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 ] && rm $testlog
  else
    # Keep the log for inspection; with -v also dump it to the console.
    [ $VERBOSE -ge 1 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
260
# load in the helper functions
# (defines initialize_ftrace and other utilities used by the testcases)
. $TEST_DIR/functions

# Main loop
# Run every collected testcase in order.
for t in $TEST_CASES; do
  run_test $t
done
268
269prlog ""
270prlog "# of passed: " `echo $PASSED_CASES | wc -w`
271prlog "# of failed: " `echo $FAILED_CASES | wc -w`
272prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
273prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
274prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
275prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
276prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`
277
278# if no error, return 0
279exit $TOTAL_RESULT
280