#!/bin/sh

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
# Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
# Released under the terms of the GPL v2.

usage() { # errno [message]
[ "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "  -h|--help           Show help message"
echo "  -k|--keep           Keep passed test logs"
echo "  -v|--verbose        Increase verbosity of test messages"
echo "  -vv                 Alias of -v -v (Show all results in stdout)"
echo "  -vvv                Alias of -v -v -v (Show all commands immediately)"
echo "  --fail-unsupported  Treat UNSUPPORTED as a failure"
echo "  -d|--debug          Debug mode (trace all shell commands)"
echo "  -l|--logdir <dir>   Save the logs in <dir>"
exit $1
}

errexit() { # message
  echo "Error: $1" 1>&2
  exit 1
}

# Ensure this is run by the root user
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() { # file_path
  echo `absdir $1`/`basename $1`
}

find_testcases() { # directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
  TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
  TRACING_DIR=$DEBUGFS_DIR/tracing
fi

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0

# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Prepare the logs
LOG_FILE=$LOG_DIR/ftracetest.log
mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
date > $LOG_FILE
prlog() { # messages
  echo "$@" | tee -a $LOG_FILE
}
catlog() { # file
  cat $1 | tee -a $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended codes
PASS=0         # The test succeeded.
FAIL=1         # The test failed, but was expected to succeed.
UNRESOLVED=2   # The test produced indeterminate results (e.g. it was interrupted).
UNTESTED=3     # The test was not run; currently just a placeholder.
UNSUPPORTED=4  # The test failed because a required feature is missing.
XFAIL=5        # The test failed, and was expected to fail.
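#
# How the codes are reported (a descriptive note, not part of the original
# control flow): a sourced testcase that exits 0 is counted as PASS and a
# non-zero exit as FAIL; the remaining codes are raised explicitly through
# the exit_unresolved/exit_untested/exit_unsupported/exit_xfail helpers
# defined below, which send this process the realtime signal SIG_BASE + <code>.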

# Accumulated results
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog " [PASS]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog " [FAIL]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog " [UNRESOLVED]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug; something unexpected happened.
    ;;
    $UNTESTED)
      prlog " [UNTESTED]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog " [UNSUPPORTED]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog " [XFAIL]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog " [UNDEFINED]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

SIG_FAIL=$((SIG_BASE + FAIL))
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # Set up PID in the subshell; $$ is not updated there.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  local testlog=`mktemp $LOG_DIR/${testname}-log.XXXXXX`
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  testcase $1
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# Load the helper functions
. $TEST_DIR/functions
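
# Illustrative sketch of a testcase (hypothetical, not shipped with this
# script): each *.tc fragment is sourced by __run_test() with $TRACING_DIR
# as its working directory, so a minimal case could look like:
#
#   # description: Toggle tracing_on
#   # flags: instance
#   grep -q function available_tracers || exit_unsupported
#   echo 0 > tracing_on
#   echo 1 > tracing_on
#
# Any command failing under 'set -e' turns the case into a FAIL; the
# description line is what testcase() prints, and the instance flag opts
# the case into the instance loop below.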

# Main loop
for t in $TEST_CASES; do
  run_test $t
done

# Instance loop: re-run the testcases flagged with 'instance' inside a
# temporary ftrace instance.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
done

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# If there was no error, return 0
exit $TOTAL_RESULT
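
# Example invocations (illustrative only; the paths below are hypothetical):
#   ./ftracetest                            # run every *.tc under test.d/
#   ./ftracetest -vv test.d/somedir         # run one directory, echo results to stdout
#   ./ftracetest -l /tmp/ftrace-logs --fail-unsupported sometest.tc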