#!/bin/sh

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
# Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
# Released under the terms of the GPL v2.

usage() { # errno [message]
[ ! -z "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "    -h|--help            Show help message"
echo "    -k|--keep            Keep passed test logs"
echo "    -v|--verbose         Increase verbosity of test messages"
echo "    -vv                  Alias of -v -v (Show all results in stdout)"
echo "    -vvv                 Alias of -v -v -v (Show all commands immediately)"
echo "    --console            Run tests with output going to the console, not the test logs"
echo "    --fail-unsupported   Treat UNSUPPORTED as a failure"
echo "    --stop-fail          Stop the test run at the first failure"
echo "    -d|--debug           Debug mode (trace all shell commands)"
echo "    -l|--logdir <dir>    Save logs in <dir>"
echo "                         If <dir> is -, all logs are written to the console only"
exit $1
}

# default error
err_ret=1

# kselftest skip code is 4
err_skip=4

errexit() { # message
  echo "Error: $1" 1>&2
  exit $err_ret
}

# Ensure we are running as the root user
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { #directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
        usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
        usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
  DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
  if [ -z "$DEBUGFS_DIR" ]; then
    # If the kernel supports tracefs, /sys/kernel/tracing exists
    if [ -d "/sys/kernel/tracing" ]; then
      mount -t tracefs nodev /sys/kernel/tracing ||
        errexit "Failed to mount /sys/kernel/tracing"
      TRACING_DIR="/sys/kernel/tracing"
    # If the kernel supports debugfs, /sys/kernel/debug exists
    elif [ -d "/sys/kernel/debug" ]; then
      mount -t debugfs nodev /sys/kernel/debug ||
        errexit "Failed to mount /sys/kernel/debug"
      TRACING_DIR="/sys/kernel/debug/tracing"
    else
      err_ret=$err_skip
      errexit "debugfs and tracefs are not configured in this kernel"
    fi
  else
    TRACING_DIR="$DEBUGFS_DIR/tracing"
  fi
fi
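# Note: the greps above take the first tracefs/debugfs mount point listed in
# /proc/mounts.  An illustrative matching line looks like:
#   tracefs /sys/kernel/tracing tracefs rw,nosuid,nodev,noexec,relatime 0 0
# so `cut -f2 -d' '` extracts the mount point (field 2), here /sys/kernel/tracing.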
if [ ! -d "$TRACING_DIR" ]; then
  err_ret=$err_skip
  errexit "ftrace is not configured in this kernel"
fi

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Prepare logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi

# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout is a terminal and it supports eight or more colors, use them
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}

prlog() { # messages
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  cat $1
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended codes
PASS=0        # The test succeeded.
FAIL=1        # The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because of a lack of kernel feature support.
XFAIL=5       # The test failed, and was expected to fail.

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
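# Each *.tc testcase carries its metadata in comment headers: testcase() above
# extracts "# description:" for the report line and test_on_instance() looks
# for "# flags: instance".  Inside a testcase, results are reported via the
# exit_* helpers defined below (most of them signal this shell with
# SIG_BASE + result code, which the matching trap records in SIG_RESULT);
# simply finishing with exit status 0 counts as PASS.  A minimal,
# hypothetical testcase might look like:
#
#   #!/bin/sh
#   # description: example - check that the function tracer is selectable
#   # flags: instance
#   grep -wq function available_tracers || exit_unsupported
#   echo function > current_tracer
#   exit 0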
eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "  [${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "  [${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "  [${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug; something unexpected happened.
    ;;
    $UNTESTED)
      prlog "  [${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "  [${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "  [${color_red}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "  [${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36 # Use realtime signals
SIG_PID=$$

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # set up $PID for the testcase; $$ is not updated inside the subshell.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# Load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
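# Testcases that declared "# flags: instance" are run a second time below,
# with TRACING_DIR pointing at a freshly created ftrace instance directory
# (e.g. /sys/kernel/tracing/instances/ftracetest.XXXXXX), so that each such
# case is also exercised against a per-instance trace buffer.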
# Test on instance loop
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# If there were no unexpected results, return 0
exit $TOTAL_RESULT
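# Example invocations (paths are illustrative):
#   ./ftracetest                                 # run every *.tc under test.d/
#   ./ftracetest -vv test.d/kprobe/              # run one directory, showing all results
#   ./ftracetest -l - test.d/00basic/basic1.tc   # write logs to the console only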