13 files changed, 189 insertions, 33 deletions
diff --git a/tools/testing/selftests/ftrace/README b/tools/testing/selftests/ftrace/README
index b8631f03e754..182e76fa4b82 100644
--- a/tools/testing/selftests/ftrace/README
+++ b/tools/testing/selftests/ftrace/README
@@ -38,6 +38,43 @@ extension) and rewrite the test description line.
  * The test cases should run on dash (busybox shell) for testing on
    minimal cross-build environments.
 
+ * Note that the tests are run with the "set -e" (errexit) option. If any
+   command fails, the test is terminated immediately.
+
+ * A test can return a specific result code instead of plain pass or fail
+   by calling exit_unresolved, exit_untested, exit_unsupported or exit_xfail.
+
+Result code
+===========
+
+Ftracetest supports the following result codes.
+
+ * PASS: The test succeeded as expected. A test which exits with 0 is
+   counted as a passed test.
+
+ * FAIL: The test failed, but was expected to succeed. A test which exits
+   with a non-zero code is counted as a failed test.
+
+ * UNRESOLVED: The test produced unclear or intermediate results; for
+   example, the test was interrupted, the test depends on a previous test
+   which failed, or the test was set up incorrectly. A test in such a
+   situation must call exit_unresolved.
+
+ * UNTESTED: The test was not run; it is currently just a placeholder.
+   In this case, the test must call exit_untested.
+
+ * UNSUPPORTED: The test failed because a required feature is not
+   available. In this case, the test must call exit_unsupported.
+
+ * XFAIL: The test failed, and was expected to fail.
+   To return XFAIL, call exit_xfail from the test.
+
+There are sample test scripts for each result code under samples/.
+You can run the samples as below:
+
+  # ./ftracetest samples/
+
 TODO
 ====
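For illustration, a run against those samples reports one case per result code. The per-case labels come from eval_result() and the summary lines from the prlog calls added to ftracetest below; exact spacing, case order and the log excerpts printed for non-passing cases may differ, so treat this as a sketch rather than captured output:

    # ./ftracetest samples/
    === Ftrace unit tests ===
    [1] failure-case example      [FAIL]
    [2] pass-case example         [PASS]
    [3] unresolved-case example   [UNRESOLVED]
    [4] unsupported-case example  [UNSUPPORTED]
    [5] untested-case example     [UNTESTED]
    [6] xfail-case example        [XFAIL]

    # of passed:  1
    # of failed:  1
    # of unresolved:  1
    # of untested:  1
    # of unsupported:  1
    # of xfailed:  1
    # of undefined(test bug):  0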
diff --git a/tools/testing/selftests/ftrace/ftracetest b/tools/testing/selftests/ftrace/ftracetest
index 4c6c2fad2cda..a8f81c782856 100755
--- a/tools/testing/selftests/ftrace/ftracetest
+++ b/tools/testing/selftests/ftrace/ftracetest
@@ -114,22 +114,106 @@ prlog "=== Ftrace unit tests ==="
 
 # Testcase management
+# Test result codes - Dejagnu extended code
+PASS=0 # The test succeeded.
+FAIL=1 # The test failed, but was expected to succeed.
+UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
+UNTESTED=3 # The test was not run, currently just a placeholder.
+UNSUPPORTED=4 # The test failed because of lack of feature.
+XFAIL=5 # The test failed, and was expected to fail.
+
+# Accumulations
 PASSED_CASES=
 FAILED_CASES=
+UNRESOLVED_CASES=
+UNTESTED_CASES=
+UNSUPPORTED_CASES=
+XFAILED_CASES=
+UNDEFINED_CASES=
+TOTAL_RESULT=0
+
 CASENO=0
 testcase() { # testfile
   CASENO=$((CASENO+1))
   prlog -n "[$CASENO]"`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
 }
-failed() {
-  prlog " [FAIL]"
-  FAILED_CASES="$FAILED_CASES $CASENO"
+
+eval_result() { # retval sigval
+  local retval=$2
+  if [ $2 -eq 0 ]; then
+    test $1 -ne 0 && retval=$FAIL
+  fi
+  case $retval in
+    $PASS)
+      prlog " [PASS]"
+      PASSED_CASES="$PASSED_CASES $CASENO"
+      return 0
+    ;;
+    $FAIL)
+      prlog " [FAIL]"
+      FAILED_CASES="$FAILED_CASES $CASENO"
+      return 1 # this is a bug.
+    ;;
+    $UNRESOLVED)
+      prlog " [UNRESOLVED]"
+      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
+      return 1 # this is a kind of bug.. something happened.
+    ;;
+    $UNTESTED)
+      prlog " [UNTESTED]"
+      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
+      return 0
+    ;;
+    $UNSUPPORTED)
+      prlog " [UNSUPPORTED]"
+      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
+      return 1 # this is not a bug, but the result should be reported.
+    ;;
+    $XFAIL)
+      prlog " [XFAIL]"
+      XFAILED_CASES="$XFAILED_CASES $CASENO"
+      return 0
+    ;;
+    *)
+      prlog " [UNDEFINED]"
+      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
+      return 1 # this must be a test bug
+    ;;
+  esac
+}
+
+# Signal handling for result codes
+SIG_RESULT=
+SIG_BASE=36 # Use realtime signals
+SIG_PID=$$
+
+SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
+exit_unresolved () {
+  kill -s $SIG_UNRESOLVED $SIG_PID
+  exit 0
+}
+trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED
+
+SIG_UNTESTED=$((SIG_BASE + UNTESTED))
+exit_untested () {
+  kill -s $SIG_UNTESTED $SIG_PID
+  exit 0
 }
-passed() {
-  prlog " [PASS]"
-  PASSED_CASES="$PASSED_CASES $CASENO"
+trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED
+
+SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
+exit_unsupported () {
+  kill -s $SIG_UNSUPPORTED $SIG_PID
+  exit 0
 }
+trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED
+SIG_XFAIL=$((SIG_BASE + XFAIL))
+exit_xfail () {
+  kill -s $SIG_XFAIL $SIG_PID
+  exit 0
+}
+trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
 
 # Run one test case
 run_test() { # testfile
@@ -137,14 +221,17 @@ run_test() { # testfile
   local testlog=`mktemp --tmpdir=$LOG_DIR ${testname}-XXXXXX.log`
   testcase $1
   echo "execute: "$1 > $testlog
-  (cd $TRACING_DIR; set -x ; . $1) >> $testlog 2>&1
-  ret=$?
-  if [ $ret -ne 0 ]; then
-    failed
-    catlog $testlog
-  else
-    passed
+  SIG_RESULT=0
+  # setup PID and PPID, $$ is not updated.
+  (cd $TRACING_DIR; read PID _ < /proc/self/stat ;
+   set -e; set -x; . $1) >> $testlog 2>&1
+  eval_result $? $SIG_RESULT
+  if [ $? -eq 0 ]; then
+    # Remove test log if the test was done as it was expected.
     [ $KEEP_LOG -eq 0 ] && rm $testlog
+  else
+    catlog $testlog
+    TOTAL_RESULT=1
   fi
 }
 
@@ -152,8 +239,15 @@ run_test() { # testfile
 for t in $TEST_CASES; do
   run_test $t
 done
+
 prlog ""
 prlog "# of passed: " `echo $PASSED_CASES | wc -w`
 prlog "# of failed: " `echo $FAILED_CASES | wc -w`
-
-test -z "$FAILED_CASES" # if no error, return 0
+prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
+prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
+prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
+prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
+prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`
+
+# if no error, return 0
+exit $TOTAL_RESULT
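The exit_* helpers above exist because the test's plain exit status can only distinguish pass from fail. Each helper sends a reserved realtime signal (SIG_BASE plus the result code) to the main ftracetest process, whose trap records the code in SIG_RESULT, and then exits 0; eval_result later combines that recorded code with the exit status. The test subshell reads its own PID from /proc/self/stat because $$ is not updated inside a subshell, which also lets a test such as samples/unresolved.tc below send a signal to itself via $PID. A minimal self-contained sketch of the same signalling pattern (illustrative only, not part of this patch):

    #!/bin/sh
    # Stand-alone sketch of the realtime-signal result channel used by ftracetest.
    SIG_BASE=36                         # realtime signal range; same value the harness uses
    UNSUPPORTED=4
    SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
    SIG_RESULT=0
    SIG_PID=$$                          # PID of this (parent) shell

    trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

    exit_unsupported() {
      kill -s $SIG_UNSUPPORTED $SIG_PID # tell the parent which result code applies
      exit 0                            # the test subshell itself exits cleanly
    }

    # Run a "test" in a subshell with errexit, as run_test() does.
    (
      read PID _ < /proc/self/stat      # $$ still names the parent in a subshell
      set -e
      [ -f /nonexistent-feature-file ] || exit_unsupported
      echo "feature present"
    )
    ret=$?
    echo "exit code: $ret, signalled result code: $SIG_RESULT"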
diff --git a/tools/testing/selftests/ftrace/samples/fail.tc b/tools/testing/selftests/ftrace/samples/fail.tc
new file mode 100644
index 000000000000..15e35b956e05
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/fail.tc
@@ -0,0 +1,4 @@
+#!/bin/sh
+# description: failure-case example
+cat non-exist-file
+echo "this is not executed"
diff --git a/tools/testing/selftests/ftrace/samples/pass.tc b/tools/testing/selftests/ftrace/samples/pass.tc
new file mode 100644
index 000000000000..d01549370041
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/pass.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: pass-case example
+return 0
diff --git a/tools/testing/selftests/ftrace/samples/unresolved.tc b/tools/testing/selftests/ftrace/samples/unresolved.tc
new file mode 100644
index 000000000000..41e99d3358d1
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/unresolved.tc
@@ -0,0 +1,4 @@
+#!/bin/sh
+# description: unresolved-case example
+trap exit_unresolved INT
+kill -INT $PID
diff --git a/tools/testing/selftests/ftrace/samples/unsupported.tc b/tools/testing/selftests/ftrace/samples/unsupported.tc
new file mode 100644
index 000000000000..45910ff13328
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/unsupported.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: unsupported-case example
+exit_unsupported
diff --git a/tools/testing/selftests/ftrace/samples/untested.tc b/tools/testing/selftests/ftrace/samples/untested.tc
new file mode 100644
index 000000000000..35a45946ec60
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/untested.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: untested-case example
+exit_untested
diff --git a/tools/testing/selftests/ftrace/samples/xfail.tc b/tools/testing/selftests/ftrace/samples/xfail.tc
new file mode 100644
index 000000000000..9dd395323259
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/xfail.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: xfail-case example
+cat non-exist-file || exit_xfail
diff --git a/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc b/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc
index b04f30df0db3..bf9a7b037924 100644
--- a/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc
+++ b/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc
@@ -1,6 +1,7 @@
 #!/bin/sh
 # description: Basic test for tracers
+test -f available_tracers
 for t in `cat available_tracers`; do
-  echo $t > current_tracer || exit 1
+  echo $t > current_tracer
 done
 echo nop > current_tracer
diff --git a/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc b/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc
index 0c1a3a207636..bde6625d9785 100644
--- a/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc
+++ b/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc
@@ -1,8 +1,8 @@
 #!/bin/sh
 # description: Basic trace clock test
-[ -f trace_clock ] || exit 1
+test -f trace_clock
 for c in `cat trace_clock | tr -d \[\]`; do
-  echo $c > trace_clock || exit 1
-  grep '\['$c'\]' trace_clock || exit 1
+  echo $c > trace_clock
+  grep '\['$c'\]' trace_clock
 done
 echo local > trace_clock
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc b/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc
index 5ddfb476eceb..1b8b665ab2b3 100644
--- a/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc
@@ -1,11 +1,11 @@
 #!/bin/sh
 # description: Kprobe dynamic event - adding and removing
-[ -f kprobe_events ] || exit 1
+[ -f kprobe_events ] || exit_unsupported # this is configurable
 
-echo 0 > events/enable || exit 1
-echo > kprobe_events || exit 1
-echo p:myevent do_fork > kprobe_events || exit 1
-grep myevent kprobe_events || exit 1
-[ -d events/kprobes/myevent ] || exit 1
+echo 0 > events/enable
+echo > kprobe_events
+echo p:myevent do_fork > kprobe_events
+grep myevent kprobe_events
+test -d events/kprobes/myevent
 echo > kprobe_events
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc b/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc
index 588fde97e93f..b55c84003587 100644
--- a/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc
@@ -1,14 +1,13 @@
 #!/bin/sh
 # description: Kprobe dynamic event - busy event check
-[ -f kprobe_events ] || exit 1
+[ -f kprobe_events ] || exit_unsupported
 
-echo 0 > events/enable || exit 1
-echo > kprobe_events || exit 1
-echo p:myevent do_fork > kprobe_events || exit 1
-[ -d events/kprobes/myevent ] || exit 1
-echo 1 > events/kprobes/myevent/enable || exit 1
+echo 0 > events/enable
+echo > kprobe_events
+echo p:myevent do_fork > kprobe_events
+test -d events/kprobes/myevent
+echo 1 > events/kprobes/myevent/enable
 echo > kprobe_events && exit 1 # this must fail
-echo 0 > events/kprobes/myevent/enable || exit 1
+echo 0 > events/kprobes/myevent/enable
 echo > kprobe_events # this must succeed
-
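With run_test now sourcing each test under "set -e", the "|| exit 1" suffixes removed above become redundant: any bare command that fails ends the test as a failure. Commands that are expected to fail have to be written so the failure does not trip errexit, which is why the "&& exit 1 # this must fail" line in busy_check.tc and the "|| exit_xfail" line in samples/xfail.tc keep working. A standalone sketch of the two idioms (illustrative only, not part of this patch):

    #!/bin/sh
    set -e                            # same errexit mode that run_test applies

    echo "setup" > /dev/null          # a bare failing command would end the test here

    # "must fail" style: a failure on the left-hand side of && does not trigger
    # errexit, so the script continues; if the command unexpectedly succeeds,
    # "exit 1" marks the test as failed.
    cat /nonexistent-file && exit 1

    # XFAIL style: report the expected failure instead of failing.
    # (exit_xfail is the ftracetest helper; echo stands in for it here.)
    cat /nonexistent-file || echo "would call exit_xfail"

    exit 0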
diff --git a/tools/testing/selftests/ftrace/test.d/template b/tools/testing/selftests/ftrace/test.d/template
index ce5f735b2e65..5448f7abad5f 100644
--- a/tools/testing/selftests/ftrace/test.d/template
+++ b/tools/testing/selftests/ftrace/test.d/template
@@ -1,4 +1,9 @@
 #!/bin/sh
 # description: %HERE DESCRIBE WHAT THIS DOES%
 # you have to add ".tc" extention for your testcase file
+# Note that all tests are run with the "errexit" option.
+
 exit 0 # Return 0 if the test is passed, otherwise return !0
+# If the test could not run because a required feature is missing, call exit_unsupported
+# If the test returned unclear results, call exit_unresolved
+# If the test is a dummy or a placeholder, call exit_untested
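Putting the template and the helpers together, a new test case could be structured as follows. This is a hypothetical example for illustration only: the option file name is a placeholder and the script is not part of this patch.

    #!/bin/sh
    # description: hypothetical example following the template above

    # Check for the required feature first and report UNSUPPORTED, not FAIL,
    # if the kernel does not provide it (the file name is a placeholder).
    [ -f options/some_tracing_option ] || exit_unsupported

    # Under errexit, every bare command below must succeed for the test to pass.
    echo 1 > options/some_tracing_option
    echo 0 > options/some_tracing_option

    exit 0 # reached only if everything above succeeded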