@@ -114,22 +114,106 @@ prlog "=== Ftrace unit tests ==="


 # Testcase management
+# Test result codes - Dejagnu extended code
+PASS=0 # The test succeeded.
+FAIL=1 # The test failed, but was expected to succeed.
+UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
+UNTESTED=3 # The test was not run, currently just a placeholder.
+UNSUPPORTED=4 # The test failed because of lack of feature.
+XFAIL=5 # The test failed, and was expected to fail.
+
+# Accumulations
 PASSED_CASES=
 FAILED_CASES=
+UNRESOLVED_CASES=
+UNTESTED_CASES=
+UNSUPPORTED_CASES=
+XFAILED_CASES=
+UNDEFINED_CASES=
+TOTAL_RESULT=0
+
 CASENO=0
 testcase() { # testfile
   CASENO=$((CASENO+1))
   prlog -n "[$CASENO]"`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
 }
-failed() {
-  prlog " [FAIL]"
-  FAILED_CASES="$FAILED_CASES $CASENO"
+
+eval_result() { # retval sigval
+  local retval=$2
+  if [ $2 -eq 0 ]; then
+    test $1 -ne 0 && retval=$FAIL
+  fi
+  case $retval in
+    $PASS)
+      prlog " [PASS]"
+      PASSED_CASES="$PASSED_CASES $CASENO"
+      return 0
+    ;;
+    $FAIL)
+      prlog " [FAIL]"
+      FAILED_CASES="$FAILED_CASES $CASENO"
+      return 1 # this is a bug.
+    ;;
+    $UNRESOLVED)
+      prlog " [UNRESOLVED]"
+      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
+      return 1 # this is a kind of bug.. something happened.
+    ;;
+    $UNTESTED)
+      prlog " [UNTESTED]"
+      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
+      return 0
+    ;;
+    $UNSUPPORTED)
+      prlog " [UNSUPPORTED]"
+      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
+      return 1 # this is not a bug, but the result should be reported.
+    ;;
+    $XFAIL)
+      prlog " [XFAIL]"
+      XFAILED_CASES="$XFAILED_CASES $CASENO"
+      return 0
+    ;;
+    *)
+      prlog " [UNDEFINED]"
+      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
+      return 1 # this must be a test bug
+    ;;
+  esac
+}
+
+# Signal handling for result codes
+SIG_RESULT=
+SIG_BASE=36 # Use realtime signals
+SIG_PID=$$
+
+SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
+exit_unresolved () {
+  kill -s $SIG_UNRESOLVED $SIG_PID
+  exit 0
+}
+trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED
+
+SIG_UNTESTED=$((SIG_BASE + UNTESTED))
+exit_untested () {
+  kill -s $SIG_UNTESTED $SIG_PID
+  exit 0
 }
-passed() {
-  prlog " [PASS]"
-  PASSED_CASES="$PASSED_CASES $CASENO"
+trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED
+
+SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
+exit_unsupported () {
+  kill -s $SIG_UNSUPPORTED $SIG_PID
+  exit 0
 }
+trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

+SIG_XFAIL=$((SIG_BASE + XFAIL))
+exit_xfail () {
+  kill -s $SIG_XFAIL $SIG_PID
+  exit 0
+}
+trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

 # Run one test case
 run_test() { # testfile
@@ -137,14 +221,17 @@ run_test() { # testfile
   local testlog=`mktemp --tmpdir=$LOG_DIR ${testname}-XXXXXX.log`
   testcase $1
   echo "execute: "$1 > $testlog
-  (cd $TRACING_DIR; set -x ; . $1) >> $testlog 2>&1
-  ret=$?
-  if [ $ret -ne 0 ]; then
-    failed
-    catlog $testlog
-  else
-    passed
+  SIG_RESULT=0
+  # setup PID and PPID, $$ is not updated.
+  (cd $TRACING_DIR; read PID _ < /proc/self/stat ;
+   set -e; set -x; . $1) >> $testlog 2>&1
+  eval_result $? $SIG_RESULT
+  if [ $? -eq 0 ]; then
+    # Remove test log if the test was done as it was expected.
     [ $KEEP_LOG -eq 0 ] && rm $testlog
+  else
+    catlog $testlog
+    TOTAL_RESULT=1
   fi
 }

@@ -152,8 +239,15 @@ run_test() { # testfile
 for t in $TEST_CASES; do
   run_test $t
 done
+
 prlog ""
 prlog "# of passed: " `echo $PASSED_CASES | wc -w`
 prlog "# of failed: " `echo $FAILED_CASES | wc -w`
-
-test -z "$FAILED_CASES" # if no error, return 0
+prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
+prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
+prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
+prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
+prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`
+
+# if no error, return 0
+exit $TOTAL_RESULT
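
For reference only, not part of the patch: a minimal sketch of a test case that reports one of the new result codes. run_test() sources each test file in a subshell that has already done cd $TRACING_DIR and set -e, so any failing command turns the case into FAIL; calling one of the exit_* helpers instead signals the main ftracetest process (SIG_PID is captured from $$ at the top level, and the trap records the code in SIG_RESULT) and leaves the subshell with status 0, which eval_result() then maps to the matching result. The kprobe_events file and the do_fork probe below are only illustrations, not something this patch prescribes.

#!/bin/sh
# description: sample - skip as UNSUPPORTED when kprobe events are absent
# (hypothetical test file; kprobe_events and do_fork are illustrative only)

if [ ! -f kprobe_events ]; then
  exit_unsupported   # counted as [UNSUPPORTED] rather than as a failure
fi

echo 'p:testprobe do_fork' > kprobe_events   # would be the real test body
echo > kprobe_events                         # clean up the probe
exit 0   # a plain exit 0 with no signal pending is reported as [PASS]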