#!/bin/bash
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2000-2002,2006 Silicon Graphics, Inc.  All Rights Reserved.
#
# Control script for QA
#
tmp=/tmp/$$
status=0
needwrap=true
needsum=true
n_try=0
try=""
n_bad=0
sum_bad=0
bad=""
n_notrun=0
notrun=""
interrupt=true
diff="diff -u"
showme=false
have_test_arg=false
randomize=false
export here=`pwd`
xfile=""
subdir_xfile=""
brief_test_summary=false
do_report=false
DUMP_OUTPUT=false
iterations=1

# This is a global variable used to pass test failure text to reporting gunk
_err_msg=""

# start the initialisation work now
iam=check

export MSGVERB="text:action"
export QA_CHECK_FS=${QA_CHECK_FS:=true}

# number of diff lines from a failed test, 0 for whole output
export DIFF_LENGTH=${DIFF_LENGTH:=10}

# by default don't output timestamps
timestamp=${TIMESTAMP:=false}

rm -f $tmp.list $tmp.tmp $tmp.grep $here/$iam.out $tmp.xlist $tmp.report.*

SRC_GROUPS="generic shared"
export SRC_DIR="tests"

usage()
{
    echo "Usage: $0 [options] [testlist]"'

check options
    -nfs                test NFS
    -glusterfs          test GlusterFS
    -cifs               test CIFS
    -9p                 test 9p
    -virtiofs           test virtiofs
    -overlay            test overlay
    -pvfs2              test PVFS2
    -tmpfs              test TMPFS
    -ubifs              test ubifs
    -l                  line mode diff
    -udiff              show unified diff (default)
    -n                  show me, do not run tests
    -T                  output timestamps
    -r                  randomize test order
    -i <n>              iterate the test list <n> times
    -d                  dump test output to stdout
    -b                  brief test summary
    -R fmt[,fmt]        generate report in formats specified. Supported format: [xunit]
    --large-fs          optimise scratch device for large filesystems
    -s section          run only specified section from config file
    -S section          exclude the specified section from the config file

testlist options
    -g group[,group...] include tests from these groups
    -x group[,group...] exclude tests from these groups
    -X exclude_file     exclude individual tests
    -E external_file    exclude individual tests
    [testlist]          include tests matching names in testlist

testlist argument is a list of tests in the form of <test dir>/<test name>.

<test dir> is a directory under tests that contains a group file,
with a list of the names of the tests in that directory.

<test name> may be either a specific test file name (e.g. xfs/001) or
a test file name match pattern (e.g. xfs/*).

group argument is either a name of a tests group to collect from all
the test dirs (e.g. quick) or a name of a tests group to collect from
a specific tests dir in the form of <test dir>/<group name> (e.g. xfs/quick).
If you want to run all the tests in the test suite, use "-g all" to specify all
groups.

exclude_file argument refers to a name of a file inside each test directory.
For every test dir where this file is found, the listed test names are
excluded from the list of tests to run from that test dir.

external_file argument is a path to a single file containing a list of tests
to exclude in the form of <test dir>/<test name>.

examples:
 check xfs/001
 check -g quick
 check -g xfs/quick
 check -x stress xfs/*
 check -X .exclude -g auto
 check -E ~/.xfstests.exclude
'
            exit 0
}

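# Print the tests under $SRC_DIR/$1 that belong to group $2, as listed in
# that directory's group file.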
get_sub_group_list()
{
        local d=$1
        local grp=$2

        test -s "$SRC_DIR/$d/group" || return 1

        local grpl=$(sed -n < $SRC_DIR/$d/group \
                -e 's/#.*//' \
                -e 's/$/ /' \
                -e "s;^\($VALID_TEST_NAME\).* $grp .*;$SRC_DIR/$d/\1;p")
        echo $grpl
}

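# Expand a group name into the list of tests that belong to it. The group is
# either a plain group name, searched in all relevant test dirs, or given as
# <test dir>/<group name> to restrict it to a single test dir.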
get_group_list()
{
        local grp=$1
        local grpl=""
        local sub=$(dirname $grp)
        local fsgroup="$FSTYP"

        if [ -n "$sub" -a "$sub" != "." -a -d "$SRC_DIR/$sub" ]; then
                # group is given as <subdir>/<group> (e.g. xfs/quick)
                grp=$(basename $grp)
                get_sub_group_list $sub $grp
                return
        fi

        if [ "$FSTYP" = ext2 -o "$FSTYP" = ext3 ]; then
            fsgroup=ext4
        fi
        for d in $SRC_GROUPS $fsgroup; do
                if ! test -d "$SRC_DIR/$d" ; then
                        continue
                fi
                grpl="$grpl $(get_sub_group_list $d $grp)"
        done
        echo $grpl
}

# Find all tests, excluding files that are test metadata such as group files.
# It matches test names against $VALID_TEST_NAME defined in common/rc
get_all_tests()
{
        touch $tmp.list
        for d in $SRC_GROUPS $FSTYP; do
                if ! test -d "$SRC_DIR/$d" ; then
                        continue
                fi
                ls $SRC_DIR/$d/* | \
                        grep -v "\..*" | \
                        grep "^$SRC_DIR/$d/$VALID_TEST_NAME"| \
                        grep -v "group\|Makefile" >> $tmp.list 2>/dev/null
        done
}

# takes the list of tests to run in $tmp.list, and removes the tests passed to
# the function from that list.
trim_test_list()
{
        test_list="$*"

        rm -f $tmp.grep
        numsed=0
        for t in $test_list
        do
            if [ $numsed -gt 100 ]; then
                grep -v -f $tmp.grep <$tmp.list >$tmp.tmp
                mv $tmp.tmp $tmp.list
                numsed=0
                rm -f $tmp.grep
            fi
            echo "^$t\$" >>$tmp.grep
            numsed=`expr $numsed + 1`
        done
        grep -v -f $tmp.grep <$tmp.list >$tmp.tmp
        mv $tmp.tmp $tmp.list
        rm -f $tmp.grep
}


_wallclock()
{
    date "+%s"
}

_timestamp()
{
    now=`date "+%T"`
    echo -n " [$now]"
}

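# Build the list of tests to run from the tests named on the command line and
# the included/excluded groups, then sort it numerically (or shuffle it when
# -r was given). The result is left in $list.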
_prepare_test_list()
{
        unset list
        # Tests specified on the command line
        if [ -s $tmp.arglist ]; then
                cat $tmp.arglist > $tmp.list
        else
                touch $tmp.list
        fi

        # Specified groups to include
        # Note that the CLI processing adds a leading space to the first group
        # parameter, so we have to catch that here when checking for "all"
        if ! $have_test_arg && [ "$GROUP_LIST" == " all" ]; then
                # no test numbers, do everything
                get_all_tests
        else
                for group in $GROUP_LIST; do
                        list=$(get_group_list $group)
                        if [ -z "$list" ]; then
                                echo "Group \"$group\" is empty or not defined?"
                                exit 1
                        fi

                        for t in $list; do
                                grep -s "^$t\$" $tmp.list >/dev/null || \
                                                        echo "$t" >>$tmp.list
                        done
                done
        fi

        # Specified groups to exclude
        for xgroup in $XGROUP_LIST; do
                list=$(get_group_list $xgroup)
                if [ -z "$list" ]; then
                        echo "Group \"$xgroup\" is empty or not defined?"
                        exit 1
                fi

                trim_test_list $list
        done

        # sort the list of tests into numeric order
        if $randomize; then
                if type shuf >& /dev/null; then
                        sorter="shuf"
                else
                        sorter="awk -v seed=$RANDOM -f randomize.awk"
                fi
        else
                sorter="cat"
        fi
        list=`sort -n $tmp.list | uniq | $sorter`
        rm -f $tmp.list
}

# Process command arguments first.
while [ $# -gt 0 ]; do
        case "$1" in
        -\? | -h | --help) usage ;;

        -nfs)           FSTYP=nfs ;;
        -glusterfs)     FSTYP=glusterfs ;;
        -cifs)          FSTYP=cifs ;;
        -9p)            FSTYP=9p ;;
        -virtiofs)      FSTYP=virtiofs ;;
        -overlay)       FSTYP=overlay; export OVERLAY=true ;;
        -pvfs2)         FSTYP=pvfs2 ;;
        -tmpfs)         FSTYP=tmpfs ;;
        -ubifs)         FSTYP=ubifs ;;

        -g)     group=$2 ; shift ;
                GROUP_LIST="$GROUP_LIST ${group//,/ }"
                ;;

        -x)     xgroup=$2 ; shift ;
                XGROUP_LIST="$XGROUP_LIST ${xgroup//,/ }"
                ;;

        -X)     subdir_xfile=$2; shift ;
                ;;
        -E)     xfile=$2; shift ;
                if [ -f $xfile ]; then
                        sed "s/#.*$//" "$xfile" >> $tmp.xlist
                fi
                ;;
        -s)     RUN_SECTION="$RUN_SECTION $2"; shift ;;
        -S)     EXCLUDE_SECTION="$EXCLUDE_SECTION $2"; shift ;;
        -l)     diff="diff" ;;
        -udiff) diff="$diff -u" ;;

        -n)     showme=true ;;
        -r)     randomize=true ;;
        -i)     iterations=$2; shift ;;
        -T)     timestamp=true ;;
        -d)     DUMP_OUTPUT=true ;;
        -b)     brief_test_summary=true;;
        -R)     report_fmt=$2 ; shift ;
                REPORT_LIST="$REPORT_LIST ${report_fmt//,/ }"
                do_report=true
                ;;
        --large-fs) export LARGE_SCRATCH_DEV=yes ;;
        --extra-space=*) export SCRATCH_DEV_EMPTY_SPACE=${1#*=} ;;

        -*)     usage ;;
        *)      # not an argument, we've got tests now.
                have_test_arg=true ;;
        esac

        # if we've found a test specification, then break out of the processing
        # loop before we shift the arguments so that this is the first argument
        # that we process in the test arg loop below.
        if $have_test_arg; then
                break;
        fi

        shift
done

# we need common/rc, that also sources common/config. We need to source it
# after processing args, overlay needs FSTYP set before sourcing common/config
if ! . ./common/rc; then
        echo "check: failed to source common/rc"
        exit 1
fi

if [ -n "$subdir_xfile" ]; then
        for d in $SRC_GROUPS $FSTYP; do
                [ -f $SRC_DIR/$d/$subdir_xfile ] || continue
                for f in `sed "s/#.*$//" $SRC_DIR/$d/$subdir_xfile`; do
                        echo $d/$f >> $tmp.xlist
                done
        done
fi

# Process tests from command line now.
if $have_test_arg; then
        while [ $# -gt 0 ]; do
                case "$1" in
                -*)     echo "Arguments before tests, please!"
                        status=1
                        exit $status
                        ;;
                *)      # Expand test pattern (e.g. xfs/???, *fs/001)
                        list=$(cd $SRC_DIR; echo $1)
                        for t in $list; do
                                test_dir=`dirname $t`
                                test_dir=${test_dir#$SRC_DIR/*}
                                test_name=`basename $t`
                                group_file=$SRC_DIR/$test_dir/group

                                if egrep -q "^$test_name" $group_file; then
                                        # in group file ... OK
                                        echo $SRC_DIR/$test_dir/$test_name \
                                                >>$tmp.arglist
                                else
                                        # oops
                                        echo "$t - unknown test, ignored"
                                fi
                        done
                        ;;
                esac

                shift
        done
elif [ -z "$GROUP_LIST" ]; then
        # default group list is the auto group. If any other group or test is
        # specified, we use that instead.
        GROUP_LIST="auto"
fi

if [ `id -u` -ne 0 ]
then
    echo "check: QA must be run as root"
    exit 1
fi

_wipe_counters()
{
        n_try="0"
        n_bad="0"
        n_notrun="0"
        unset try notrun bad
}

_global_log() {
        echo "$1" >> $check.log
        if $OPTIONS_HAVE_SECTIONS; then
                echo "$1" >> ${REPORT_DIR}/check.log
        fi
}

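# Wrap up the section that just finished: merge the per-test timings into
# $check.time, log what was run, not run and failed, and emit the optional
# section report.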
_wrapup()
{
        seq="check"
        check="$RESULT_BASE/check"

        if $showme; then
                if $needwrap; then
                        if $do_report; then
                                _make_section_report
                        fi
                        needwrap=false
                fi
        elif $needwrap; then
                if [ -f $check.time -a -f $tmp.time ]; then
                        cat $check.time $tmp.time  \
                                | $AWK_PROG '
                                { t[$1] = $2 }
                                END {
                                        if (NR > 0) {
                                                for (i in t) print i " " t[i]
                                        }
                                }' \
                                | sort -n >$tmp.out
                        mv $tmp.out $check.time
                        if $OPTIONS_HAVE_SECTIONS; then
                                cp $check.time ${REPORT_DIR}/check.time
                        fi
                fi

                _global_log ""
                _global_log "$(date)"

                echo "SECTION       -- $section" >>$tmp.summary
                echo "=========================" >>$tmp.summary
                if [ ! -z "$n_try" -a $n_try != 0 ]; then
                        if [ $brief_test_summary == "false" ]; then
                                echo "Ran:$try"
                                echo "Ran:$try" >>$tmp.summary
                        fi
                        _global_log "Ran:$try"
                fi

                $interrupt && echo "Interrupted!" | tee -a $check.log
                if $OPTIONS_HAVE_SECTIONS; then
                        $interrupt && echo "Interrupted!" | tee -a \
                                ${REPORT_DIR}/check.log
                fi

                if [ ! -z "$notrun" ]; then
                        if [ $brief_test_summary == "false" ]; then
                                echo "Not run:$notrun"
                                echo "Not run:$notrun" >>$tmp.summary
                        fi
                        _global_log "Not run:$notrun"
                fi

                if [ ! -z "$n_bad" -a $n_bad != 0 ]; then
                        echo "Failures:$bad"
                        echo "Failed $n_bad of $n_try tests"
                        _global_log "Failures:$bad"
                        _global_log "Failed $n_bad of $n_try tests"
                        echo "Failures:$bad" >>$tmp.summary
                        echo "Failed $n_bad of $n_try tests" >>$tmp.summary
                else
                        echo "Passed all $n_try tests"
                        _global_log "Passed all $n_try tests"
                        echo "Passed all $n_try tests" >>$tmp.summary
                fi
                echo "" >>$tmp.summary
                if $do_report; then
                        _make_section_report
                fi
                needwrap=false
        fi

        sum_bad=`expr $sum_bad + $n_bad`
        _wipe_counters
        rm -f /tmp/*.rawout /tmp/*.out /tmp/*.err /tmp/*.time
        if ! $OPTIONS_HAVE_SECTIONS; then
                rm -f $tmp.*
        fi
}

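# Print the overall summary once the final section has been wrapped up.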
_summary()
{
        _wrapup
        if $showme; then
                :
        elif $needsum; then
                count=`wc -L $tmp.summary | cut -f1 -d" "`
                cat $tmp.summary
                needsum=false
        fi
        rm -f $tmp.*
}

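# Check the test and/or scratch filesystems for consistency if the previous
# test declared that it used them, and leave the scratch fs unmounted.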
_check_filesystems()
{
        if [ -f ${RESULT_DIR}/require_test ]; then
                _check_test_fs || err=true
                rm -f ${RESULT_DIR}/require_test*
        else
                _test_unmount 2> /dev/null
        fi
        if [ -f ${RESULT_DIR}/require_scratch ]; then
                _check_scratch_fs || err=true
                rm -f ${RESULT_DIR}/require_scratch*
        fi
        _scratch_unmount 2> /dev/null
}

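# Report and skip the given test if it is on the exclude list built from the
# -X and -E options. Returns 1 if the test is expunged, 0 otherwise.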
_expunge_test()
{
        local TEST_ID="$1"
        if [ -s $tmp.xlist ]; then
                if grep -q $TEST_ID $tmp.xlist; then
                        echo "       [expunged]"
                        return 1
                fi
        fi
        return 0
}

# Can we run systemd scopes?
HAVE_SYSTEMD_SCOPES=
systemctl reset-failed "fstests-check" &>/dev/null
systemd-run --quiet --unit "fstests-check" --scope bash -c "exit 77" &> /dev/null
test $? -eq 77 && HAVE_SYSTEMD_SCOPES=yes

# Make the check script unattractive to the OOM killer...
OOM_SCORE_ADJ="/proc/self/oom_score_adj"
test -w ${OOM_SCORE_ADJ} && echo -1000 > ${OOM_SCORE_ADJ}

# ...and make the tests themselves somewhat more attractive to it, so that if
# the system runs out of memory it'll be the test that gets killed and not the
# test framework.
#
# If systemd is available, run the entire test script in a scope so that we can
# kill all subprocesses of the test if it fails to clean up after itself.  This
# is essential for ensuring that the post-test unmount succeeds.  Note that
# systemd doesn't automatically remove transient scopes that fail to terminate
# when systemd tells them to terminate (e.g. programs stuck in D state when
# systemd sends SIGKILL), so we use reset-failed to tear down the scope.
_run_seq() {
        local cmd=(bash -c "test -w ${OOM_SCORE_ADJ} && echo 250 > ${OOM_SCORE_ADJ}; exec ./$seq")

        if [ -n "${HAVE_SYSTEMD_SCOPES}" ]; then
                local unit="$(systemd-escape "fs$seq").scope"
                systemctl reset-failed "${unit}" &> /dev/null
                systemd-run --quiet --unit "${unit}" --scope "${cmd[@]}"
                res=$?
                systemctl stop "${unit}" &> /dev/null
                return "${res}"
        else
                "${cmd[@]}"
        fi
}

_detect_kmemleak
_prepare_test_list

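# Make sure we always print a summary, even when we are interrupted or exit
# early.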
if $OPTIONS_HAVE_SECTIONS; then
        trap "_summary; exit \$status" 0 1 2 3 15
else
        trap "_wrapup; exit \$status" 0 1 2 3 15
fi

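# Run the current test list against a single config file section: prepare the
# test and scratch filesystems, run each test, compare its output against the
# golden output and record the result.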
function run_section()
{
        local section=$1

        OLD_FSTYP=$FSTYP
        OLD_TEST_FS_MOUNT_OPTS=$TEST_FS_MOUNT_OPTS
        get_next_config $section

        # Do we need to run only some sections ?
        if [ ! -z "$RUN_SECTION" ]; then
                skip=true
                for s in $RUN_SECTION; do
                        if [ $section == $s ]; then
                                skip=false
                                break;
                        fi
                done
                if $skip; then
                        return
                fi
        fi

        # Did this section get excluded?
        if [ ! -z "$EXCLUDE_SECTION" ]; then
                skip=false
                for s in $EXCLUDE_SECTION; do
                        if [ $section == $s ]; then
                                skip=true
                                break;
                        fi
                done
                if $skip; then
                        return
                fi
        fi

        mkdir -p $RESULT_BASE
        if [ ! -d $RESULT_BASE ]; then
                echo "failed to create results directory $RESULT_BASE"
                status=1
                exit
        fi

        if $OPTIONS_HAVE_SECTIONS; then
                echo "SECTION       -- $section"
        fi

        sect_start=`_wallclock`
        if $RECREATE_TEST_DEV || [ "$OLD_FSTYP" != "$FSTYP" ]; then
                echo "RECREATING    -- $FSTYP on $TEST_DEV"
                _test_unmount 2> /dev/null
                if ! _test_mkfs >$tmp.err 2>&1
                then
                        echo "our local _test_mkfs routine ..."
                        cat $tmp.err
                        echo "check: failed to mkfs \$TEST_DEV using specified options"
                        status=1
                        exit
                fi
                if ! _test_mount
                then
                        echo "check: failed to mount $TEST_DEV on $TEST_DIR"
                        status=1
                        exit
                fi
                _prepare_test_list
        elif [ "$OLD_TEST_FS_MOUNT_OPTS" != "$TEST_FS_MOUNT_OPTS" ]; then
                _test_unmount 2> /dev/null
                if ! _test_mount
                then
                        echo "check: failed to mount $TEST_DEV on $TEST_DIR"
                        status=1
                        exit
                fi
        fi

        init_rc

        seq="check"
        check="$RESULT_BASE/check"

        # don't leave old full output behind on a clean run
        rm -f $check.full

        [ -f $check.time ] || touch $check.time

        # print out our test configuration
        echo "FSTYP         -- `_full_fstyp_details`"
        echo "PLATFORM      -- `_full_platform_details`"
        if [ ! -z "$SCRATCH_DEV" ]; then
          echo "MKFS_OPTIONS  -- `_scratch_mkfs_options`"
          echo "MOUNT_OPTIONS -- `_scratch_mount_options`"
        fi
        echo
        needwrap=true

        if [ ! -z "$SCRATCH_DEV" ]; then
          _scratch_unmount 2> /dev/null
          # call the overridden mkfs - make sure the FS is built
          # the same as we'll create it later.

          if ! _scratch_mkfs >$tmp.err 2>&1
          then
              echo "our local _scratch_mkfs routine ..."
              cat $tmp.err
              echo "check: failed to mkfs \$SCRATCH_DEV using specified options"
              status=1
              exit
          fi

          # call the overridden mount - make sure the FS mounts with
          # the same options that we'll mount with later.
          if ! _try_scratch_mount >$tmp.err 2>&1
          then
              echo "our local mount routine ..."
              cat $tmp.err
              echo "check: failed to mount \$SCRATCH_DEV using specified options"
              status=1
              exit
          else
              _scratch_unmount
          fi
        fi

        seqres="$check"
        _check_test_fs

        err=false
        first_test=true
        prev_seq=""
        for seq in $list ; do
                # Run report for previous test!
                if $err ; then
                        bad="$bad $seqnum"
                        n_bad=`expr $n_bad + 1`
                        tc_status="fail"
                fi
                if $do_report && ! $first_test ; then
                        if [ $tc_status != "expunge" ] ; then
                                _make_testcase_report "$prev_seq" "$tc_status"
                        fi
                fi
                first_test=false

                err=false
                prev_seq="$seq"
                if [ ! -f $seq ]; then
                        # Try to get the full name in case the user supplied
                        # only the seq id and the test has a name. A bit of
                        # hassle to really find the test and not its sample
                        # output or helper files.
                        bname=$(basename $seq)
                        full_seq=$(find $(dirname $seq) -name $bname* -executable |
                                awk '(NR == 1 || length < length(shortest)) { shortest = $0 }\
                                     END { print shortest }')
                        if [ -f $full_seq ] && \
                           [ x$(echo $bname | grep -o "^$VALID_TEST_ID") != x ]; then
                                seq=$full_seq
                        fi
                fi

                # the filename for the test and the name output are different.
                # we don't include the tests/ directory in the name output.
                export seqnum=`echo $seq | sed -e "s;$SRC_DIR/;;"`

                # Similarly, the result directory needs to replace the tests/
                # part of the test location.
                group=`dirname $seq`
                if $OPTIONS_HAVE_SECTIONS; then
                        export RESULT_DIR=`echo $group | sed -e "s;$SRC_DIR;${RESULT_BASE}/$section;"`
                        REPORT_DIR="$RESULT_BASE/$section"
                else
                        export RESULT_DIR=`echo $group | sed -e "s;$SRC_DIR;$RESULT_BASE;"`
                        REPORT_DIR="$RESULT_BASE"
                fi
                seqres="$REPORT_DIR/$seqnum"

                mkdir -p $RESULT_DIR
                rm -f ${RESULT_DIR}/require_scratch*
                rm -f ${RESULT_DIR}/require_test*
                echo -n "$seqnum"

                if $showme; then
                        _expunge_test $seqnum
                        if [ $? -eq 1 ]; then
                            tc_status="expunge"
                            continue
                        fi
                        echo
                        start=0
                        stop=0
                        tc_status="list"
                        n_notrun=`expr $n_notrun + 1`
                        continue
                fi

                tc_status="pass"
                if [ ! -f $seq ]; then
                        echo " - no such test?"
                        continue
                fi

                # really going to try and run this one
                rm -f $seqres.out.bad

                # check if we really should run it
                _expunge_test $seqnum
                if [ $? -eq 1 ]; then
                        tc_status="expunge"
                        continue
                fi

                # record that we really tried to run this test.
                try="$try $seqnum"
                n_try=`expr $n_try + 1`

                # slashes now in names, sed barfs on them so use grep
                lasttime=`grep -w ^$seqnum $check.time | awk '// {print $2}'`
                if [ "X$lasttime" != X ]; then
                        echo -n " ${lasttime}s ... "
                else
                        echo -n "       " # prettier output with timestamps.
                fi
                rm -f core $seqres.notrun

                start=`_wallclock`
                $timestamp && echo -n " ["`date "+%T"`"]"
                [ ! -x $seq ] && chmod u+x $seq # ensure we can run it
                $LOGGER_PROG "run xfstest $seqnum"
                if [ -w /dev/kmsg ]; then
                        export date_time=`date +"%F %T"`
                        echo "run fstests $seqnum at $date_time" > /dev/kmsg
                        # _check_dmesg depends on this log in dmesg
                        touch ${RESULT_DIR}/check_dmesg
                fi
                _try_wipe_scratch_devs > /dev/null 2>&1

                # clear the WARN_ONCE state to allow a potential problem
                # to be reported for each test
                (echo 1 > $DEBUGFS_MNT/clear_warn_once) > /dev/null 2>&1

                if [ "$DUMP_OUTPUT" = true ]; then
                        _run_seq 2>&1 | tee $tmp.out
                        # Because $? would get tee's return code
                        sts=${PIPESTATUS[0]}
                else
                        _run_seq >$tmp.out 2>&1
                        sts=$?
                fi

                if [ -f core ]; then
                        _dump_err_cont "[dumped core]"
                        mv core $RESULT_BASE/$seqnum.core
                        err=true
                fi

                if [ -f $seqres.notrun ]; then
                        $timestamp && _timestamp
                        stop=`_wallclock`
                        $timestamp || echo -n "[not run] "
                        $timestamp && echo " [not run]" && \
                                      echo -n " $seqnum -- "
                        cat $seqres.notrun
                        notrun="$notrun $seqnum"
                        n_notrun=`expr $n_notrun + 1`
                        tc_status="notrun"
                        continue;
                fi

                if [ $sts -ne 0 ]; then
                        _dump_err_cont "[failed, exit status $sts]"
                        _test_unmount 2> /dev/null
                        _scratch_unmount 2> /dev/null
                        rm -f ${RESULT_DIR}/require_test*
                        rm -f ${RESULT_DIR}/require_scratch*
                        err=true
                else
                        # the test apparently passed, so check for corruption
                        # and log messages that shouldn't be there.
                        _check_filesystems
                        _check_dmesg || err=true
                fi

                # Reload the module after each test to check for leaks or
                # other problems.
                if [ -n "${TEST_FS_MODULE_RELOAD}" ]; then
                        _test_unmount 2> /dev/null
                        _scratch_unmount 2> /dev/null
                        modprobe -r fs-$FSTYP
                        modprobe fs-$FSTYP
                fi

                # Scan for memory leaks after every test so that associating
                # a leak to a particular test will be as accurate as possible.
                _check_kmemleak || err=true

                # test ends after all checks are done.
                $timestamp && _timestamp
                stop=`_wallclock`

                if [ ! -f $seq.out ]; then
                        _dump_err "no qualified output"
                        err=true
                        continue;
                fi

                # coreutils 8.16+ changed quote formats in error messages
                # from `foo' to 'foo'. Filter old versions to match the new
                # version.
                sed -i "s/\`/\'/g" $tmp.out
                if diff $seq.out $tmp.out >/dev/null 2>&1 ; then
                        if ! $err ; then
                                echo "$seqnum `expr $stop - $start`" >>$tmp.time
                                echo -n " `expr $stop - $start`s"
                        fi
                        echo ""
                else
                        _dump_err "- output mismatch (see $seqres.out.bad)"
                        mv $tmp.out $seqres.out.bad
                        $diff $seq.out $seqres.out.bad | {
                        if test "$DIFF_LENGTH" -le 0; then
                                cat
                        else
                                head -n "$DIFF_LENGTH"
                                echo "..."
                                echo "(Run '$diff $here/$seq.out $seqres.out.bad'" \
                                        " to see the entire diff)"
                        fi; } | sed -e 's/^\(.\)/    \1/'
                        err=true
                fi
        done

        # make sure we record the status of the last test we ran.
        if $err ; then
                bad="$bad $seqnum"
                n_bad=`expr $n_bad + 1`
                tc_status="fail"
        fi
        if $do_report && ! $first_test ; then
                if [ $tc_status != "expunge" ] ; then
                        _make_testcase_report "$prev_seq" "$tc_status"
                fi
        fi

        sect_stop=`_wallclock`
        interrupt=false
        _wrapup
        interrupt=true
        echo

        _test_unmount 2> /dev/null
        _scratch_unmount 2> /dev/null
}

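# Main loop: run every configured section, $iterations times.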
for ((iters = 0; iters < $iterations; iters++)) do
        for section in $HOST_OPTIONS_SECTIONS; do
                run_section $section
        done
done

interrupt=false
status=`expr $sum_bad != 0`
exit