package Test::Harness;

BEGIN { require 5.002; }
use Exporter;
use Benchmark;
use Config;
use FileHandle;
use strict;

use vars qw($VERSION $verbose $switches $have_devel_corestack $curtest
            @ISA @EXPORT @EXPORT_OK);
$have_devel_corestack = 0;

$VERSION = "1.1604";

$ENV{HARNESS_ACTIVE} = 1;

# Some experimental versions of OS/2 build have broken $?
my $ignore_exitcode = $ENV{HARNESS_IGNORE_EXITCODE};

my $files_in_dir = $ENV{HARNESS_FILELEAK_IN_DIR};

my $tests_skipped = 0;
my $subtests_skipped = 0;

@ISA = ('Exporter');
@EXPORT = qw(&runtests);
@EXPORT_OK = qw($verbose $switches);

# Formats used by write() for the summary table of failed test scripts
# printed at the end of the run.
format STDOUT_TOP =
Failed Test  Status Wstat Total Fail  Failed  List of failed
-------------------------------------------------------------------------------
.

format STDOUT =
@<<<<<<<<<<<<<< @>> @>>>> @>>>> @>>> ^##.##% ^<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
{ $curtest->{name},
  $curtest->{estat},
  $curtest->{wstat},
  $curtest->{max},
  $curtest->{failed},
  $curtest->{percent},
  $curtest->{canon}
}
~~                                           ^<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                                             $curtest->{canon}
.


$verbose = 0;
$switches = "-w";

# Return every entry in a directory; used for the HARNESS_FILELEAK_IN_DIR check.
sub globdir { opendir DIRH, shift; my @f = readdir DIRH; closedir DIRH; @f }

sub runtests {
    my(@tests) = @_;
    local($|) = 1;
    my($test,$te,$ok,$next,$max,$pct,$totok,$totbonus,@failed,%failedtests);
    my $totmax = 0;
    my $files = 0;
    my $bad = 0;
    my $good = 0;
    my $total = @tests;

    # pass -I flags to children
    my $old5lib = $ENV{PERL5LIB};
    local($ENV{'PERL5LIB'}) = join($Config{path_sep}, @INC);

    if ($^O eq 'VMS') { $switches =~ s/-(\S*[A-Z]\S*)/"-$1"/g }

    my @dir_files = globdir $files_in_dir if defined $files_in_dir;
    my $t_start = new Benchmark;
    while ($test = shift(@tests)) {
        $te = $test;
        chop($te);
        if ($^O eq 'VMS') { $te =~ s/^.*\.t\./[.t./; }
        my $blank = (' ' x 77);
        my $leader = "$te" . '.' x (20 - length($te));
        my $ml = "";
        $ml = "\r$blank\r$leader"
            if -t STDOUT and not $ENV{HARNESS_NOTTY} and not $verbose;
        print $leader;
        my $fh = new FileHandle;
        $fh->open($test) or print "can't open $test. $!\n";
        my $first = <$fh>;
        my $s = $switches;
        $s .= join " ", q[ "-T"], map {qq["-I$_"]} @INC
            if $first =~ /^#!.*\bperl.*-\w*T/;
        $fh->close or print "can't close $test. $!\n";
        my $cmd = ($ENV{'COMPILE_TEST'}) ?
            "./perl -I../lib ../utils/perlcc $test -run 2>> ./compilelog |"
            : "$^X $s $test|";
        $cmd = "MCR $cmd" if $^O eq 'VMS';
        $fh->open($cmd) or print "can't run $test. $!\n";
        $ok = $next = $max = 0;
        @failed = ();
        my %todo = ();
        my $bonus = 0;
        my $skipped = 0;
        my $skip_reason;
        while (<$fh>) {
            if ($verbose) {
                print $_;
            }
            if (/^1\.\.([0-9]+) todo([\d\s]+)\;/) {
                $max = $1;
                for (split(/\s+/, $2)) { $todo{$_} = 1; }
                $totmax += $max;
                $files++;
                $next = 1;
            } elsif (/^1\.\.([0-9]+)(\s*\#\s*[Ss]kip\S*(?>\s+)(.+))?/) {
                $max = $1;
                $totmax += $max;
                $files++;
                $next = 1;
                $skip_reason = $3 if not $max and defined $3;
            } elsif ($max && /^(not\s+)?ok\b/) {
                my $this = $next;
                if (/^not ok\s*(\d*)/) {
                    $this = $1 if $1 > 0;
                    print "${ml}NOK $this" if $ml;
                    if (!$todo{$this}) {
                        push @failed, $this;
                    } else {
                        $ok++;
                        $totok++;
                    }
                } elsif (/^ok\s*(\d*)(\s*\#\s*[Ss]kip\S*(?:(?>\s+)(.+))?)?/) {
                    $this = $1 if $1 > 0;
                    print "${ml}ok $this/$max" if $ml;
                    $ok++;
                    $totok++;
                    $skipped++ if defined $2;
                    my $reason;
                    $reason = 'unknown reason' if defined $2;
                    $reason = $3 if defined $3;
                    if (defined $reason and defined $skip_reason) {
                        # print "was: '$skip_reason' new '$reason'\n";
                        $skip_reason = 'various reasons'
                            if $skip_reason ne $reason;
                    } elsif (defined $reason) {
                        $skip_reason = $reason;
                    }
                    $bonus++, $totbonus++ if $todo{$this};
                }
                if ($this > $next) {
                    # warn "Test output counter mismatch [test $this]\n";
                    # no need to warn probably
                    push @failed, $next..$this-1;
                } elsif ($this < $next) {
                    # we have seen more "ok" lines than the number suggests
                    warn "Confused test output: test $this answered after test ", $next-1, "\n";
                    $next = $this;
                }
                $next = $this + 1;
            }
        }
        $fh->close; # must close to reap child resource values
        my $wstatus = $ignore_exitcode ? 0 : $?; # Can trust $? ?
        my $estatus;
        $estatus = ($^O eq 'VMS'
                    ? eval 'use vmsish "status"; $estatus = $?'
                    : $wstatus >> 8);
        if ($wstatus) {
            my ($failed, $canon, $percent) = ('??', '??');
            printf "${ml}dubious\n\tTest returned status $estatus (wstat %d, 0x%x)\n",
                $wstatus, $wstatus;
            print "\t\t(VMS status is $estatus)\n" if $^O eq 'VMS';
            if (corestatus($wstatus)) { # until we have a wait module
                if ($have_devel_corestack) {
                    Devel::CoreStack::stack($^X);
                } else {
                    print "\ttest program seems to have generated a core\n";
                }
            }
            $bad++;
            if ($max) {
                if ($next == $max + 1 and not @failed) {
                    print "\tafter all the subtests completed successfully\n";
                    $percent = 0;
                    $failed = 0; # But we do not set $canon!
                } else {
                    push @failed, $next..$max;
                    $failed = @failed;
                    (my $txt, $canon) = canonfailed($max,$skipped,@failed);
                    $percent = 100*(scalar @failed)/$max;
                    print "DIED. ", $txt;
                }
            }
            $failedtests{$test} = { canon => $canon, max => $max || '??',
                                    failed => $failed,
                                    name => $test, percent => $percent,
                                    estat => $estatus, wstat => $wstatus,
                                  };
        } elsif ($ok == $max && $next == $max+1) {
            if ($max and $skipped + $bonus) {
                my @msg;
                push(@msg, "$skipped/$max skipped: $skip_reason")
                    if $skipped;
                push(@msg, "$bonus/$max unexpectedly succeeded")
                    if $bonus;
                print "${ml}ok, ".join(', ', @msg)."\n";
            } elsif ($max) {
                print "${ml}ok\n";
            } elsif (defined $skip_reason) {
                print "skipped: $skip_reason\n";
                $tests_skipped++;
            } else {
                print "skipped test on this platform\n";
                $tests_skipped++;
            }
            $good++;
        } elsif ($max) {
            if ($next <= $max) {
                push @failed, $next..$max;
            }
            if (@failed) {
                my ($txt, $canon) = canonfailed($max,$skipped,@failed);
                print $txt;
                $failedtests{$test} = { canon => $canon, max => $max,
                                        failed => scalar @failed,
                                        name => $test, percent => 100*(scalar @failed)/$max,
                                        estat => '', wstat => '',
                                      };
            } else {
                print "Don't know which tests failed: got $ok ok, expected $max\n";
                $failedtests{$test} = { canon => '??', max => $max,
                                        failed => '??',
                                        name => $test, percent => undef,
                                        estat => '', wstat => '',
                                      };
            }
            $bad++;
        } elsif ($next == 0) {
            print "FAILED before any test output arrived\n";
            $bad++;
            $failedtests{$test} = { canon => '??', max => '??',
                                    failed => '??',
                                    name => $test, percent => undef,
                                    estat => '', wstat => '',
                                  };
        }
        $subtests_skipped += $skipped;
        if (defined $files_in_dir) {
            my @new_dir_files = globdir $files_in_dir;
            if (@new_dir_files != @dir_files) {
                my %f;
                @f{@new_dir_files} = (1) x @new_dir_files;
                delete @f{@dir_files};
                my @f = sort keys %f;
                print "LEAKED FILES: @f\n";
                @dir_files = @new_dir_files;
            }
        }
    }
    my $t_total = timediff(new Benchmark, $t_start);

    if ($^O eq 'VMS') {
        if (defined $old5lib) {
            $ENV{PERL5LIB} = $old5lib;
        } else {
            delete $ENV{PERL5LIB};
        }
    }
    my $bonusmsg = '';
    $bonusmsg = (" ($totbonus subtest".($totbonus>1?'s':'').
                 " UNEXPECTEDLY SUCCEEDED)")
        if $totbonus;
    if ($tests_skipped) {
        $bonusmsg .= ", $tests_skipped test" . ($tests_skipped != 1 ? 's' : '');
        if ($subtests_skipped) {
            $bonusmsg .= " and $subtests_skipped subtest"
                         . ($subtests_skipped != 1 ? 's' : '');
        }
        $bonusmsg .= ' skipped';
    }
    elsif ($subtests_skipped) {
        $bonusmsg .= ", $subtests_skipped subtest"
                     . ($subtests_skipped != 1 ? 's' : '')
                     . " skipped";
    }
    if ($bad == 0 && $totmax) {
        print "All tests successful$bonusmsg.\n";
    } elsif ($total == 0) {
        die "FAILED--no tests were run for some reason.\n";
    } elsif ($totmax == 0) {
        my $blurb = $total == 1 ? "script" : "scripts";
        die "FAILED--$total test $blurb could be run, alas--no output ever seen\n";
    } else {
        $pct = sprintf("%.2f", $good / $total * 100);
        my $subpct = sprintf " %d/%d subtests failed, %.2f%% okay.",
            $totmax - $totok, $totmax, 100*$totok/$totmax;
        my $script;
        for $script (sort keys %failedtests) {
            $curtest = $failedtests{$script};
            write;
        }
        if ($bad) {
            $bonusmsg =~ s/^,\s*//;
            print "$bonusmsg.\n" if $bonusmsg;
            die "Failed $bad/$total test scripts, $pct% okay.$subpct\n";
        }
    }
    printf("Files=%d, Tests=%d, %s\n", $files, $totmax, timestr($t_total, 'nop'));

    return ($bad == 0 && $totmax);
}

# Guess whether the child process dumped core: use WCOREDUMP from wait.ph
# when it is available, otherwise fall back to a rough bit test. Also try
# (once) to load Devel::CoreStack so a stack dump can be produced later.
my $tried_devel_corestack;
sub corestatus {
    my($st) = @_;
    my($ret);

    eval { require 'wait.ph' };
    if ($@) {
        SWITCH: {
            $ret = ($st & 0200); # Tim says, this is for 90%
        }
    } else {
        $ret = WCOREDUMP($st);
    }

    eval { require Devel::CoreStack; $have_devel_corestack++ }
        unless $tried_devel_corestack++;

    $ret;
}

# Turn a list of failed test numbers into a human-readable summary and a
# canonical string of numbers and ranges (e.g. "3-5, 9") for the report table.
sub canonfailed ($@) {
    my($max,$skipped,@failed) = @_;
    my %seen;
    @failed = sort {$a <=> $b} grep !$seen{$_}++, @failed;
    my $failed = @failed;
    my @result = ();
    my @canon = ();
    my $min;
    my $last = $min = shift @failed;
    my $canon;
    if (@failed) {
        for (@failed, $failed[-1]) { # don't forget the last one
            if ($_ > $last+1 || $_ == $last) {
                if ($min == $last) {
                    push @canon, $last;
                } else {
                    push @canon, "$min-$last";
                }
                $min = $_;
            }
            $last = $_;
        }
        local $" = ", ";
        push @result, "FAILED tests @canon\n";
        $canon = "@canon";
    } else {
        push @result, "FAILED test $last\n";
        $canon = $last;
    }

    push @result, "\tFailed $failed/$max tests, ";
    push @result, sprintf("%.2f",100*(1-$failed/$max)), "% okay";
    my $ender = 's' x ($skipped > 1);
    my $good = $max - $failed - $skipped;
    my $goodper = sprintf("%.2f",100*($good/$max));
    push @result, " (-$skipped skipped test$ender: $good okay, $goodper%)" if $skipped;
    push @result, "\n";
    my $txt = join "", @result;
    ($txt, $canon);
}

1;
__END__

=head1 NAME

Test::Harness - run perl standard test scripts with statistics

=head1 SYNOPSIS

    use Test::Harness;

    runtests(@tests);

=head1 DESCRIPTION

(By using the L<Test> module, you can write test scripts without
knowing the exact output this module expects. However, if you need to
know the specifics, read on!)

Perl test scripts print to standard output C<"ok N"> for each single
test, where C<N> is an increasing sequence of integers. The first line
output by a standard test script is C<"1..M">, with C<M> being the
number of tests that should be run within the test script.
Test::Harness::runtests(@tests) runs all the test scripts named as
arguments and checks standard output for the expected C<"ok N">
strings.

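For example, the complete standard output of a small test script
containing two successful tests would look like this (a minimal
illustration of the protocol just described):

    1..2
    ok 1
    ok 2
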
After all tests have been performed, runtests() prints some
performance statistics that are computed by the Benchmark module.

=head2 The test script output

Any output from the test script to standard error is not captured by
the harness and will thus be seen by the user. Lines written to
standard output containing C</^(not\s+)?ok\b/> are interpreted as
feedback for runtests(). All other lines are discarded.

The test numbers after C<ok> may be omitted. In that case
Test::Harness temporarily maintains its own counter until the script
supplies test numbers again. So the following test script

    print <<END;
    1..6
    not ok
    ok
    not ok
    ok
    ok
    END

will generate

    FAILED tests 1, 3, 6
    Failed 3/6 tests, 50.00% okay

The global variable $Test::Harness::verbose is exportable and can be
used to let runtests() display the standard output of the script
without otherwise altering its behavior.

The global variable $Test::Harness::switches is exportable and can be
used to set perl command line options used for running the test
script(s). The default value is C<-w>.

If a standard output line contains the substring C< # Skip> (with
variations in spacing and case) after C<ok> or C<ok NUMBER>, it is
counted as a skipped test. If the whole test script succeeds, the
count of skipped tests is included in the generated output.

C<Test::Harness> reports the text after C< # Skip(whatever)> as the
reason for skipping. A similar explanation can also be included in a
C<1..0> line emitted when the whole test is skipped:

    1..0 # Skipped: no leverage found

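An individual subtest can be skipped the same way; for instance, a test
script might print a line like the following (the test number and
reason here are made up for illustration):

    ok 13 # Skip no relevant database found
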
=head1 EXPORT

C<&runtests> is exported by Test::Harness by default.

=head1 DIAGNOSTICS

=over 4

=item C<All tests successful.\nFiles=%d, Tests=%d, %s>

If all tests are successful, some statistics about the performance are
printed.

=item C<FAILED tests %s\n\tFailed %d/%d tests, %.2f%% okay.>

For each single script that has failing subtests, statistics like the
above are printed.

=item C<Test returned status %d (wstat %d)>

For scripts that return a non-zero exit status, both C<$? E<gt>E<gt> 8>
and C<$?> are printed in a message similar to the above.

=item C<Failed 1 test, %.2f%% okay. %s>

=item C<Failed %d/%d tests, %.2f%% okay. %s>

If not all tests were successful, the script dies with one of the
above messages.

=back

=head1 ENVIRONMENT

Setting C<HARNESS_IGNORE_EXITCODE> makes the harness ignore the exit
status of child processes.

Setting C<HARNESS_NOTTY> to a true value forces the harness to behave
as though STDOUT were not a console. You may need to set this if you
don't want the harness to output more frequent progress messages using
carriage returns. Some consoles may not handle carriage returns
properly (which results in somewhat messy output).

If C<HARNESS_FILELEAK_IN_DIR> is set to the name of a directory, the
harness will check after each test whether new files appeared in that
directory, and report them as

    LEAKED FILES: scr.tmp 0 my.db

A relative directory name is interpreted with respect to the current
directory at the moment runtests() was called. Putting an absolute path
into C<HARNESS_FILELEAK_IN_DIR> may give more predictable results.

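Note that the variable is examined when Test::Harness is loaded, so it
has to be set before the module is pulled in; for example (an
illustrative sketch with a made-up directory name):

    BEGIN { $ENV{HARNESS_FILELEAK_IN_DIR} = "/tmp/testdir" }
    use Test::Harness;

    runtests(@tests);
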
Harness sets C<HARNESS_ACTIVE> before executing the individual tests.
This allows the tests to determine if they are being executed through
the harness or by any other means.

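A test script can check the variable to adjust its behavior when it is
driven by the harness; for example (an illustrative sketch):

    unless ($ENV{HARNESS_ACTIVE}) {
        # not running under Test::Harness, so it is safe to be chatty
        print "# running standalone, enabling extra diagnostics\n";
    }
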
=head1 SEE ALSO

L<Test> for writing test scripts and also L<Benchmark> for the
underlying timing routines.

=head1 AUTHORS

Either Tim Bunce or Andreas Koenig, we don't know. What we know for
sure is that it was inspired by Larry Wall's TEST script that came
with perl distributions for ages. Numerous anonymous contributors
exist. The current maintainer is Andreas Koenig.

=head1 BUGS

Test::Harness uses $^X to determine the perl binary with which to run
the tests. Test scripts running via the shebang (C<#!>) line may not
be portable because $^X is not consistent for shebang scripts across
platforms. This is no problem when Test::Harness is run with an
absolute path to the perl binary or when $^X can be found in the path.

=cut