package Test::Harness;

use 5.005_64;
use Exporter;
use Benchmark;
use Config;
use FileHandle;
use strict;

our($VERSION, $verbose, $switches, $have_devel_corestack, $curtest,
    @ISA, @EXPORT, @EXPORT_OK);
$have_devel_corestack = 0;

$VERSION = "1.1604";

$ENV{HARNESS_ACTIVE} = 1;

# Some experimental versions of OS/2 build have broken $?
my $ignore_exitcode = $ENV{HARNESS_IGNORE_EXITCODE};

my $files_in_dir = $ENV{HARNESS_FILELEAK_IN_DIR};

my $tests_skipped = 0;
my $subtests_skipped = 0;

@ISA = ('Exporter');
@EXPORT = qw(&runtests);
@EXPORT_OK = qw($verbose $switches);

format STDOUT_TOP =
Failed Test  Status Wstat Total Fail  Failed  List of failed
-------------------------------------------------------------------------------
.

format STDOUT =
@<<<<<<<<<<<<<< @>> @>>>> @>>>> @>>> ^##.##%  ^<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
{ $curtest->{name},
  $curtest->{estat},
  $curtest->{wstat},
  $curtest->{max},
  $curtest->{failed},
  $curtest->{percent},
  $curtest->{canon}
}
~~                                            ^<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                                              $curtest->{canon}
.


$verbose = 0;
$switches = "-w";

sub globdir { opendir DIRH, shift; my @f = readdir DIRH; closedir DIRH; @f }

sub runtests {
    my(@tests) = @_;
    local($|) = 1;
    my($test,$te,$ok,$next,$max,$pct,$totok,$totbonus,@failed,%failedtests);
    my $totmax = 0;
    my $files = 0;
    my $bad = 0;
    my $good = 0;
    my $total = @tests;

    # pass -I flags to children
    my $old5lib = $ENV{PERL5LIB};

    # VMS has a 255-byte limit on the length of %ENV entries, so
    # toss the ones that involve perl_root, the install location
    # for VMS
    my $new5lib;
    if ($^O eq 'VMS') {
        $new5lib = join($Config{path_sep}, grep {!/perl_root/i;} @INC);
        $switches =~ s/-(\S*[A-Z]\S*)/"-$1"/g;
    }
    else {
        $new5lib = join($Config{path_sep}, @INC);
    }
    local($ENV{'PERL5LIB'}) = $new5lib;

    # snapshot of the watched directory, if file-leak checking is enabled
    my @dir_files;
    @dir_files = globdir $files_in_dir if defined $files_in_dir;
    my $t_start = new Benchmark;
    while ($test = shift(@tests)) {
        $te = $test;
        chop($te);
        if ($^O eq 'VMS') { $te =~ s/^.*\.t\./[.t./; }
        my $blank = (' ' x 77);
        my $leader = "$te" . '.' x (20 - length($te));
        my $ml = "";
        $ml = "\r$blank\r$leader"
            if -t STDOUT and not $ENV{HARNESS_NOTTY} and not $verbose;
        print $leader;
        my $fh = new FileHandle;
        $fh->open($test) or print "can't open $test. $!\n";
        my $first = <$fh>;
        my $s = $switches;
        # scripts that ask for -T ignore PERL5LIB, so pass -T and the -I paths explicitly
        $s .= join " ", q[ "-T"], map {qq["-I$_"]} @INC
            if $first =~ /^#!.*\bperl.*-\w*T/;
        $fh->close or print "can't close $test. $!\n";
        my $cmd = ($ENV{'HARNESS_COMPILE_TEST'})
            ? "./perl -I../lib ../utils/perlcc $test "
              . "-run 2>> ./compilelog |"
            : "$^X $s $test|";
        $cmd = "MCR $cmd" if $^O eq 'VMS';
        $fh->open($cmd) or print "can't run $test. $!\n";
        $ok = $next = $max = 0;
        @failed = ();
        my %todo = ();
        my $bonus = 0;
        my $skipped = 0;
        my $skip_reason;
        while (<$fh>) {
            if( $verbose ){
                print $_;
            }
            # Three kinds of line matter here: a "1..M todo N M;" header
            # (the listed tests are expected to fail), a plain "1..M" header
            # (possibly "1..0 # Skip <reason>"), and individual
            # "ok N" / "not ok N" result lines.
            if (/^1\.\.([0-9]+) todo([\d\s]+)\;/) {
                $max = $1;
                for (split(/\s+/, $2)) { $todo{$_} = 1; }
                $totmax += $max;
                $files++;
                $next = 1;
            } elsif (/^1\.\.([0-9]+)(\s*\#\s*[Ss]kip\S*(?>\s+)(.+))?/) {
                $max = $1;
                $totmax += $max;
                $files++;
                $next = 1;
                $skip_reason = $3 if not $max and defined $3;
            } elsif ($max && /^(not\s+)?ok\b/) {
                my $this = $next;
                if (/^not ok\s*(\d*)/){
                    $this = $1 if $1 > 0;
                    print "${ml}NOK $this" if $ml;
                    if (!$todo{$this}) {
                        push @failed, $this;
                    } else {
                        $ok++;
                        $totok++;
                    }
                } elsif (/^ok\s*(\d*)(\s*\#\s*[Ss]kip\S*(?:(?>\s+)(.+))?)?/) {
                    $this = $1 if $1 > 0;
                    print "${ml}ok $this/$max" if $ml;
                    $ok++;
                    $totok++;
                    $skipped++ if defined $2;
                    my $reason;
                    $reason = 'unknown reason' if defined $2;
                    $reason = $3 if defined $3;
                    if (defined $reason and defined $skip_reason) {
                        # print "was: '$skip_reason' new '$reason'\n";
                        $skip_reason = 'various reasons'
                            if $skip_reason ne $reason;
                    } elsif (defined $reason) {
                        $skip_reason = $reason;
                    }
                    $bonus++, $totbonus++ if $todo{$this};
                }
                if ($this > $next) {
                    # warn "Test output counter mismatch [test $this]\n";
                    # no need to warn probably
                    push @failed, $next..$this-1;
                } elsif ($this < $next) {
                    # we have seen more "ok" lines than the number suggests
                    warn "Confused test output: test $this answered after test ", $next-1, "\n";
                    $next = $this;
                }
                $next = $this + 1;
            }
        }
        $fh->close;     # must close to reap child resource values
        my $wstatus = $ignore_exitcode ? 0 : $?;    # Can trust $? ?
        my $estatus;
        $estatus = ($^O eq 'VMS'
                       ? eval 'use vmsish "status"; $estatus = $?'
                       : $wstatus >> 8);
        if ($wstatus) {
            my ($failed, $canon, $percent) = ('??', '??');
            printf "${ml}dubious\n\tTest returned status $estatus (wstat %d, 0x%x)\n",
                   $wstatus,$wstatus;
            print "\t\t(VMS status is $estatus)\n" if $^O eq 'VMS';
            if (corestatus($wstatus)) { # until we have a wait module
                if ($have_devel_corestack) {
                    Devel::CoreStack::stack($^X);
                } else {
                    print "\ttest program seems to have generated a core\n";
                }
            }
            $bad++;
            if ($max) {
                if ($next == $max + 1 and not @failed) {
                    print "\tafter all the subtests completed successfully\n";
                    $percent = 0;
                    $failed = 0;    # But we do not set $canon!
                } else {
                    push @failed, $next..$max;
                    $failed = @failed;
                    (my $txt, $canon) = canonfailed($max,$skipped,@failed);
                    $percent = 100*(scalar @failed)/$max;
                    print "DIED. ",$txt;
                }
            }
            $failedtests{$test} = { canon => $canon, max => $max || '??',
                                    failed => $failed,
                                    name => $test, percent => $percent,
                                    estat => $estatus, wstat => $wstatus,
                                  };
        } elsif ($ok == $max && $next == $max+1) {
            if ($max and $skipped + $bonus) {
                my @msg;
                push(@msg, "$skipped/$max skipped: $skip_reason")
                    if $skipped;
                push(@msg, "$bonus/$max unexpectedly succeeded")
                    if $bonus;
                print "${ml}ok, ".join(', ', @msg)."\n";
            } elsif ($max) {
                print "${ml}ok\n";
            } elsif (defined $skip_reason) {
                print "skipped: $skip_reason\n";
                $tests_skipped++;
            } else {
                print "skipped test on this platform\n";
                $tests_skipped++;
            }
            $good++;
        } elsif ($max) {
            if ($next <= $max) {
                push @failed, $next..$max;
            }
            if (@failed) {
                my ($txt, $canon) = canonfailed($max,$skipped,@failed);
                print $txt;
                $failedtests{$test} = { canon => $canon, max => $max,
                                        failed => scalar @failed,
                                        name => $test, percent => 100*(scalar @failed)/$max,
                                        estat => '', wstat => '',
                                      };
            } else {
                print "Don't know which tests failed: got $ok ok, expected $max\n";
                $failedtests{$test} = { canon => '??', max => $max,
                                        failed => '??',
                                        name => $test, percent => undef,
                                        estat => '', wstat => '',
                                      };
            }
            $bad++;
        } elsif ($next == 0) {
            print "FAILED before any test output arrived\n";
            $bad++;
            $failedtests{$test} = { canon => '??', max => '??',
                                    failed => '??',
                                    name => $test, percent => undef,
                                    estat => '', wstat => '',
                                  };
        }
        $subtests_skipped += $skipped;
        if (defined $files_in_dir) {
            my @new_dir_files = globdir $files_in_dir;
            if (@new_dir_files != @dir_files) {
                my %f;
                @f{@new_dir_files} = (1) x @new_dir_files;
                delete @f{@dir_files};
                my @f = sort keys %f;
                print "LEAKED FILES: @f\n";
                @dir_files = @new_dir_files;
            }
        }
    }
    my $t_total = timediff(new Benchmark, $t_start);

    if ($^O eq 'VMS') {
        if (defined $old5lib) {
            $ENV{PERL5LIB} = $old5lib;
        } else {
            delete $ENV{PERL5LIB};
        }
    }
    my $bonusmsg = '';
    $bonusmsg = (" ($totbonus subtest".($totbonus>1?'s':'').
                 " UNEXPECTEDLY SUCCEEDED)")
        if $totbonus;
    if ($tests_skipped) {
        $bonusmsg .= ", $tests_skipped test" . ($tests_skipped != 1 ? 's' : '');
        if ($subtests_skipped) {
            $bonusmsg .= " and $subtests_skipped subtest"
                         . ($subtests_skipped != 1 ? 's' : '');
        }
        $bonusmsg .= ' skipped';
    }
    elsif ($subtests_skipped) {
        $bonusmsg .= ", $subtests_skipped subtest"
                     . ($subtests_skipped != 1 ? 's' : '')
                     . " skipped";
    }
    if ($bad == 0 && $totmax) {
        print "All tests successful$bonusmsg.\n";
    } elsif ($total==0){
        die "FAILED--no tests were run for some reason.\n";
    } elsif ($totmax==0) {
        my $blurb = $total==1 ? "script" : "scripts";
        die "FAILED--$total test $blurb could be run, alas--no output ever seen\n";
    } else {
        $pct = sprintf("%.2f", $good / $total * 100);
        my $subpct = sprintf " %d/%d subtests failed, %.2f%% okay.",
            $totmax - $totok, $totmax, 100*$totok/$totmax;
        my $script;
        for $script (sort keys %failedtests) {
            $curtest = $failedtests{$script};
            write;
        }
        if ($bad) {
            $bonusmsg =~ s/^,\s*//;
            print "$bonusmsg.\n" if $bonusmsg;
            die "Failed $bad/$total test scripts, $pct% okay.$subpct\n";
        }
    }
    printf("Files=%d, Tests=%d, %s\n", $files, $totmax, timestr($t_total, 'nop'));

    return ($bad == 0 && $totmax) ;
}

my $tried_devel_corestack;
sub corestatus {
    my($st) = @_;
    my($ret);

    eval { require 'wait.ph' };
    if ($@) {
        SWITCH: {
            $ret = ($st & 0200); # Tim says, this is for 90%
        }
    } else {
        $ret = WCOREDUMP($st);
    }

    eval { require Devel::CoreStack; $have_devel_corestack++ }
        unless $tried_devel_corestack++;

    $ret;
}

# Turn a list of failed test numbers into a "1-3, 7" style summary string
# plus a human-readable report.
sub canonfailed ($$@) {
    my($max,$skipped,@failed) = @_;
    my %seen;
    @failed = sort {$a <=> $b} grep !$seen{$_}++, @failed;
    my $failed = @failed;
    my @result = ();
    my @canon = ();
    my $min;
    my $last = $min = shift @failed;
    my $canon;
    if (@failed) {
        for (@failed, $failed[-1]) { # don't forget the last one
            if ($_ > $last+1 || $_ == $last) {
                if ($min == $last) {
                    push @canon, $last;
                } else {
                    push @canon, "$min-$last";
                }
                $min = $_;
            }
            $last = $_;
        }
        local $" = ", ";
        push @result, "FAILED tests @canon\n";
        $canon = "@canon";
    } else {
        push @result, "FAILED test $last\n";
        $canon = $last;
    }

    push @result, "\tFailed $failed/$max tests, ";
    push @result, sprintf("%.2f",100*(1-$failed/$max)), "% okay";
    my $ender = 's' x ($skipped > 1);
    my $good = $max - $failed - $skipped;
    my $goodper = sprintf("%.2f",100*($good/$max));
    push @result, " (-$skipped skipped test$ender: $good okay, $goodper%)" if $skipped;
    push @result, "\n";
    my $txt = join "", @result;
    ($txt, $canon);
}

1;
__END__

=head1 NAME

Test::Harness - run perl standard test scripts with statistics

=head1 SYNOPSIS

    use Test::Harness;

    runtests(@tests);

=head1 DESCRIPTION

(By using the L<Test> module, you can write test scripts without
knowing the exact output this module expects. However, if you need to
know the specifics, read on!)

Perl test scripts print to standard output C<"ok N"> for each single
test, where C<N> is an increasing sequence of integers. The first line
output by a standard test script is C<"1..M">, with C<M> being the
number of tests that should be run within the test script.
Test::Harness::runtests(@tests) runs all the test scripts named as
arguments and checks standard output for the expected C<"ok N">
strings.

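For instance, a script that declares and passes three tests would emit
output along these lines (the test count of three is, of course, only
illustrative):

    1..3
    ok 1
    ok 2
    ok 3
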
After all tests have been performed, runtests() prints some
performance statistics that are computed by the Benchmark module.

=head2 The test script output

Any output from the test script to standard error is ignored and
bypassed, and will thus be seen by the user. Lines written to standard
output containing C</^(not\s+)?ok\b/> are interpreted as feedback for
runtests(). All other lines are discarded.

The test numbers after C<ok> may be omitted. In that case
Test::Harness temporarily maintains its own counter until the script
supplies test numbers again. So the following test script

    print <<END;
    1..6
    not ok
    ok
    not ok
    ok
    ok
    END

will generate

    FAILED tests 1, 3, 6
    Failed 3/6 tests, 50.00% okay

The global variable $Test::Harness::verbose is exportable and can be
used to let runtests() display the standard output of the script
without otherwise altering its behavior.

The global variable $Test::Harness::switches is exportable and can be
used to set perl command line options used for running the test
script(s). The default value is C<-w>.

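For example, a small driver script might import and set both variables
before calling runtests(); the extra C<-Ilib> switch and the F<t/*.t>
file list below are purely illustrative:

    use Test::Harness qw(&runtests $verbose $switches);

    $verbose  = 1;             # echo each test script's output
    $switches = "-w -Ilib";    # switches passed to each child perl
    runtests(glob "t/*.t");    # run every test script under t/
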
If the standard output line contains the substring C< # Skip> (with
variations in spacing and case) after C<ok> or C<ok NUMBER>, it is
counted as a skipped test. If the whole test script succeeds, the
count of skipped tests is included in the generated output.

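For example, an individual test might be reported as skipped with a
line such as this (the test number and the reason are illustrative):

    ok 13 # Skip no database available
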
C<Test::Harness> reports the text after C< # Skip(whatever)> as a
reason for skipping. An explanation can likewise be included in a
C<1..0> line emitted when the test is skipped completely:

    1..0 # Skipped: no leverage found

=head1 EXPORT

C<&runtests> is exported by Test::Harness by default.

=head1 DIAGNOSTICS

=over 4

=item C<All tests successful.\nFiles=%d, Tests=%d, %s>

If all tests are successful, some statistics about the performance are
printed.

=item C<FAILED tests %s\n\tFailed %d/%d tests, %.2f%% okay.>

For any single script that has failing subtests, statistics like the
above are printed.

=item C<Test returned status %d (wstat %d)>

For scripts that return a non-zero exit status, both C<$? E<gt>E<gt> 8>
and C<$?> are printed in a message similar to the above.

=item C<Failed 1 test, %.2f%% okay. %s>

=item C<Failed %d/%d tests, %.2f%% okay. %s>

If not all tests were successful, the script dies with one of the
above messages.

=back

=head1 ENVIRONMENT

Setting C<HARNESS_IGNORE_EXITCODE> makes harness ignore the exit status
of child processes.

Setting C<HARNESS_NOTTY> to a true value forces harness to behave as
though STDOUT were not a console. You may need to set this if you don't
want harness to output more frequent progress messages using carriage
returns. Some consoles may not handle carriage returns properly (which
results in a somewhat messy output).

Setting C<HARNESS_COMPILE_TEST> to a true value will make harness attempt
to compile the test using C<perlcc> before running it.

If C<HARNESS_FILELEAK_IN_DIR> is set to the name of a directory, harness
will check after each test whether new files appeared in that directory,
and report them as

    LEAKED FILES: scr.tmp 0 my.db

If relative, the directory name is interpreted with respect to the
current directory at the moment runtests() was called. Putting an
absolute path into C<HARNESS_FILELEAK_IN_DIR> may give more predictable
results.

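As a sketch, a wrapper script could turn on the file-leak check and the
exit-code override before loading Test::Harness (both settings are read
when the module is loaded; the F</tmp> directory below is only an
example):

    # Set these before Test::Harness is loaded, since it reads them
    # at load time.
    BEGIN {
        $ENV{HARNESS_FILELEAK_IN_DIR} = "/tmp";  # watch /tmp for stray files
        $ENV{HARNESS_IGNORE_EXITCODE} = 1;       # don't trust child exit codes
    }
    use Test::Harness qw(&runtests);

    runtests(@ARGV);
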
Harness sets C<HARNESS_ACTIVE> before executing the individual tests.
This allows the tests to determine if they are being executed through the
harness or by any other means.

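A test script can therefore vary its behaviour depending on how it is
being run; a minimal sketch:

    if ($ENV{HARNESS_ACTIVE}) {
        # run under Test::Harness: stick to the plain "ok N" protocol
    } else {
        # lines starting with "#" are discarded by the harness anyway
        print "# running standalone, printing extra diagnostics\n";
    }
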
=head1 SEE ALSO

L<Test> for writing test scripts and also L<Benchmark> for the
underlying timing routines.

=head1 AUTHORS

Either Tim Bunce or Andreas Koenig, we don't know. What we know for
sure is that it was inspired by Larry Wall's TEST script that came
with perl distributions for ages. Numerous anonymous contributors
exist. The current maintainer is Andreas Koenig.

=head1 BUGS

Test::Harness uses $^X to determine the perl binary to run the tests
with. Test scripts running via the shebang (C<#!>) line may not be
portable because $^X is not consistent for shebang scripts across
platforms. This is no problem when Test::Harness is run with an
absolute path to the perl binary or when $^X can be found in the path.

=cut