lib/Tie/File/t/24_cache_loop.t
#!/usr/bin/perl
#
# Tests for various caching errors
#

use Config;
my $file = "tf$$.txt";
unless ($Config{d_alarm}) {
  print "1..0\n"; exit;
}

$: = Tie::File::_default_recsep();
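# (Calling Tie::File::_default_recsep() here is safe even though the
# "use Tie::File" appears further down: "use" is processed at compile
# time, so the module is already loaded by the time this line runs.)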
my $data = join $:, "record0" .. "record9", "";
my $V = $ENV{INTEGRITY};        # Verbose integrity checking?

print "1..3\n";

my $N = 1;
use Tie::File;
print "ok $N\n"; $N++;

open F, "> $file" or die $!;
binmode F;
print F $data;
close F;

# Limit cache size to 30 bytes
my $MAX = 30;
# -- that's enough space for 3 records, but not 4, on both \n and \r\n systems
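# (Each record is "recordN" = 7 bytes plus a 1- or 2-byte record
# separator, i.e. 8-9 bytes each, so 3 records need 24-27 bytes and
# 4 records would need 32-36.)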
my $o = tie @a, 'Tie::File', $file, memory => $MAX, autodefer => 1;
print $o ? "ok $N\n" : "not ok $N\n";
$N++;

# (3) In 0.50 this goes into an infinite loop.  Explanation:
#
#   Suppose you overfill the defer buffer by so much that the memory
#   limit is also exceeded.  You'll go into _splice to prepare to
#   write out the defer buffer, and _splice will call _fetch, which
#   will then try to flush the read cache---but the read cache is
#   already empty, so you're stuck in an infinite loop.
#
# Five seconds should be plenty of time for it to complete if it works.
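# The bulk list assignment below writes ten records (80-90 bytes of
# data) against the 30-byte memory limit; with autodefer enabled that
# is intended to overfill the defer buffer and exceed the memory limit
# at the same time, which is the situation described above.  $^P is
# true under the perl debugger, where the alarm would only interfere
# with single-stepping.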
alarm 5 unless $^P;
@a = "record0" .. "record9";
print "ok 3\n";
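# (If the bug is present, the unhandled SIGALRM simply kills the
# process after 5 seconds, so the harness sees tests 1 and 2 but
# never test 3.)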

END {
  undef $o;
  untie @a;
  1 while unlink $file;
}