use DBIx::Class::Optional::Dependencies ();
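
# Map each DBICTEST_* environment prefix to the optional-dependency group whose
# modules are needed to talk to that driver (checked via req_ok_for() below).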
  DBICTEST_FIREBIRD => 'test_rdbms_firebird',
  DBICTEST_FIREBIRD_INTERBASE => 'test_rdbms_firebird_interbase',
  DBICTEST_FIREBIRD_ODBC => 'test_rdbms_firebird_odbc',

plan skip_all => join (' ',
  'Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}',
  'and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},',
  '_USER and _PASS to run these tests.',
  'WARNING: this test creates and drops the tables "artist", "bindtype_test" and',
  '"sequence_test"; the generators "gen_artist_artistid", "pkid1_seq", "pkid2_seq"',
  'and "nonpkid_seq" and the trigger "artist_bi".',
) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;

# tests stolen from 749sybase_asa.t

# example DSNs:
# dbi:Firebird:db=/var/lib/firebird/2.5/data/hlaghdb.fdb
# dbi:InterBase:db=/var/lib/firebird/2.5/data/hlaghdb.fdb
# dbi:ODBC:Driver=Firebird;Dbname=/var/lib/firebird/2.5/data/hlaghdb.fdb
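
# Run the full battery once per driver whose ${prefix}_DSN is configured;
# drivers with missing prerequisites are skipped individually inside the loop.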
for my $prefix (keys %$env2optdep) { SKIP: {

  my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;

  note "Testing with ${prefix}_DSN";

  skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
    unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
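
  # For the native DBD::Firebird / DBD::InterBase drivers the use_softcommit
  # connect call enables the driver's soft-commit mode, so committing does not
  # invalidate open cursors; the underlying attribute is driver-specific and is
  # therefore not applied to ODBC connections.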
  $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
    ($dsn !~ /ODBC/ ? (on_connect_call => 'use_softcommit') : ()),

  my $dbh = $schema->storage->dbh;

  my $sg = Scope::Guard->new(sub { cleanup($schema) });

  eval { $dbh->do(q[DROP TABLE "artist"]) };
  CREATE TABLE "artist" (
    "artistid" INT PRIMARY KEY,

  eval { $dbh->do(q[DROP GENERATOR "gen_artist_artistid"]) };
  $dbh->do('CREATE GENERATOR "gen_artist_artistid"');
  eval { $dbh->do('DROP TRIGGER "artist_bi"') };
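
  # Classic Firebird (before 3.0's identity columns) emulates auto-increment
  # PKs with a generator plus a BEFORE INSERT trigger that assigns GEN_ID()
  # to "artistid" whenever no explicit value is supplied.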
  CREATE TRIGGER "artist_bi" FOR "artist"
  ACTIVE BEFORE INSERT POSITION 0
    IF (NEW."artistid" IS NULL) THEN
      NEW."artistid" = GEN_ID("gen_artist_artistid",1);

  eval { $dbh->do('DROP TABLE "sequence_test"') };
  CREATE TABLE "sequence_test" (

  $dbh->do('ALTER TABLE "sequence_test" ADD CONSTRAINT "sequence_test_constraint" PRIMARY KEY ("pkid1", "pkid2")');
  eval { $dbh->do('DROP GENERATOR "pkid1_seq"') };
  eval { $dbh->do('DROP GENERATOR pkid2_seq') };
  eval { $dbh->do('DROP GENERATOR "nonpkid_seq"') };
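
  # These generators back the three SequenceTest columns; pkid2_seq is left
  # unquoted, presumably to exercise both quoted and unquoted generator names.
  # The SET GENERATOR calls seed the counters so the first fetched values (10
  # and 20) line up with the pkid2/nonpkid expectations in the loop below.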
  $dbh->do('CREATE GENERATOR "pkid1_seq"');
  $dbh->do('CREATE GENERATOR pkid2_seq');
  $dbh->do('SET GENERATOR pkid2_seq TO 9');
  $dbh->do('CREATE GENERATOR "nonpkid_seq"');
  $dbh->do('SET GENERATOR "nonpkid_seq" TO 19');

  my $ars = $schema->resultset('Artist');
  is ( $ars->count, 0, 'No rows at first' );

  # test primary key handling
  my $new = $ars->create({ name => 'foo' });
  ok($new->artistid, "Auto-PK worked");

  # test auto increment using generators WITHOUT triggers
    my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
    is($st->pkid1, $_, "Firebird Auto-PK without trigger: First primary key");
    is($st->pkid2, $_ + 9, "Firebird Auto-PK without trigger: Second primary key");
    is($st->nonpkid, $_ + 19, "Firebird Auto-PK without trigger: Non-primary key");

  my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
  is($st->pkid1, 55, "Firebird Auto-PK without trigger: First primary key set manually");
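
  # Supplying pkid1 explicitly skips the generator fetch for that column - DBIC
  # only pulls a sequence value for an auto-increment column it was not given.
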
  # test transaction commit
  $schema->txn_do(sub {
    $ars->create({ name => 'in_transaction' });

  ok (($ars->search({ name => 'in_transaction' })->first),
    'transaction committed');
  is $schema->storage->_dbh->{AutoCommit}, 1,
    '$dbh->{AutoCommit} is correct after transaction commit';

  $ars->search({ name => 'in_transaction' })->delete;
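
  # test savepoints: with auto_savepoint enabled on the connection, a nested
  # txn_do maps to SAVEPOINT / ROLLBACK TO SAVEPOINT, so the inner failure can
  # be rolled back without aborting the outer transaction.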
  $schema->txn_do(sub {
    my ($schema, $ars) = @_;

      $schema->txn_do(sub {
        $ars->create({ name => 'in_savepoint' });
        die "rolling back savepoint";

    ok ((not $ars->search({ name => 'in_savepoint' })->first),
      'savepoint rolled back');
    $ars->create({ name => 'in_outer_txn' });
    die "rolling back outer txn";

  } qr/rolling back outer txn/,
    'correct exception for rollback';

  is $schema->storage->_dbh->{AutoCommit}, 1,
    '$dbh->{AutoCommit} is correct after transaction rollback';

  ok ((not $ars->search({ name => 'in_outer_txn' })->first),
    'outer txn rolled back');

  # test explicit key spec
  $new = $ars->create ({ name => 'bar', artistid => 66 });
  is($new->artistid, 66, 'Explicit PK worked');
  $new->discard_changes;
  is($new->artistid, 66, 'Explicit PK assigned');
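
  # discard_changes re-reads the row from the database, so the second check
  # proves the explicit PK value actually reached storage rather than merely
  # sitting in the row object.
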
    $new->update({ name => 'baz' })

  $new->discard_changes;
  is $new->name, 'baz', 'row updated';

    push @pop, { name => "Artist_$_" };

  $ars->populate (\@pop);

  # test populate with explicit key

    push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };

  $ars->populate (\@pop);

  # count what we did so far
  is ($ars->count, 6, 'Simple count works');

  # test ResultSet UPDATE
    $ars->search({ name => 'foo' })->update({ rank => 4 });

    is eval { $ars->search({ name => 'foo' })->first->rank }, 4;
  } 'Can update a column';

  my ($updated) = $schema->resultset('Artist')->search({name => 'foo'});
  is eval { $updated->rank }, 4, 'and the update made it to the database';
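
  # test limits: Firebird has no LIMIT/OFFSET keyword, so the rows/offset
  # attributes below are rendered via Firebird's own row-restriction syntax
  # (FIRST ... SKIP ... / ROWS ... TO ...) by the storage's limit dialect.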
  my $lim = $ars->search( {},
    order_by => 'artistid'

  is( $lim->count, 2, 'ROWS+OFFSET count ok' );
  is( $lim->all, 2, 'Number of ->all objects matches count' );

  is( eval { $lim->next->artistid }, 101, "iterator->next ok" );
  is( eval { $lim->next->artistid }, 102, "iterator->next ok" );
  is( $lim->next, undef, "next past end of resultset ok" );

  my $paged = $ars->search({ name => { -like => 'Artist%' } }, {
    order_by => 'artistid',

  } 'paged query survived';

  is try { $row->artistid }, 5, 'correct row from paged query';

  # DBD bug - if any unfinished statements are present during DDL
  # manipulation (the blob tests below), a segfault will occur

  # test nested cursors
    my $rs1 = $ars->search({}, { order_by => { -asc => 'artistid' }});

    my $rs2 = $ars->search({ artistid => $rs1->next->artistid }, {
      order_by => { -desc => 'artistid' }

    is $rs2->next->artistid, 1, 'nested cursors';

    my $row = $ars->create({});

  } 'empty insert works';

  # test inferring the generator from the trigger source and using it with
  # auto_nextval
    local $ars->result_source->column_info('artistid')->{auto_nextval} = 1;
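    # With auto_nextval set and no explicit sequence configured, the Firebird
    # storage reads the table's BEFORE INSERT trigger source from the system
    # catalog, extracts the generator name from its GEN_ID() call, and fetches
    # PK values from that generator directly.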
    my $row = $ars->create({ name => 'introspecting generator' });

  } 'inferring generator from trigger source works';

  # at this point there should be no active statements
  # (finish() was called everywhere, either explicitly via
  # reset() or on DESTROY)
  for (keys %{$schema->storage->dbh->{CachedKids}}) {
    fail("Unreachable cached statement still active: $_")
      if $schema->storage->dbh->{CachedKids}{$_}->FETCH('Active');

  # test blobs (stolen from 73oracle.t)
  eval { $dbh->do('DROP TABLE "bindtype_test"') };
  CREATE TABLE "bindtype_test"
    "id" INT PRIMARY KEY,
    "clob" BLOB SUB_TYPE TEXT,

  my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
  $binstr{'large'} = $binstr{'small'} x 1024;

  my $maxloblen = length $binstr{'large'};
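
  # DBI drivers generally refuse to fetch (or silently truncate, depending on
  # LongTruncOk) BLOB values longer than LongReadLen, so it is raised to cover
  # the largest blob we are about to round-trip.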
  local $dbh->{'LongReadLen'} = $maxloblen;

  my $rs = $schema->resultset('BindType');

  foreach my $type (qw( blob clob )) {
    foreach my $size (qw( small large )) {

      # turn off horrendous binary DBIC_TRACE output
      local $schema->storage->{debug} = 0;

      lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
        "inserted $size $type without dying";

      my $got = $rs->find($id)->$type;

      my $hexdump = sub { join '', map sprintf('%02X', ord), split //, shift };

      ok($got eq $binstr{$size}, "verified inserted $size $type" )
          diag "For " . (ref $schema->storage) . "\n";
          diag $hexdump->(substr($got,0,50));
          diag "Expecting blob:\n";
          diag $hexdump->(substr($binstr{$size},0,50));

  $schema->storage->disconnect; # to avoid "object FOO is in use" errors
  $dbh = $schema->storage->dbh;
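
  # The trigger is dropped first since it references both the "artist" table
  # and its generator; the generators and tables follow, each wrapped in eval
  # so a missing object does not abort the rest of the cleanup.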
  eval { $dbh->do('DROP TRIGGER "artist_bi"') };

  foreach my $generator (qw/
    "gen_artist_artistid"

    eval { $dbh->do(qq{DROP GENERATOR $generator}) };

  foreach my $table (qw/artist sequence_test/) {
    eval { $dbh->do(qq[DROP TABLE "$table"]) };

  eval { $dbh->do(q{DROP TABLE "bindtype_test"}) };