# NOTE(review): this file is a *sampled* excerpt of a DBIx::Class live-database
# test for Firebird — each line still carries its original file line number and
# many intervening lines are missing (heredoc openers, loop closers, the
# %$env2optdep declaration, the `sub cleanup` opener, etc.). Comments below are
# therefore placed only between statements that are complete as shown, and no
# code byte has been altered. TODO: re-extract the full file before refactoring.
6 use DBIx::Class::Optional::Dependencies ();
13 DBICTEST_FIREBIRD => 'test_rdbms_firebird',
14 DBICTEST_FIREBIRD_INTERBASE => 'test_rdbms_firebird_interbase',
15 DBICTEST_FIREBIRD_ODBC => 'test_rdbms_firebird_odbc',
18 plan skip_all => join (' ',
19 'Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}',
20 'and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},',
21 '_USER and _PASS to run these tests.',
23 'WARNING: this test creates and drops the tables "artist", "bindtype_test" and',
24 '"sequence_test"; the generators "gen_artist_artistid", "pkid1_seq", "pkid2_seq"',
25 'and "nonpkid_seq" and the trigger "artist_bi".',
26 ) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;
28 # tests stolen from 749sybase_asa.t
31 # dbi:Firebird:db=/var/lib/firebird/2.5/data/hlaghdb.fdb
32 # dbi:InterBase:db=/var/lib/firebird/2.5/data/hlaghdb.fdb
35 # dbi:ODBC:Driver=Firebird;Dbname=/var/lib/firebird/2.5/data/hlaghdb.fdb
# Run the full test battery once per configured driver (DBD::Firebird,
# DBD::InterBase, DBD::ODBC); each iteration is a SKIP block so a missing
# optional-dependency set skips only that driver's pass.
39 for my $prefix (keys %$env2optdep) { SKIP: {
41 my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;
45 skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
46 unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
# Connect; the softcommit connect-call is applied only to the native drivers
# (the DSN regex excludes the ODBC variant). Connection-attr lines between
# these two are not visible in this excerpt.
48 $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
51 ($dsn !~ /ODBC/ ? (on_connect_call => 'use_softcommit') : ()),
53 my $dbh = $schema->storage->dbh;
55 my $sg = Scope::Guard->new(sub { cleanup($schema) });
# Schema setup. Each DROP is wrapped in eval{} because the object may not
# exist on a fresh database — failures there are deliberately ignored.
# The CREATE TABLE / CREATE TRIGGER statements are fed via heredocs whose
# opening $dbh->do(<<...) lines fall in the gaps of this excerpt.
57 eval { $dbh->do(q[DROP TABLE "artist"]) };
59 CREATE TABLE "artist" (
60 "artistid" INT PRIMARY KEY,
66 eval { $dbh->do(q[DROP GENERATOR "gen_artist_artistid"]) };
67 $dbh->do('CREATE GENERATOR "gen_artist_artistid"');
68 eval { $dbh->do('DROP TRIGGER "artist_bi"') };
70 CREATE TRIGGER "artist_bi" FOR "artist"
71 ACTIVE BEFORE INSERT POSITION 0
74 IF (NEW."artistid" IS NULL) THEN
75 NEW."artistid" = GEN_ID("gen_artist_artistid",1);
78 eval { $dbh->do('DROP TABLE "sequence_test"') };
80 CREATE TABLE "sequence_test" (
87 $dbh->do('ALTER TABLE "sequence_test" ADD CONSTRAINT "sequence_test_constraint" PRIMARY KEY ("pkid1", "pkid2")');
# NOTE(review): pkid2_seq is consistently unquoted while the other generators
# are double-quoted — presumably intentional, to exercise both quoted
# (case-sensitive) and unquoted (uppercased) Firebird identifier handling;
# confirm against the full file before "fixing" the inconsistency.
88 eval { $dbh->do('DROP GENERATOR "pkid1_seq"') };
89 eval { $dbh->do('DROP GENERATOR pkid2_seq') };
90 eval { $dbh->do('DROP GENERATOR "nonpkid_seq"') };
91 $dbh->do('CREATE GENERATOR "pkid1_seq"');
92 $dbh->do('CREATE GENERATOR pkid2_seq');
93 $dbh->do('SET GENERATOR pkid2_seq TO 9');
94 $dbh->do('CREATE GENERATOR "nonpkid_seq"');
95 $dbh->do('SET GENERATOR "nonpkid_seq" TO 19');
# Actual tests start here: PK generation, sequences, transactions/savepoints,
# populate, limits/paging, nested cursors, and blob round-trips.
97 my $ars = $schema->resultset('Artist');
98 is ( $ars->count, 0, 'No rows at first' );
100 # test primary key handling
101 my $new = $ars->create({ name => 'foo' });
102 ok($new->artistid, "Auto-PK worked");
104 # test auto increment using generators WITHOUT triggers
106 my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
107 is($st->pkid1, $_, "Firebird Auto-PK without trigger: First primary key");
108 is($st->pkid2, $_ + 9, "Firebird Auto-PK without trigger: Second primary key");
109 is($st->nonpkid, $_ + 19, "Firebird Auto-PK without trigger: Non-primary key");
111 my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
112 is($st->pkid1, 55, "Firebird Auto-PK without trigger: First primary key set manually");
114 # test transaction commit
115 $schema->txn_do(sub {
116 $ars->create({ name => 'in_transaction' });
118 ok (($ars->search({ name => 'in_transaction' })->first),
119 'transaction committed');
120 is $schema->storage->_dbh->{AutoCommit}, 1,
121 '$dbh->{AutoCommit} is correct after transaction commit';
123 $ars->search({ name => 'in_transaction' })->delete;
127 $schema->txn_do(sub {
129 $schema->txn_do(sub {
130 $ars->create({ name => 'in_savepoint' });
131 die "rolling back savepoint";
134 ok ((not $ars->search({ name => 'in_savepoint' })->first),
135 'savepoint rolled back');
136 $ars->create({ name => 'in_outer_txn' });
137 die "rolling back outer txn";
139 } qr/rolling back outer txn/,
140 'correct exception for rollback';
142 is $schema->storage->_dbh->{AutoCommit}, 1,
143 '$dbh->{AutoCommit} is correct after transaction rollback';
145 ok ((not $ars->search({ name => 'in_outer_txn' })->first),
146 'outer txn rolled back');
148 # test explicit key spec
149 $new = $ars->create ({ name => 'bar', artistid => 66 });
150 is($new->artistid, 66, 'Explicit PK worked');
151 $new->discard_changes;
152 is($new->artistid, 66, 'Explicit PK assigned');
156 $new->update({ name => 'baz' })
158 $new->discard_changes;
159 is $new->name, 'baz', 'row updated';
165 push @pop, { name => "Artist_$_" };
167 $ars->populate (\@pop);
170 # test populate with explicit key
174 push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
176 $ars->populate (\@pop);
179 # count what we did so far
180 is ($ars->count, 6, 'Simple count works');
182 # test ResultSet UPDATE
184 $ars->search({ name => 'foo' })->update({ rank => 4 });
186 is eval { $ars->search({ name => 'foo' })->first->rank }, 4;
187 } 'Can update a column';
189 my ($updated) = $schema->resultset('Artist')->search({name => 'foo'});
190 is eval { $updated->rank }, 4, 'and the update made it to the database';
193 my $lim = $ars->search( {},
197 order_by => 'artistid'
200 is( $lim->count, 2, 'ROWS+OFFSET count ok' );
201 is( $lim->all, 2, 'Number of ->all objects matches count' );
205 is( eval { $lim->next->artistid }, 101, "iterator->next ok" );
206 is( eval { $lim->next->artistid }, 102, "iterator->next ok" );
207 is( $lim->next, undef, "next past end of resultset ok" );
210 my $paged = $ars->search({ name => { -like => 'Artist%' } }, {
213 order_by => 'artistid',
219 } 'paged query survived';
221 is try { $row->artistid }, 5, 'correct row from paged query';
223 # DBD bug - if any unfinished statements are present during
224 # DDL manipulation (test blobs below)- a segfault will occur
227 # test nested cursors
229 my $rs1 = $ars->search({}, { order_by => { -asc => 'artistid' }});
231 my $rs2 = $ars->search({ artistid => $rs1->next->artistid }, {
232 order_by => { -desc => 'artistid' }
235 is $rs2->next->artistid, 1, 'nested cursors';
240 my $row = $ars->create({});
242 } 'empty insert works';
244 # test inferring the generator from the trigger source and using it with
247 local $ars->result_source->column_info('artistid')->{auto_nextval} = 1;
250 my $row = $ars->create({ name => 'introspecting generator' });
252 } 'inferring generator from trigger source works';
255 # test blobs (stolen from 73oracle.t)
256 eval { $dbh->do('DROP TABLE "bindtype_test"') };
258 CREATE TABLE "bindtype_test"
260 "id" INT PRIMARY KEY,
263 "clob" BLOB SUB_TYPE TEXT,
268 my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
269 $binstr{'large'} = $binstr{'small'} x 1024;
271 my $maxloblen = length $binstr{'large'};
272 local $dbh->{'LongReadLen'} = $maxloblen;
274 my $rs = $schema->resultset('BindType');
277 foreach my $type (qw( blob clob )) {
278 foreach my $size (qw( small large )) {
281 # turn off horrendous binary DBIC_TRACE output
282 local $schema->storage->{debug} = 0;
284 lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
285 "inserted $size $type without dying";
287 my $got = $rs->find($id)->$type;
289 my $hexdump = sub { join '', map sprintf('%02X', ord), split //, shift };
291 ok($got eq $binstr{$size}, "verified inserted $size $type" )
293 diag "For " . (ref $schema->storage) . "\n";
295 diag $hexdump->(substr($got,0,50));
296 diag "Expecting blob:\n";
297 diag $hexdump->(substr($binstr{$size},0,50));
# Teardown — presumably the body of the cleanup() sub registered with the
# Scope::Guard above; its `sub cleanup {` opening line is outside this
# excerpt — confirm before editing. All DROPs are best-effort (eval-wrapped).
312 $schema->storage->disconnect; # to avoid object FOO is in use errors
313 $dbh = $schema->storage->dbh;
317 eval { $dbh->do('DROP TRIGGER "artist_bi"') };
320 foreach my $generator (qw/
321 "gen_artist_artistid"
326 eval { $dbh->do(qq{DROP GENERATOR $generator}) };
330 foreach my $table (qw/artist sequence_test/) {
331 eval { $dbh->do(qq[DROP TABLE "$table"]) };
335 eval { $dbh->do(q{DROP TABLE "bindtype_test"}) };