use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_ase';

no warnings 'uninitialized';

  'DBI::Sybase::ASE::NoBindVars',

eval "require DBIx::Class::Storage::$_;" or die $@

my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};

  DBICTest::Schema->connect($dsn, $user, $pass, {
      [ blob_setup => log_on_update => 1 ], # this is a safer option
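      # (blob_setup is an on_connect_call handler provided by the ASE storage;
      # as far as I understand it, log_on_update => 1 asks for fully-logged
      # TEXT/IMAGE writes - slower, but recoverable, hence "safer")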
my $ping = DBIx::Class::Storage::DBI::Sybase::ASE->can('_ping');
*DBIx::Class::Storage::DBI::Sybase::ASE::_ping = sub {
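  # (this override counts every implicit connection check; the test later does
  # "is $ping_count, 0, 'no pings'" to catch operations that silently trigger
  # a ping/reconnect)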
for my $storage_type (@storage_types) {

  unless ($storage_type eq 'DBI::Sybase::ASE') { # autodetect
    DBICTest::Schema->storage_type("::$storage_type");

  $schema = get_schema();

  $schema->storage->ensure_connected;

  if ($storage_idx == 0 &&
    $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')) {
    # no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
    skip "Skipping entire test for $storage_type - no placeholder support", 1;

  isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );

  $schema->storage->_dbh->disconnect;
  lives_ok (sub { $schema->storage->dbh }, 'reconnect works');

  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE artist") };
      artistid INT IDENTITY PRIMARY KEY,
      rank INT DEFAULT 13 NOT NULL,
      charfield CHAR(10) NULL

  # so we start unconnected
  $schema->storage->disconnect;

  # test primary key handling
  my $new = $schema->resultset('Artist')->create({ name => 'foo' });
  like $new->artistid, qr/^\d+\z/, 'Auto-PK returned a number';
  ok($new->artistid > 0, "Auto-PK worked");

  $seen_id{$new->artistid}++;
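  # %seen_id records every identity value handed back, so the redispatch and
  # in-transaction inserts below can assert that no identity is ever reused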
  # check redispatch to storage-specific insert when auto-detected storage
  if ($storage_type eq 'DBI::Sybase::ASE') {
    DBICTest::Schema->storage_type('::DBI');
    $schema = get_schema();

  $new = $schema->resultset('Artist')->create({ name => 'Artist 1' });
  is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' );
  $seen_id{$new->artistid}++;

  # inserts happen in a txn, so we make sure it still works inside a txn too
    $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
    is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
    $seen_id{$new->artistid}++;

  is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');

  my $it = $schema->resultset('Artist')->search({
    artistid => { '>' => 0 }
    order_by => 'artistid',

  is( $it->count, 3, "LIMIT count ok" );

  is( $it->next->name, "foo", "iterator->next ok" );
  is( $it->next->name, "Artist 2", "iterator->next ok" );
  is( $it->next, undef, "next past end of resultset ok" );
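  # (ASE has no native LIMIT/OFFSET syntax, so these iterator checks exercise
  # whatever limit emulation the storage provides - presumably ROWCOUNT and/or
  # subquery based)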
  # now try with offset
  $it = $schema->resultset('Artist')->search({}, {
    order_by => 'artistid',

  is( $it->count, 3, "LIMIT with offset count ok" );

  is( $it->next->name, "Artist 3", "iterator->next ok" );
  is( $it->next->name, "Artist 5", "iterator->next ok" );
  is( $it->next, undef, "next past end of resultset ok" );

  # now try a grouped count
  $schema->resultset('Artist')->create({ name => 'Artist 6' })

  $it = $schema->resultset('Artist')->search({}, {

  is( $it->count, 7, 'COUNT of GROUP_BY ok' );

  # do an IDENTITY_INSERT
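  # (an explicit artistid on an IDENTITY column needs SET IDENTITY_INSERT ON;
  # the block below captures the generated SQL via the debug callback and
  # overrides txn_commit so it can tell whether a transaction was used)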
    no warnings 'redefine';

    local $schema->storage->{debug} = 1;
    local $schema->storage->debugobj->{callback} = sub {
      push @debug_out, $_[1];

    my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit;
    local *DBIx::Class::Storage::DBI::txn_commit = sub {

    $schema->resultset('Artist')
      ->create({ artistid => 999, name => 'mtfnpy' });

    ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');

      skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
        if $storage_type =~ /NoBindVars/i;

      is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT';

  # do an IDENTITY_UPDATE
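  # (changing the value of an IDENTITY primary key requires IDENTITY_UPDATE;
  # again the debug callback is used to assert that the statement was emitted)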
    local $schema->storage->{debug} = 1;
    local $schema->storage->debugobj->{callback} = sub {
      push @debug_out, $_[1];

      $schema->resultset('Artist')
        ->find(999)->update({ artistid => 555 });
      ok((grep /IDENTITY_UPDATE/i, @debug_out));
    } 'IDENTITY_UPDATE used';

  my $bulk_rs = $schema->resultset('Artist')->search({
    name => { -like => 'bulk artist %' }

  # test _insert_bulk using populate.
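  # (_insert_bulk is expected to go through DBD::Sybase's bulk-copy API when
  # placeholders are available, which is why the NoBindVars variant is skipped
  # here - see also the syb_has_blk note further down)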
    skip '_insert_bulk not supported', 4
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk via populate';

    is $bulk_rs->count, 3, 'correct number inserted via _insert_bulk';

    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk');

    @bulk_ids{map $_->artistid, $bulk_rs->all} = ();

    is ((scalar keys %bulk_ids), 3,
      'identities generated correctly in _insert_bulk');
  # make sure _insert_bulk works a second time on the same connection
    skip '_insert_bulk not supported', 3
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk via populate called a second time';

    is $bulk_rs->count, 3,
      'correct number inserted via _insert_bulk';

    is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk');
  # test invalid _insert_bulk (missing required column)
  # There should be a rollback and reconnect, and the next valid _insert_bulk
  # should succeed.
    $schema->resultset('Artist')->populate([
  } qr/no value or default|does not allow null|placeholders/i,
  # The second pattern is the error from the fallback to a regular array
  # insert on an incompatible charset.
  # The third is for ::NoBindVars with no syb_has_blk.
  '_insert_bulk with missing required column throws error';

  # now test _insert_bulk with IDENTITY_INSERT
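  # (same IDENTITY_INSERT requirement as the single-row case above, only this
  # time the explicit artistid values travel through the bulk path)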
    skip '_insert_bulk not supported', 3
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk with IDENTITY_INSERT via populate';

    is $bulk_rs->count, 3,
      'correct number inserted via _insert_bulk with IDENTITY_INSERT';

    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk with IDENTITY_INSERT');
  # test correlated subquery
  my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
    ->get_column('artistid')

  my $subq_rs = $schema->resultset('Artist')->search({
    artistid => { -in => $subq }

  is $subq_rs->count, 11, 'correlated subquery';

  # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
    skip 'TEXT/IMAGE support does not work with FreeTDS', 22
      if $schema->storage->_using_freetds;

    my $dbh = $schema->storage->_dbh;
      local $SIG{__WARN__} = sub {};
      eval { $dbh->do('DROP TABLE bindtype_test') };

        CREATE TABLE bindtype_test
          id INT IDENTITY PRIMARY KEY,
      ],{ RaiseError => 1, PrintError => 0 });

    my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
    $binstr{'large'} = $binstr{'small'} x 1024;

    my $maxloblen = length $binstr{'large'};

    if (not $schema->storage->_using_freetds) {
      $dbh->{'LongReadLen'} = $maxloblen * 2;
    } else {
      $dbh->do("set textsize ".($maxloblen * 2));
    }
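    # (LongReadLen controls how much of a TEXT/IMAGE value DBD::Sybase will
    # fetch; under FreeTDS that attribute is not usable, so the server-side
    # "set textsize" is issued instead - at least that is the intent here)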
    my $rs = $schema->resultset('BindType');

    foreach my $type (qw(blob clob)) {
      foreach my $size (qw(small large)) {
        no warnings 'uninitialized';

          $created = $rs->create( { $type => $binstr{$size} } )
        } "inserted $size $type without dying";

        $last_id = $created->id if $created;

          ok($rs->find($last_id)->$type eq $binstr{$size})
        } "verified inserted $size $type";

    # blob insert with explicit PK
    # also a good opportunity to test IDENTITY_INSERT
      $rs->create( { id => 1, blob => $binstr{large} } )
    } 'inserted large blob without dying with manual PK';

      ok($rs->find(1)->blob eq $binstr{large})
    } 'verified inserted large blob with manual PK';
    my $new_str = $binstr{large} . 'mtfnpy';

    # check redispatch to storage-specific update when auto-detected storage
    if ($storage_type eq 'DBI::Sybase::ASE') {
      DBICTest::Schema->storage_type('::DBI');
      $schema = get_schema();

      $rs->search({ id => 1 })->update({ blob => $new_str })
    } 'updated blob successfully';

      ok($rs->find(1)->blob eq $new_str)
    } 'verified updated blob';

    # try a blob update with IDENTITY_UPDATE
      $new_str = $binstr{large} . 'hlagh';
      $rs->find(1)->update({ id => 999, blob => $new_str });
      ok($rs->find(999)->blob eq $new_str);
    } 'verified updated blob with IDENTITY_UPDATE';
    ## try multi-row blob update
    # first insert some blobs
    $new_str = $binstr{large} . 'foo';

      $rs->create({ blob => $binstr{large} }) for (1..2);
      $rs->update({ blob => $new_str });
      is((grep $_->blob eq $new_str, $rs->all), 2);
    } 'multi-row blob update';

    # now try _insert_bulk with blobs and only blobs
    $new_str = $binstr{large} . 'bar';
          blob => $binstr{large},
          blob => $binstr{large},
    } '_insert_bulk with blobs does not die';

    is((grep $_->blob eq $binstr{large}, $rs->all), 2,
      'IMAGE column set correctly via _insert_bulk');

    is((grep $_->clob eq $new_str, $rs->all), 2,
      'TEXT column set correctly via _insert_bulk');
    # now try _insert_bulk with blobs and a non-blob which also happens to be
    # an identity column
      skip 'no _insert_bulk without placeholders', 4
        if $storage_type =~ /NoBindVars/i;

      $new_str = $binstr{large} . 'bar';
            blob => $binstr{large},
            blob => $binstr{large},
      } '_insert_bulk with blobs and explicit identity does NOT die';

      is((grep $_->blob eq $binstr{large}, $rs->all), 2,
        'IMAGE column set correctly via _insert_bulk with identity');

      is((grep $_->clob eq $new_str, $rs->all), 2,
        'TEXT column set correctly via _insert_bulk with identity');

      is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
        'explicit identities set correctly via _insert_bulk with blobs';

      $rs->create({ blob => $binstr{large} }) for (1..2);
      $rs->update({ blob => undef });
      is((grep !defined($_->blob), $rs->all), 2);
    } 'blob update to NULL';

  # test MONEY column support (and some other misc. stuff)
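  # (MONEY is one of the types this storage has reportedly had to special-case
  # when binding values, so the insert/update/NULL round-trips below make sure
  # amounts survive in both directions)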
  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE money_test") };
      CREATE TABLE money_test (
        id INT IDENTITY PRIMARY KEY,
        amount MONEY DEFAULT $999.99 NULL

  my $rs = $schema->resultset('Money');

  # test insert with defaults
    is((grep $_->amount == 999.99, $rs->all), 1);
  } 'insert with all defaults works';

  # test insert transaction when there's an active cursor
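  # (a second active statement on one ASE connection would normally clash with
  # the cursor still open on $artist_rs below; presumably the storage works
  # around this, e.g. with a separate connection for the insert, and that is
  # what gets exercised here)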
    my $artist_rs = $schema->resultset('Artist');
      my $row = $schema->resultset('Money')->create({ amount => 100 });
    } 'inserted a row with an active cursor';
    $ping_count-- if $@; # dbh_do calls ->connected

  # test insert in an outer transaction when there's an active cursor
    local $TODO = 'this should work once we have eager cursors';

    # clear state, or we get a deadlock on $row->delete
    # XXX figure out why this happens
    $schema->storage->disconnect;

      $schema->txn_do(sub {
        my $artist_rs = $schema->resultset('Artist');
        my $row = $schema->resultset('Money')->create({ amount => 100 });
    } 'inserted a row with an active cursor in outer txn';
    $ping_count-- if $@; # dbh_do calls ->connected

  # Now test money values.
    $row = $rs->create({ amount => 100 });
  } 'inserted a money value';

  cmp_ok eval { $rs->find($row->id)->amount }, '==', 100,
    'money value round-trip';

    $row->update({ amount => 200 });
  } 'updated a money value';

  cmp_ok eval { $rs->find($row->id)->amount }, '==', 200,
    'updated money value round-trip';

    $row->update({ amount => undef });
  } 'updated a money value to NULL';

    my $null_amount = $rs->find($row->id)->amount;
    is $null_amount, undef;
  } 'updated money value to NULL round-trip';

  # Test computed columns and timestamps
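  # (neither a computed column nor a timestamp column can be given an explicit
  # value on insert, so the empty insert and the later update check that the
  # storage leaves them out and the row still round-trips)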
  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE computed_column_test") };
      CREATE TABLE computed_column_test (
        id INT IDENTITY PRIMARY KEY,
        a_computed_column AS getdate(),
        a_timestamp timestamp,
        charfield VARCHAR(20) DEFAULT 'foo'

  require DBICTest::Schema::ComputedColumn;
  $schema->register_class(
    ComputedColumn => 'DBICTest::Schema::ComputedColumn'

  ok (($rs = $schema->resultset('ComputedColumn')),
    'got rs for ComputedColumn');

  lives_ok { $row = $rs->create({}) }
    'empty insert for a table with computed columns survived';

    $row->update({ charfield => 'bar' })
  } 'update of a table with computed columns survived';

is $ping_count, 0, 'no pings';

# if the tests passed and ran under a non-C LANG - rerun the whole script
# under the C locale
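# (re-running under LANG=C is presumably meant to catch locale-dependent
# differences, e.g. in driver/server messages, that only show up in one locale)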
if (Test::Builder->new->is_passing and $ENV{LANG} and $ENV{LANG} ne 'C') {
  my $oldlang = $ENV{LANG};
  local $ENV{LANG} = 'C';

  pass ("Your lang is set to $oldlang - retesting with C");

  my @cmd = map { $_ =~ /(.+)/ } ($^X, __FILE__);
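  # (the match-and-capture looks like the usual untainting idiom for $^X and
  # the script path before they are handed to open2 below)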
  # this is cheating, and may even hang here and there (testing on windows passed fine)
  # will be replaced with Test::SubExec::Noninteractive in due course
  IPC::Open2::open2(my $out, undef, @cmd);
  while (my $ln = <$out>) {

  ok (! $?, "Wstat $? from: @cmd");

  if (my $dbh = eval { $schema->storage->_dbh }) {
    eval { $dbh->do("DROP TABLE $_") }
      for qw/artist bindtype_test money_test computed_column_test/;
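    # (this cleanup drops the test tables if a connection is still around, so
    # a failed run does not leave leftovers behind for the next one)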