1 BEGIN { do "./t/lib/ANFANG.pm" or die ( $@ || $! ) }
2 use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_ase';
6 no warnings 'uninitialized';
11 use DBIx::Class::_Util 'sigwarn_silencer';
17 'DBI::Sybase::ASE::NoBindVars',
# Connection parameters come from the environment; skip-all above already
# guaranteed they are usable (test_rdbms_ase optional-dependency group).
22 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
25 DBICTest::Schema->connect($dsn, $user, $pass, {
27 [ blob_setup => log_on_update => 1 ], # this is a safer option
# Wrap the driver's _ping so the test can observe pings; the wrapper
# presumably bumps $ping_count (body not fully visible here — the
# "no pings" assertion near the end of the file relies on it).
34 require DBIx::Class::Storage::DBI::Sybase::ASE;
35 my $ping = DBIx::Class::Storage::DBI::Sybase::ASE->can('_ping');
36 *DBIx::Class::Storage::DBI::Sybase::ASE::_ping = sub {
# Run the whole battery once per storage flavor (placeholder-capable ASE
# driver and the NoBindVars fallback).
42 for my $storage_type (@storage_types) {
44   unless ($storage_type eq 'DBI::Sybase::ASE') { # autodetect
45     DBICTest::Schema->storage_type("::$storage_type");
48   $schema = get_schema();
50   $schema->storage->ensure_connected;
52   # we are going to explicitly test this anyway, just loop through
54     $storage_type ne 'DBI::Sybase::ASE::NoBindVars'
56     $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')
59   isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );
# Kill the low-level handle behind the storage's back and make sure
# lazy reconnection works on next dbh access.
61   $schema->storage->_dbh->disconnect;
62   lives_ok (sub { $schema->storage->dbh }, 'reconnect works');
# (Re)create the artist table with an IDENTITY PK for the auto-PK tests.
64   $schema->storage->dbh_do (sub {
65     my ($storage, $dbh) = @_;
66     eval { $dbh->do("DROP TABLE artist") };
69         artistid INT IDENTITY PRIMARY KEY,
71         rank INT DEFAULT 13 NOT NULL,
72         charfield CHAR(10) NULL
79   # so we start unconnected
80   $schema->storage->disconnect;
82   # test primary key handling
83   my $new = $schema->resultset('Artist')->create({ name => 'foo' });
84   like $new->artistid, qr/^\d+\z/, 'Auto-PK returned a number';
85   ok($new->artistid > 0, "Auto-PK worked");
# Track every identity value handed out so later inserts can assert
# uniqueness across reconnects/redispatches.
87   $seen_id{$new->artistid}++;
89 # check redispatch to storage-specific insert when auto-detected storage
90 if ($storage_type eq 'DBI::Sybase::ASE') {
# Force the generic ::DBI storage type; insert must still end up routed
# to the Sybase-specific implementation after autodetection.
91 DBICTest::Schema->storage_type('::DBI');
92 $schema = get_schema();
95 $new = $schema->resultset('Artist')->create({ name => 'Artist 1' });
96 is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' );
97 $seen_id{$new->artistid}++;
99 # inserts happen in a txn, so we make sure it still works inside a txn too
103 $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
104 is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
105 $seen_id{$new->artistid}++;
111 is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');
# LIMIT emulation: ASE has no native LIMIT, so paging goes through the
# storage's own row-limiting — exercised here via rows/offset attrs
# (attribute lines not all visible in this listing).
114 my $it = $schema->resultset('Artist')->search({
115 artistid => { '>' => 0 }
118 order_by => 'artistid',
121 is( $it->count, 3, "LIMIT count ok" );
123 is( $it->next->name, "foo", "iterator->next ok" );
125 is( $it->next->name, "Artist 2", "iterator->next ok" );
126 is( $it->next, undef, "next past end of resultset ok" );
128 # now try with offset
129 $it = $schema->resultset('Artist')->search({}, {
132 order_by => 'artistid',
135 is( $it->count, 3, "LIMIT with offset count ok" );
137 is( $it->next->name, "Artist 3", "iterator->next ok" );
139 is( $it->next->name, "Artist 5", "iterator->next ok" );
140 is( $it->next, undef, "next past end of resultset ok" );
142 # now try a grouped count
143 $schema->resultset('Artist')->create({ name => 'Artist 6' })
146 $it = $schema->resultset('Artist')->search({}, {
150 is( $it->count, 7, 'COUNT of GROUP_BY ok' );
152 # do an IDENTITY_INSERT
# Capture generated SQL through the debug callback so we can assert that
# an explicit-PK insert toggles SET IDENTITY_INSERT on the server.
154 no warnings 'redefine';
157 local $schema->storage->{debug} = 1;
158 local $schema->storage->debugobj->{callback} = sub {
159 push @debug_out, $_[1];
# Shim txn_commit to detect whether the insert was wrapped in a
# transaction (it should NOT be for IDENTITY_INSERT with bind vars).
163 my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit;
164 local *DBIx::Class::Storage::DBI::txn_commit = sub {
169 $schema->resultset('Artist')
170 ->create({ artistid => 999, name => 'mtfnpy' });
172 ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');
175 skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
176 if $storage_type =~ /NoBindVars/i;
178 is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT';
182 # do an IDENTITY_UPDATE
# Same debug-callback trick, this time asserting that updating the PK
# column emits SET IDENTITY_UPDATE.
185 local $schema->storage->{debug} = 1;
186 local $schema->storage->debugobj->{callback} = sub {
187 push @debug_out, $_[1];
191 $schema->resultset('Artist')
192 ->find(999)->update({ artistid => 555 });
193 ok((grep /IDENTITY_UPDATE/i, @debug_out));
194 } 'IDENTITY_UPDATE used';
# Resultset used to observe the rows created by the bulk-insert tests below.
198 my $bulk_rs = $schema->resultset('Artist')->search({
199 name => { -like => 'bulk artist %' }
202 # test _insert_bulk using populate.
204 skip '_insert_bulk not supported', 4
205 unless $storage_type !~ /NoBindVars/i;
# The Sybase bulk API can fail on charset mismatch and fall back with a
# warning — silence it unless this is the forced LC_ALL=C re-run.
209 local $SIG{__WARN__} = sigwarn_silencer(qr/Sybase bulk API operation failed due to character set incompatibility/)
210 unless $ENV{DBICTEST_SYBASE_SUBTEST_RERUN};
212 $schema->resultset('Artist')->populate([
214 name => 'bulk artist 1',
218 name => 'bulk artist 2',
222 name => 'bulk artist 3',
226 } '_insert_bulk via populate';
228 is $bulk_rs->count, 3, 'correct number inserted via _insert_bulk';
230 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
231 'column set correctly via _insert_bulk');
# Collect the generated identities as hash keys — 3 distinct keys proves
# each bulk-inserted row got its own identity value.
234 @bulk_ids{map $_->artistid, $bulk_rs->all} = ();
236 is ((scalar keys %bulk_ids), 3,
237 'identities generated correctly in _insert_bulk');
242 # make sure _insert_bulk works a second time on the same connection
244 skip '_insert_bulk not supported', 3
245 unless $storage_type !~ /NoBindVars/i;
248 $schema->resultset('Artist')->populate([
250 name => 'bulk artist 1',
254 name => 'bulk artist 2',
258 name => 'bulk artist 3',
262 } '_insert_bulk via populate called a second time';
264 is $bulk_rs->count, 3,
265 'correct number inserted via _insert_bulk';
267 is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
268 'column set correctly via _insert_bulk');
273 # test invalid _insert_bulk (missing required column)
276 local $SIG{__WARN__} = sigwarn_silencer(qr/Sybase bulk API operation failed due to character set incompatibility/)
277 unless $ENV{DBICTEST_SYBASE_SUBTEST_RERUN};
279 $schema->resultset('Artist')->populate([
285 # The second pattern is the error from fallback to regular array insert on
286 # incompatible charset.
287 # The third is for ::NoBindVars with no syb_has_blk.
289 \Qno value or default\E
291 \Qdoes not allow null\E
293 \QUnable to invoke fast-path insert without storage placeholder support\E
295 '_insert_bulk with missing required column throws error';
297 # now test _insert_bulk with IDENTITY_INSERT
299 skip '_insert_bulk not supported', 3
300 unless $storage_type !~ /NoBindVars/i;
303 $schema->resultset('Artist')->populate([
306 name => 'bulk artist 1',
311 name => 'bulk artist 2',
316 name => 'bulk artist 3',
320 } '_insert_bulk with IDENTITY_INSERT via populate';
322 is $bulk_rs->count, 3,
323 'correct number inserted via _insert_bulk with IDENTITY_INSERT';
325 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
326 'column set correctly via _insert_bulk with IDENTITY_INSERT');
331 # test correlated subquery
332 my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
333 ->get_column('artistid')
335 my $subq_rs = $schema->resultset('Artist')->search({
336 artistid => { -in => $subq }
338 is $subq_rs->count, 11, 'correlated subquery';
340 # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
# FreeTDS cannot handle the TEXT/IMAGE round-trips below — skip wholesale.
342 skip 'TEXT/IMAGE support does not work with FreeTDS', 22
343 if $schema->storage->_using_freetds;
345 my $dbh = $schema->storage->_dbh;
347 local $SIG{__WARN__} = sub {};
348 eval { $dbh->do('DROP TABLE bindtype_test') };
351 CREATE TABLE bindtype_test
353 id INT IDENTITY PRIMARY KEY,
359 ],{ RaiseError => 1, PrintError => 0 });
# Test payloads: "small" covers all 7-bit byte values, "large" is 127KB —
# big enough to exceed default TEXT/IMAGE fetch limits.
362 my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
363 $binstr{'large'} = $binstr{'small'} x 1024;
365 my $maxloblen = length $binstr{'large'};
# Raise the fetch ceiling so the large blob comes back whole; the native
# driver path also needs a server-side "set textsize".
367 if (not $schema->storage->_using_freetds) {
368 $dbh->{'LongReadLen'} = $maxloblen * 2;
370 $dbh->do("set textsize ".($maxloblen * 2));
373 my $rs = $schema->resultset('BindType');
# Round-trip every (column type x payload size) combination.
376 foreach my $type (qw(blob clob)) {
377 foreach my $size (qw(small large)) {
378 no warnings 'uninitialized';
382 $created = $rs->create( { $type => $binstr{$size} } )
383 } "inserted $size $type without dying";
385 $last_id = $created->id if $created;
388 ok($rs->find($last_id)->$type eq $binstr{$size})
389 } "verified inserted $size $type";
406 my $new_str = $binstr{large} . 'mtfnpy';
408 # check redispatch to storage-specific update when auto-detected storage
409 if ($storage_type eq 'DBI::Sybase::ASE') {
410 DBICTest::Schema->storage_type('::DBI');
411 $schema = get_schema();
415 $rs->search({ id => 1 })->update({ blob => $new_str })
416 } 'updated blob successfully';
419 ok($rs->find(1)->blob eq $new_str)
420 } 'verified updated blob';
422 # try a blob update with IDENTITY_UPDATE
424 $new_str = $binstr{large} . 'hlagh';
425 $rs->find(1)->update({ id => 999, blob => $new_str });
426 ok($rs->find(999)->blob eq $new_str);
427 } 'verified updated blob with IDENTITY_UPDATE';
429 ## try multi-row blob update
430 # first insert some blobs
431 $new_str = $binstr{large} . 'foo';
434 $rs->create({ blob => $binstr{large} }) for (1..2);
435 $rs->update({ blob => $new_str });
436 is((grep $_->blob eq $new_str, $rs->all), 2);
437 } 'multi-row blob update';
441 # now try _insert_bulk with blobs and only blobs
442 $new_str = $binstr{large} . 'bar';
446 blob => $binstr{large},
450 blob => $binstr{large},
454 } '_insert_bulk with blobs does not die';
456 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
457 'IMAGE column set correctly via _insert_bulk');
459 is((grep $_->clob eq $new_str, $rs->all), 2,
460 'TEXT column set correctly via _insert_bulk');
462 # now try _insert_bulk with blobs and a non-blob which also happens to be an
465 skip 'no _insert_bulk without placeholders', 4
466 if $storage_type =~ /NoBindVars/i;
469 $new_str = $binstr{large} . 'bar';
475 blob => $binstr{large},
482 blob => $binstr{large},
487 } '_insert_bulk with blobs and explicit identity does NOT die';
489 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
490 'IMAGE column set correctly via _insert_bulk with identity');
492 is((grep $_->clob eq $new_str, $rs->all), 2,
493 'TEXT column set correctly via _insert_bulk with identity');
495 is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
496 'explicit identities set correctly via _insert_bulk with blobs';
501 $rs->create({ blob => $binstr{large} }) for (1..2);
502 $rs->update({ blob => undef });
503 is((grep !defined($_->blob), $rs->all), 2);
504 } 'blob update to NULL';
507 $schema->txn_do(sub {
508 my $created = $rs->create( { clob => "some text" } );
510 } 'insert blob field in transaction';
511 $ping_count-- if $@; # failure retry triggers a ping
514 # test MONEY column support (and some other misc. stuff)
515 $schema->storage->dbh_do (sub {
516 my ($storage, $dbh) = @_;
517 eval { $dbh->do("DROP TABLE money_test") };
519 CREATE TABLE money_test (
520 id INT IDENTITY PRIMARY KEY,
521 amount MONEY DEFAULT $999.99 NULL
526 my $rs = $schema->resultset('Money');
528 # test insert with defaults
# An empty create must let the server apply the column DEFAULT above.
531 is((grep $_->amount == 999.99, $rs->all), 1);
532 } 'insert with all defaults works';
535 # test insert transaction when there's an active cursor
537 my $artist_rs = $schema->resultset('Artist');
540 my $row = $schema->resultset('Money')->create({ amount => 100 });
542 } 'inserted a row with an active cursor';
543 $ping_count-- if $@; # dbh_do calls ->connected
546 # test insert in an outer transaction when there's an active cursor
548 local $TODO = 'this should work once we have eager cursors';
550 # clear state, or we get a deadlock on $row->delete
551 # XXX figure out why this happens
552 $schema->storage->disconnect;
555 $schema->txn_do(sub {
556 my $artist_rs = $schema->resultset('Artist');
558 my $row = $schema->resultset('Money')->create({ amount => 100 });
561 } 'inserted a row with an active cursor in outer txn';
562 $ping_count-- if $@; # dbh_do calls ->connected
565 # Now test money values.
# MONEY values go through a storage-specific quote/format path; each
# write is followed by a numeric round-trip check.
568 $row = $rs->create({ amount => 100 });
569 } 'inserted a money value';
571 cmp_ok eval { $rs->find($row->id)->amount }, '==', 100,
572 'money value round-trip';
575 $row->update({ amount => 200 });
576 } 'updated a money value';
578 cmp_ok eval { $rs->find($row->id)->amount }, '==', 200,
579 'updated money value round-trip';
582 $row->update({ amount => undef });
583 } 'updated a money value to NULL';
586 my $null_amount = $rs->find($row->id)->amount;
587 is $null_amount, undef;
588 } 'updated money value to NULL round-trip';
590 # Test computed columns and timestamps
591 $schema->storage->dbh_do (sub {
592 my ($storage, $dbh) = @_;
593 eval { $dbh->do("DROP TABLE computed_column_test") };
595 CREATE TABLE computed_column_test (
596 id INT IDENTITY PRIMARY KEY,
597 a_computed_column AS getdate(),
598 a_timestamp timestamp,
599 charfield VARCHAR(20) DEFAULT 'foo'
# The result class is registered lazily, only for this sub-test.
604 require DBICTest::Schema::ComputedColumn;
605 $schema->register_class(
606 ComputedColumn => 'DBICTest::Schema::ComputedColumn'
609 ok (($rs = $schema->resultset('ComputedColumn')),
610 'got rs for ComputedColumn');
# Inserts/updates must not try to supply values for computed or
# timestamp columns — the server owns them.
612 lives_ok { $row = $rs->create({}) }
613 'empty insert for a table with computed columns survived';
616 $row->update({ charfield => 'bar' })
617 } 'update of a table with computed columns survived';
# The _ping shim at the top of the file feeds this final accounting:
# the whole run should have needed no reconnect pings.
620 is $ping_count, 0, 'no pings';
622 # if tests passed and did so under a non-C LC_ALL - let's rerun the test
623 if (Test::Builder->new->is_passing and $ENV{LC_ALL} and $ENV{LC_ALL} ne 'C') {
625 pass ("Your LC_ALL is set to $ENV{LC_ALL} - retesting with C");
627 local $ENV{LC_ALL} = 'C';
628 local $ENV{DBICTEST_SYBASE_SUBTEST_RERUN} = 1;
# Propagate the current @INC so the child sees the same lib setup;
# the map's capture-regex also serves to untaint $^X and __FILE__.
631 local $ENV{PERL5LIB} = join ($Config{path_sep}, @INC);
632 my @cmd = map { $_ =~ /(.+)/ } ($^X, __FILE__);
634 # this is cheating, and may even hang here and there (testing on windows passed fine)
635 # will be replaced with Test::SubExec::Noninteractive in due course
637 IPC::Open2::open2(my $out, undef, @cmd);
638 while (my $ln = <$out>) {
# A zero wait-status from the child means the C-locale re-run passed too.
643 ok (! $?, "Wstat $? from: @cmd");
# Cleanup (presumably inside an END block — opening not visible here):
# best-effort drop of every table this test created.
650 if (my $dbh = eval { $schema->storage->_dbh }) {
651 eval { $dbh->do("DROP TABLE $_") }
652 for qw/artist bindtype_test money_test computed_column_test/;