# Sybase ASE live-database test (NOTE: this is a non-contiguous excerpt —
# the embedded numbers are original file line numbers and gaps exist
# between statements).  The whole file skips unless the optional
# 'test_rdbms_ase' dependency group is satisfied.
1 use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_ase';
5 no warnings 'uninitialized';
9 use DBIx::Class::_Util 'sigwarn_silencer';
# One entry of the @storage_types list under test — the NoBindVars variant
# is exercised alongside the autodetected ASE storage (list opening is not
# visible in this excerpt).
16 'DBI::Sybase::ASE::NoBindVars',
# Connection parameters come from the DBICTEST_SYBASE_{DSN,USER,PASS}
# environment variables.
21 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
24 DBICTest::Schema->connect($dsn, $user, $pass, {
26 [ blob_setup => log_on_update => 1 ], # this is a safer option
# Monkeypatch the storage's _ping so every ping can be counted; the test
# asserts near the end that $ping_count stayed at zero (wrapper body not
# fully visible here — presumably it bumps a counter then calls $ping).
33 require DBIx::Class::Storage::DBI::Sybase::ASE;
34 my $ping = DBIx::Class::Storage::DBI::Sybase::ASE->can('_ping');
35 *DBIx::Class::Storage::DBI::Sybase::ASE::_ping = sub {
# Run the entire battery once per storage class under test.
41 for my $storage_type (@storage_types) {
# The plain 'DBI::Sybase::ASE' case relies on autodetection; only the
# explicit variants force a storage_type up front.
43 unless ($storage_type eq 'DBI::Sybase::ASE') { # autodetect
44 DBICTest::Schema->storage_type("::$storage_type");
47 $schema = get_schema();
49 $schema->storage->ensure_connected;
51 # we are going to explicitly test this anyway, just loop through
53 $storage_type ne 'DBI::Sybase::ASE::NoBindVars'
55 $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')
58 isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );
# Force a raw disconnect, then verify lazy reconnection on ->dbh access.
60 $schema->storage->_dbh->disconnect;
61 lives_ok (sub { $schema->storage->dbh }, 'reconnect works');
# (Re)create the artist fixture table; the DROP is best-effort so a first
# run against a clean database does not die.
63 $schema->storage->dbh_do (sub {
64 my ($storage, $dbh) = @_;
65 eval { $dbh->do("DROP TABLE artist") };
# Columns of the CREATE TABLE (statement text partially elided in this
# excerpt) — IDENTITY PK plus defaulted/nullable columns.
68 artistid INT IDENTITY PRIMARY KEY,
70 rank INT DEFAULT 13 NOT NULL,
71 charfield CHAR(10) NULL
78 # so we start unconnected
79 $schema->storage->disconnect;
81 # test primary key handling
82 my $new = $schema->resultset('Artist')->create({ name => 'foo' });
83 like $new->artistid, qr/^\d+\z/, 'Auto-PK returned a number';
84 ok($new->artistid > 0, "Auto-PK worked");
# %seen_id records every identity value handed out so far, proving
# uniqueness across reconnects and storage redispatches below.
86 $seen_id{$new->artistid}++;
88 # check redispatch to storage-specific insert when auto-detected storage
89 if ($storage_type eq 'DBI::Sybase::ASE') {
# Reset to the generic ::DBI storage so the next insert must redispatch
# to the Sybase-specific implementation after autodetection.
90 DBICTest::Schema->storage_type('::DBI');
91 $schema = get_schema();
94 $new = $schema->resultset('Artist')->create({ name => 'Artist 1' });
95 is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' );
96 $seen_id{$new->artistid}++;
98 # inserts happen in a txn, so we make sure it still works inside a txn too
# (the surrounding loop creating the remaining artists is not visible in
# this excerpt; $_ is presumably the loop counter)
102 $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
103 is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
104 $seen_id{$new->artistid}++;
# By this point 7 artists total are expected in the table.
110 is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');
# Emulated LIMIT support: a rows-restricted, ordered resultset (the
# rows/attr lines are partially elided in this excerpt).
113 my $it = $schema->resultset('Artist')->search({
114 artistid => { '>' => 0 }
117 order_by => 'artistid',
120 is( $it->count, 3, "LIMIT count ok" );
122 is( $it->next->name, "foo", "iterator->next ok" );
124 is( $it->next->name, "Artist 2", "iterator->next ok" );
125 is( $it->next, undef, "next past end of resultset ok" );
127 # now try with offset
128 $it = $schema->resultset('Artist')->search({}, {
131 order_by => 'artistid',
134 is( $it->count, 3, "LIMIT with offset count ok" );
# With the offset applied the window starts at Artist 3.
136 is( $it->next->name, "Artist 3", "iterator->next ok" );
138 is( $it->next->name, "Artist 5", "iterator->next ok" );
139 is( $it->next, undef, "next past end of resultset ok" );
141 # now try a grouped count
142 $schema->resultset('Artist')->create({ name => 'Artist 6' })
145 $it = $schema->resultset('Artist')->search({}, {
149 is( $it->count, 7, 'COUNT of GROUP_BY ok' );
151 # do an IDENTITY_INSERT
153 no warnings 'redefine';
# Capture generated SQL via the debug callback so we can assert that
# SET IDENTITY_INSERT was actually emitted.
156 local $schema->storage->{debug} = 1;
157 local $schema->storage->debugobj->{callback} = sub {
158 push @debug_out, $_[1];
# Count txn commits during the insert — an explicit-identity insert is
# expected to avoid wrapping itself in a transaction.
162 my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit;
163 local *DBIx::Class::Storage::DBI::txn_commit = sub {
# Insert with an explicit PK value, which requires IDENTITY_INSERT on ASE.
168 $schema->resultset('Artist')
169 ->create({ artistid => 999, name => 'mtfnpy' });
171 ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');
174 skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
175 if $storage_type =~ /NoBindVars/i;
177 is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT';
181 # do an IDENTITY_UPDATE
184 local $schema->storage->{debug} = 1;
185 local $schema->storage->debugobj->{callback} = sub {
186 push @debug_out, $_[1];
# Changing the PK of an existing row requires IDENTITY_UPDATE.
190 $schema->resultset('Artist')
191 ->find(999)->update({ artistid => 555 });
192 ok((grep /IDENTITY_UPDATE/i, @debug_out));
193 } 'IDENTITY_UPDATE used';
# Resultset matching only the rows created by the bulk-insert tests below.
197 my $bulk_rs = $schema->resultset('Artist')->search({
198 name => { -like => 'bulk artist %' }
201 # test _insert_bulk using populate.
# NoBindVars cannot use the bulk API (no placeholders), so skip there.
203 skip '_insert_bulk not supported', 4
204 unless $storage_type !~ /NoBindVars/i;
# On a subtest re-run the charset-incompat warning is expected; silence it
# only on the first run (see LC_ALL rerun logic near the end of the file).
208 local $SIG{__WARN__} = sigwarn_silencer(qr/Sybase bulk API operation failed due to character set incompatibility/)
209 unless $ENV{DBICTEST_SYBASE_SUBTEST_RERUN};
211 $schema->resultset('Artist')->populate([
213 name => 'bulk artist 1',
217 name => 'bulk artist 2',
221 name => 'bulk artist 3',
225 } '_insert_bulk via populate';
227 is $bulk_rs->count, 3, 'correct number inserted via _insert_bulk';
229 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
230 'column set correctly via _insert_bulk');
# Distinct identities must have been generated for each bulk row.
233 @bulk_ids{map $_->artistid, $bulk_rs->all} = ();
235 is ((scalar keys %bulk_ids), 3,
236 'identities generated correctly in _insert_bulk');
241 # make sure _insert_bulk works a second time on the same connection
243 skip '_insert_bulk not supported', 3
244 unless $storage_type !~ /NoBindVars/i;
247 $schema->resultset('Artist')->populate([
249 name => 'bulk artist 1',
253 name => 'bulk artist 2',
257 name => 'bulk artist 3',
261 } '_insert_bulk via populate called a second time';
263 is $bulk_rs->count, 3,
264 'correct number inserted via _insert_bulk';
266 is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
267 'column set correctly via _insert_bulk');
272 # test invalid _insert_bulk (missing required column)
275 local $SIG{__WARN__} = sigwarn_silencer(qr/Sybase bulk API operation failed due to character set incompatibility/)
276 unless $ENV{DBICTEST_SYBASE_SUBTEST_RERUN};
278 $schema->resultset('Artist')->populate([
# Three alternative error patterns are accepted (throws_ok alternation —
# the regex assembly lines are partially elided in this excerpt):
284 # The second pattern is the error from fallback to regular array insert on
285 # incompatible charset.
286 # The third is for ::NoBindVars with no syb_has_blk.
288 \Qno value or default\E
290 \Qdoes not allow null\E
292 \QUnable to invoke fast-path insert without storage placeholder support\E
294 '_insert_bulk with missing required column throws error';
296 # now test _insert_bulk with IDENTITY_INSERT
298 skip '_insert_bulk not supported', 3
299 unless $storage_type !~ /NoBindVars/i;
# Bulk rows here carry explicit artistid values (elided in this excerpt),
# exercising IDENTITY_INSERT through the bulk API.
302 $schema->resultset('Artist')->populate([
305 name => 'bulk artist 1',
310 name => 'bulk artist 2',
315 name => 'bulk artist 3',
319 } '_insert_bulk with IDENTITY_INSERT via populate';
321 is $bulk_rs->count, 3,
322 'correct number inserted via _insert_bulk with IDENTITY_INSERT';
324 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
325 'column set correctly via _insert_bulk with IDENTITY_INSERT');
330 # test correlated subquery
331 my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
332 ->get_column('artistid')
334 my $subq_rs = $schema->resultset('Artist')->search({
335 artistid => { -in => $subq }
337 is $subq_rs->count, 11, 'correlated subquery';
339 # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
# FreeTDS cannot handle the TEXT/IMAGE round-trips below.
341 skip 'TEXT/IMAGE support does not work with FreeTDS', 22
342 if $schema->storage->_using_freetds;
344 my $dbh = $schema->storage->_dbh;
# Best-effort drop of the fixture table; warnings suppressed.
346 local $SIG{__WARN__} = sub {};
347 eval { $dbh->do('DROP TABLE bindtype_test') };
350 CREATE TABLE bindtype_test
352 id INT IDENTITY PRIMARY KEY,
358 ],{ RaiseError => 1, PrintError => 0 });
# Fixture payloads: a 127-byte string and a ~127KB repetition of it.
361 my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
362 $binstr{'large'} = $binstr{'small'} x 1024;
364 my $maxloblen = length $binstr{'large'};
# Raise the driver's LOB read limits so the large payload round-trips.
366 if (not $schema->storage->_using_freetds) {
367 $dbh->{'LongReadLen'} = $maxloblen * 2;
369 $dbh->do("set textsize ".($maxloblen * 2));
372 my $rs = $schema->resultset('BindType');
# Insert and verify each size for both the IMAGE (blob) and TEXT (clob)
# columns.
375 foreach my $type (qw(blob clob)) {
376 foreach my $size (qw(small large)) {
377 no warnings 'uninitialized';
381 $created = $rs->create( { $type => $binstr{$size} } )
382 } "inserted $size $type without dying";
384 $last_id = $created->id if $created;
387 ok($rs->find($last_id)->$type eq $binstr{$size})
388 } "verified inserted $size $type";
405 my $new_str = $binstr{large} . 'mtfnpy';
407 # check redispatch to storage-specific update when auto-detected storage
408 if ($storage_type eq 'DBI::Sybase::ASE') {
409 DBICTest::Schema->storage_type('::DBI');
410 $schema = get_schema();
414 $rs->search({ id => 1 })->update({ blob => $new_str })
415 } 'updated blob successfully';
418 ok($rs->find(1)->blob eq $new_str)
419 } 'verified updated blob';
421 # try a blob update with IDENTITY_UPDATE
# Changing the PK and a blob in one update exercises IDENTITY_UPDATE
# together with the separate blob-write path.
423 $new_str = $binstr{large} . 'hlagh';
424 $rs->find(1)->update({ id => 999, blob => $new_str });
425 ok($rs->find(999)->blob eq $new_str);
426 } 'verified updated blob with IDENTITY_UPDATE';
428 ## try multi-row blob update
429 # first insert some blobs
430 $new_str = $binstr{large} . 'foo';
433 $rs->create({ blob => $binstr{large} }) for (1..2);
434 $rs->update({ blob => $new_str });
435 is((grep $_->blob eq $new_str, $rs->all), 2);
436 } 'multi-row blob update';
440 # now try _insert_bulk with blobs and only blobs
441 $new_str = $binstr{large} . 'bar';
# Two rows populated with both blob and clob payloads (row hashes are
# partially elided in this excerpt).
445 blob => $binstr{large},
449 blob => $binstr{large},
453 } '_insert_bulk with blobs does not die';
455 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
456 'IMAGE column set correctly via _insert_bulk');
458 is((grep $_->clob eq $new_str, $rs->all), 2,
459 'TEXT column set correctly via _insert_bulk');
461 # now try _insert_bulk with blobs and a non-blob which also happens to be an
# ...identity column (comment continues past an elided line).
464 skip 'no _insert_bulk without placeholders', 4
465 if $storage_type =~ /NoBindVars/i;
468 $new_str = $binstr{large} . 'bar';
474 blob => $binstr{large},
481 blob => $binstr{large},
486 } '_insert_bulk with blobs and explicit identity does NOT die';
488 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
489 'IMAGE column set correctly via _insert_bulk with identity');
491 is((grep $_->clob eq $new_str, $rs->all), 2,
492 'TEXT column set correctly via _insert_bulk with identity');
494 is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
495 'explicit identities set correctly via _insert_bulk with blobs';
# Updating blobs to NULL must also work across multiple rows.
500 $rs->create({ blob => $binstr{large} }) for (1..2);
501 $rs->update({ blob => undef });
502 is((grep !defined($_->blob), $rs->all), 2);
503 } 'blob update to NULL';
# Blob insert must survive inside an explicit transaction too.
506 $schema->txn_do(sub {
507 my $created = $rs->create( { clob => "some text" } );
509 } 'insert blob field in transaction';
510 $ping_count-- if $@; # failure retry triggers a ping
513 # test MONEY column support (and some other misc. stuff)
514 $schema->storage->dbh_do (sub {
515 my ($storage, $dbh) = @_;
516 eval { $dbh->do("DROP TABLE money_test") };
518 CREATE TABLE money_test (
519 id INT IDENTITY PRIMARY KEY,
520 amount MONEY DEFAULT $999.99 NULL
525 my $rs = $schema->resultset('Money');
527 # test insert with defaults
# A bare create() must pick up the column DEFAULT of $999.99.
530 is((grep $_->amount == 999.99, $rs->all), 1);
531 } 'insert with all defaults works';
534 # test insert transaction when there's an active cursor
# ASE can deadlock/err when an insert opens a txn while a cursor is open;
# this exercises that path (cursor-opening lines elided in this excerpt).
536 my $artist_rs = $schema->resultset('Artist');
539 my $row = $schema->resultset('Money')->create({ amount => 100 });
541 } 'inserted a row with an active cursor';
542 $ping_count-- if $@; # dbh_do calls ->connected
545 # test insert in an outer transaction when there's an active cursor
547 local $TODO = 'this should work once we have eager cursors';
549 # clear state, or we get a deadlock on $row->delete
550 # XXX figure out why this happens
551 $schema->storage->disconnect;
554 $schema->txn_do(sub {
555 my $artist_rs = $schema->resultset('Artist');
557 my $row = $schema->resultset('Money')->create({ amount => 100 });
560 } 'inserted a row with an active cursor in outer txn';
561 $ping_count-- if $@; # dbh_do calls ->connected
564 # Now test money values.
567 $row = $rs->create({ amount => 100 });
568 } 'inserted a money value';
# eval {} guards the round-trip reads so a fetch failure reports as a
# mismatch rather than killing the test.
570 cmp_ok eval { $rs->find($row->id)->amount }, '==', 100,
571 'money value round-trip';
574 $row->update({ amount => 200 });
575 } 'updated a money value';
577 cmp_ok eval { $rs->find($row->id)->amount }, '==', 200,
578 'updated money value round-trip';
581 $row->update({ amount => undef });
582 } 'updated a money value to NULL';
585 my $null_amount = $rs->find($row->id)->amount;
586 is $null_amount, undef;
587 } 'updated money value to NULL round-trip';
589 # Test computed columns and timestamps
590 $schema->storage->dbh_do (sub {
591 my ($storage, $dbh) = @_;
592 eval { $dbh->do("DROP TABLE computed_column_test") };
594 CREATE TABLE computed_column_test (
595 id INT IDENTITY PRIMARY KEY,
596 a_computed_column AS getdate(),
597 a_timestamp timestamp,
598 charfield VARCHAR(20) DEFAULT 'foo'
# The result class is registered lazily, only once this test is reached.
603 require DBICTest::Schema::ComputedColumn;
604 $schema->register_class(
605 ComputedColumn => 'DBICTest::Schema::ComputedColumn'
608 ok (($rs = $schema->resultset('ComputedColumn')),
609 'got rs for ComputedColumn');
# Inserts/updates must not attempt to send values for computed columns.
611 lives_ok { $row = $rs->create({}) }
612 'empty insert for a table with computed columns survived';
615 $row->update({ charfield => 'bar' })
616 } 'update of a table with computed columns survived';
# All the $ping_count-- adjustments above should net out to zero.
619 is $ping_count, 0, 'no pings';
621 # if tests passed and did so under a non-C LC_ALL - let's rerun the test
622 if (Test::Builder->new->is_passing and $ENV{LC_ALL} and $ENV{LC_ALL} ne 'C') {
624 pass ("Your LC_ALL is set to $ENV{LC_ALL} - retesting with C");
626 local $ENV{LC_ALL} = 'C';
# Marks the child run so charset-incompat warnings are NOT silenced there
# (see the sigwarn_silencer guards earlier in the file).
627 local $ENV{DBICTEST_SYBASE_SUBTEST_RERUN} = 1;
# Untaint interpreter + script path before re-exec.
630 my @cmd = map { $_ =~ /(.+)/ } ($^X, __FILE__);
632 # this is cheating, and may even hang here and there (testing on windows passed fine)
633 # will be replaced with Test::SubExec::Noninteractive in due course
635 IPC::Open2::open2(my $out, undef, @cmd);
636 while (my $ln = <$out>) {
641 ok (! $?, "Wstat $? from: @cmd");
# END-style cleanup: drop every fixture table if we still have a handle.
648 if (my $dbh = eval { $schema->storage->_dbh }) {
649 eval { $dbh->do("DROP TABLE $_") }
650 for qw/artist bindtype_test money_test computed_column_test/;