use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_ase';

no warnings 'uninitialized';

  'DBI::Sybase::ASE::NoBindVars',

my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};

  DBICTest::Schema->connect($dsn, $user, $pass, {
    [ blob_setup => log_on_update => 1 ], # this is a safer option
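    # blob_setup is the ASE storage's on_connect_call helper; log_on_update => 1
    # presumably makes TEXT/IMAGE writes go through the transaction log rather
    # than being minimally logged, which is why it is the "safer" (if slower)
    # choice.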
  require DBIx::Class::Storage::DBI::Sybase::ASE;
  my $ping = DBIx::Class::Storage::DBI::Sybase::ASE->can('_ping');
  *DBIx::Class::Storage::DBI::Sybase::ASE::_ping = sub {
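    # a minimal sketch of what a counting override like this does: bump the
    # shared counter, then tail-call the saved original method, e.g.
    #   $ping_count++;
    #   goto $ping;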
for my $storage_type (@storage_types) {

  unless ($storage_type eq 'DBI::Sybase::ASE') { # autodetect
    DBICTest::Schema->storage_type("::$storage_type");

  $schema = get_schema();

  $schema->storage->ensure_connected;

  # we are going to explicitly test this anyway, just loop through
    $storage_type ne 'DBI::Sybase::ASE::NoBindVars'
    $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')

  isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );

  $schema->storage->_dbh->disconnect;
  lives_ok (sub { $schema->storage->dbh }, 'reconnect works');
  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE artist") };
   artistid INT IDENTITY PRIMARY KEY,
   rank INT DEFAULT 13 NOT NULL,
   charfield CHAR(10) NULL

  # so we start unconnected
  $schema->storage->disconnect;
  # test primary key handling
  my $new = $schema->resultset('Artist')->create({ name => 'foo' });
  like $new->artistid, qr/^\d+\z/, 'Auto-PK returned a number';
  ok($new->artistid > 0, "Auto-PK worked");

  $seen_id{$new->artistid}++;
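  # the storage fetches the generated identity itself right after the INSERT
  # (presumably something along the lines of SELECT @@identity on the same
  # connection), which is why artistid is already populated on $new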
  # check redispatch to storage-specific insert when auto-detected storage
  if ($storage_type eq 'DBI::Sybase::ASE') {
    DBICTest::Schema->storage_type('::DBI');
    $schema = get_schema();

  $new = $schema->resultset('Artist')->create({ name => 'Artist 1' });
  is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' );
  $seen_id{$new->artistid}++;

  # inserts happen in a txn, so we make sure it still works inside a txn too
    $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
    is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
    $seen_id{$new->artistid}++;
  is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');

  my $it = $schema->resultset('Artist')->search({
    artistid => { '>' => 0 }
    order_by => 'artistid',

  is( $it->count, 3, "LIMIT count ok" );

  is( $it->next->name, "foo", "iterator->next ok" );
  is( $it->next->name, "Artist 2", "iterator->next ok" );
  is( $it->next, undef, "next past end of resultset ok" );
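  # ASE has no native LIMIT/OFFSET; DBIC emulates it, presumably satisfying a
  # plain rows => N search like the one above via SET ROWCOUNT and falling
  # back to a generic subquery for the rows-plus-offset search below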
  # now try with offset
  $it = $schema->resultset('Artist')->search({}, {
    order_by => 'artistid',

  is( $it->count, 3, "LIMIT with offset count ok" );

  is( $it->next->name, "Artist 3", "iterator->next ok" );
  is( $it->next->name, "Artist 5", "iterator->next ok" );
  is( $it->next, undef, "next past end of resultset ok" );

  # now try a grouped count
  $schema->resultset('Artist')->create({ name => 'Artist 6' })

  $it = $schema->resultset('Artist')->search({}, {

  is( $it->count, 7, 'COUNT of GROUP_BY ok' );
  # do an IDENTITY_INSERT
    no warnings 'redefine';

    local $schema->storage->{debug} = 1;
    local $schema->storage->debugobj->{callback} = sub {
      push @debug_out, $_[1];

    my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit;
    local *DBIx::Class::Storage::DBI::txn_commit = sub {

    $schema->resultset('Artist')
      ->create({ artistid => 999, name => 'mtfnpy' });

    ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');
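    # ASE refuses explicit values for an IDENTITY column unless the session
    # first enables them, along the lines of:
    #   SET IDENTITY_INSERT artist ON
    # hence grepping the debug output for it above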
      skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
        if $storage_type =~ /NoBindVars/i;

      is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT';
  # do an IDENTITY_UPDATE
    local $schema->storage->{debug} = 1;
    local $schema->storage->debugobj->{callback} = sub {
      push @debug_out, $_[1];

      $schema->resultset('Artist')
        ->find(999)->update({ artistid => 555 });
      ok((grep /IDENTITY_UPDATE/i, @debug_out));
    } 'IDENTITY_UPDATE used';
  my $bulk_rs = $schema->resultset('Artist')->search({
    name => { -like => 'bulk artist %' }

  # test _insert_bulk using populate.
    skip '_insert_bulk not supported', 4
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk via populate';

    is $bulk_rs->count, 3, 'correct number inserted via _insert_bulk';

    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk');

    @bulk_ids{map $_->artistid, $bulk_rs->all} = ();

    is ((scalar keys %bulk_ids), 3,
      'identities generated correctly in _insert_bulk');
  # make sure _insert_bulk works a second time on the same connection
    skip '_insert_bulk not supported', 3
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk via populate called a second time';

    is $bulk_rs->count, 3,
      'correct number inserted via _insert_bulk';

    is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk');
  # test invalid _insert_bulk (missing required column)
      $schema->resultset('Artist')->populate([

  # The second pattern is the error from fallback to regular array insert on
  # incompatible charset.
  # The third is for ::NoBindVars with no syb_has_blk.
    \Qno value or default\E
    \Qdoes not allow null\E
    \QUnable to invoke fast-path insert without storage placeholder support\E
  '_insert_bulk with missing required column throws error';
  # now test _insert_bulk with IDENTITY_INSERT
    skip '_insert_bulk not supported', 3
      if $storage_type =~ /NoBindVars/i;

      $schema->resultset('Artist')->populate([
          name => 'bulk artist 1',
          name => 'bulk artist 2',
          name => 'bulk artist 3',
    } '_insert_bulk with IDENTITY_INSERT via populate';

    is $bulk_rs->count, 3,
      'correct number inserted via _insert_bulk with IDENTITY_INSERT';

    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
      'column set correctly via _insert_bulk with IDENTITY_INSERT');
  # test correlated subquery
  my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
    ->get_column('artistid')

  my $subq_rs = $schema->resultset('Artist')->search({
    artistid => { -in => $subq }

  is $subq_rs->count, 11, 'correlated subquery';
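  # the SQL this produces is roughly (a sketch, not verbatim output):
  #   SELECT COUNT( * ) FROM artist me
  #   WHERE me.artistid IN ( SELECT artistid FROM artist WHERE artistid > 3 )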
  # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
    skip 'TEXT/IMAGE support does not work with FreeTDS', 22
      if $schema->storage->_using_freetds;

    my $dbh = $schema->storage->_dbh;
      local $SIG{__WARN__} = sub {};
      eval { $dbh->do('DROP TABLE bindtype_test') };

        CREATE TABLE bindtype_test
          id INT IDENTITY PRIMARY KEY,
      ],{ RaiseError => 1, PrintError => 0 });

    my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
    $binstr{'large'} = $binstr{'small'} x 1024;

    my $maxloblen = length $binstr{'large'};

    if (not $schema->storage->_using_freetds) {
      $dbh->{'LongReadLen'} = $maxloblen * 2;
      $dbh->do("set textsize ".($maxloblen * 2));
    my $rs = $schema->resultset('BindType');

    foreach my $type (qw(blob clob)) {
      foreach my $size (qw(small large)) {
        no warnings 'uninitialized';

          $created = $rs->create( { $type => $binstr{$size} } )
        } "inserted $size $type without dying";

        $last_id = $created->id if $created;

          ok($rs->find($last_id)->$type eq $binstr{$size})
        } "verified inserted $size $type";

    # blob insert with explicit PK
    # also a good opportunity to test IDENTITY_INSERT
      $rs->create( { id => 1, blob => $binstr{large} } )
    } 'inserted large blob without dying with manual PK';

      ok($rs->find(1)->blob eq $binstr{large})
    } 'verified inserted large blob with manual PK';
    my $new_str = $binstr{large} . 'mtfnpy';

    # check redispatch to storage-specific update when auto-detected storage
    if ($storage_type eq 'DBI::Sybase::ASE') {
      DBICTest::Schema->storage_type('::DBI');
      $schema = get_schema();

      $rs->search({ id => 1 })->update({ blob => $new_str })
    } 'updated blob successfully';

      ok($rs->find(1)->blob eq $new_str)
    } 'verified updated blob';

    # try a blob update with IDENTITY_UPDATE
      $new_str = $binstr{large} . 'hlagh';
      $rs->find(1)->update({ id => 999, blob => $new_str });
      ok($rs->find(999)->blob eq $new_str);
    } 'verified updated blob with IDENTITY_UPDATE';
    ## try multi-row blob update
    # first insert some blobs
    $new_str = $binstr{large} . 'foo';
      $rs->create({ blob => $binstr{large} }) for (1..2);
      $rs->update({ blob => $new_str });
      is((grep $_->blob eq $new_str, $rs->all), 2);
    } 'multi-row blob update';

    # now try _insert_bulk with blobs and only blobs
    $new_str = $binstr{large} . 'bar';
          blob => $binstr{large},
          blob => $binstr{large},
    } '_insert_bulk with blobs does not die';

    is((grep $_->blob eq $binstr{large}, $rs->all), 2,
      'IMAGE column set correctly via _insert_bulk');

    is((grep $_->clob eq $new_str, $rs->all), 2,
      'TEXT column set correctly via _insert_bulk');
    # now try _insert_bulk with blobs and a non-blob which also happens to be an
    # identity column
      skip 'no _insert_bulk without placeholders', 4
        if $storage_type =~ /NoBindVars/i;

      $new_str = $binstr{large} . 'bar';
            blob => $binstr{large},
            blob => $binstr{large},
      } '_insert_bulk with blobs and explicit identity does NOT die';

      is((grep $_->blob eq $binstr{large}, $rs->all), 2,
        'IMAGE column set correctly via _insert_bulk with identity');

      is((grep $_->clob eq $new_str, $rs->all), 2,
        'TEXT column set correctly via _insert_bulk with identity');

      is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
        'explicit identities set correctly via _insert_bulk with blobs';

      $rs->create({ blob => $binstr{large} }) for (1..2);
      $rs->update({ blob => undef });
      is((grep !defined($_->blob), $rs->all), 2);
    } 'blob update to NULL';
  # test MONEY column support (and some other misc. stuff)
  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE money_test") };
CREATE TABLE money_test (
   id INT IDENTITY PRIMARY KEY,
   amount MONEY DEFAULT $999.99 NULL

  my $rs = $schema->resultset('Money');

  # test insert with defaults
    is((grep $_->amount == 999.99, $rs->all), 1);
  } 'insert with all defaults works';

  # test insert transaction when there's an active cursor
    my $artist_rs = $schema->resultset('Artist');
      my $row = $schema->resultset('Money')->create({ amount => 100 });
    } 'inserted a row with an active cursor';
    $ping_count-- if $@; # dbh_do calls ->connected
  # test insert in an outer transaction when there's an active cursor
    local $TODO = 'this should work once we have eager cursors';

    # clear state, or we get a deadlock on $row->delete
    # XXX figure out why this happens
    $schema->storage->disconnect;

      $schema->txn_do(sub {
        my $artist_rs = $schema->resultset('Artist');
        my $row = $schema->resultset('Money')->create({ amount => 100 });
    } 'inserted a row with an active cursor in outer txn';
    $ping_count-- if $@; # dbh_do calls ->connected

  # Now test money values.
    $row = $rs->create({ amount => 100 });
  } 'inserted a money value';

  cmp_ok eval { $rs->find($row->id)->amount }, '==', 100,
    'money value round-trip';

    $row->update({ amount => 200 });
  } 'updated a money value';

  cmp_ok eval { $rs->find($row->id)->amount }, '==', 200,
    'updated money value round-trip';

    $row->update({ amount => undef });
  } 'updated a money value to NULL';

    my $null_amount = $rs->find($row->id)->amount;
    is $null_amount, undef;
  } 'updated money value to NULL round-trip';
  # Test computed columns and timestamps
  $schema->storage->dbh_do (sub {
    my ($storage, $dbh) = @_;
    eval { $dbh->do("DROP TABLE computed_column_test") };
CREATE TABLE computed_column_test (
   id INT IDENTITY PRIMARY KEY,
   a_computed_column AS getdate(),
   a_timestamp timestamp,
   charfield VARCHAR(20) DEFAULT 'foo'

  require DBICTest::Schema::ComputedColumn;
  $schema->register_class(
    ComputedColumn => 'DBICTest::Schema::ComputedColumn'

  ok (($rs = $schema->resultset('ComputedColumn')),
    'got rs for ComputedColumn');

  lives_ok { $row = $rs->create({}) }
    'empty insert for a table with computed columns survived';

    $row->update({ charfield => 'bar' })
  } 'update of a table with computed columns survived';
is $ping_count, 0, 'no pings';

# if tests passed and did so under a non-C lang - let's rerun the test
if (Test::Builder->new->is_passing and $ENV{LANG} and $ENV{LANG} ne 'C') {
  my $oldlang = $ENV{LANG};
  local $ENV{LANG} = 'C';

  pass ("Your lang is set to $oldlang - retesting with C");

  # the capture untaints the interpreter path and the test file name before
  # they are handed to open2()
  my @cmd = map { $_ =~ /(.+)/ } ($^X, __FILE__);

  # this is cheating, and may even hang here and there (testing on windows passed fine)
  # will be replaced with Test::SubExec::Noninteractive in due course
  IPC::Open2::open2(my $out, undef, @cmd);
  while (my $ln = <$out>) {

  ok (! $?, "Wstat $? from: @cmd");
  if (my $dbh = eval { $schema->storage->_dbh }) {
    eval { $dbh->do("DROP TABLE $_") }
      for qw/artist bindtype_test money_test computed_column_test/;