# --- Preamble (excerpt: original line numbers prefix each line; many lines elided) ---
# Silence uninitialized-value warnings for the whole test and load the optional
# dependency checker used below to gate the run on the ASE driver stack.
3 no warnings 'uninitialized';
7 use DBIx::Class::Optional::Dependencies ();
# Connection info comes from the environment. Skip everything when DSN/user are
# absent — this test DROPs and re-CREATEs real tables on the target server.
11 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
12 if (not ($dsn && $user)) {
13 plan skip_all => join ' ',
14 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test.',
15 'Warning: This test drops and creates the tables:',
16 "'artist', 'money_test' and 'bindtype_test'",
# Also skip unless the 'test_rdbms_ase' optional-dependency group is satisfied;
# req_missing_for() names exactly what is missing in the skip message.
20 plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_ase')
21 unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_ase');
23 # first run the test without the lang variable set
24 # it is important to do this before module load, hence
25 # the subprocess before the optdep check
# Only bother when LANG is set to something other than 'C'.
26 if ($ENV{LANG} and $ENV{LANG} ne 'C') {
27 my $oldlang = $ENV{LANG};
28 local $ENV{LANG} = 'C';
30 pass ("Your lang is set to $oldlang - testing with C first");
# Re-exec this very test file under the current perl binary with LANG=C.
32 my @cmd = ($^X, __FILE__);
34 # this is cheating, and may even hang here and there (testing on windows passed fine)
35 # will be replaced with Test::SubExec::Noninteractive in due course
37 IPC::Open2::open2(my $out, my $in, @cmd);
# Drain the child's output; body elided in this excerpt.
38 while (my $ln = <$out>) {
# A zero wait-status ($?) from the child means the LANG=C pass succeeded.
43 ok (! $?, "Wstat $? from: @cmd");
# Storage backends under test (list start elided; presumably includes the
# plain ASE storage before this NoBindVars variant — confirm in full source).
48 'DBI::Sybase::ASE::NoBindVars',
# String-eval require so a missing subclass does not abort compilation.
50 eval "require DBIx::Class::Storage::$_;" for @storage_types;
# Connect helper body (name elided here — presumably get_schema, used below).
56 DBICTest::Schema->connect($dsn, $user, $pass, {
58 [ blob_setup => log_on_update => 1 ], # this is a safer option
# Wrap the storage's _ping so the test can count how often it fires;
# the final assertion below expects zero pings.
65 my $ping = DBIx::Class::Storage::DBI::Sybase::ASE->can('_ping');
66 *DBIx::Class::Storage::DBI::Sybase::ASE::_ping = sub {
# --- Main loop: exercise the whole suite once per storage backend ---
72 for my $storage_type (@storage_types) {
# For the plain ASE entry, leave storage_type alone so autodetection runs.
75 unless ($storage_type eq 'DBI::Sybase::ASE') { # autodetect
76 DBICTest::Schema->storage_type("::$storage_type");
79 $schema = get_schema();
81 $schema->storage->ensure_connected;
# First iteration only: if autodetection landed on NoBindVars (old Sybase /
# DBD::Sybase, or FreeTDS), there is no placeholder support — skip everything.
83 if ($storage_idx == 0 &&
84 $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')) {
85 # no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
86 skip "Skipping entire test for $storage_type - no placeholder support", 1;
90 isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );
# Kill the low-level handle behind the storage's back, then make sure the
# storage transparently reconnects on next dbh access.
92 $schema->storage->_dbh->disconnect;
93 lives_ok (sub { $schema->storage->dbh }, 'reconnect works');
# (Re)create the artist table; DROP is wrapped in eval since it may not exist.
95 $schema->storage->dbh_do (sub {
96 my ($storage, $dbh) = @_;
97 eval { $dbh->do("DROP TABLE artist") };
100 artistid INT IDENTITY PRIMARY KEY,
102 rank INT DEFAULT 13 NOT NULL,
103 charfield CHAR(10) NULL
113 # test primary key handling
114 my $new = $schema->resultset('Artist')->create({ name => 'foo' });
115 ok($new->artistid > 0, "Auto-PK worked");
117 $seen_id{$new->artistid}++;
119 # check redispatch to storage-specific insert when auto-detected storage
120 if ($storage_type eq 'DBI::Sybase::ASE') {
121 DBICTest::Schema->storage_type('::DBI');
122 $schema = get_schema();
125 $new = $schema->resultset('Artist')->create({ name => 'Artist 1' });
126 is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' );
127 $seen_id{$new->artistid}++;
129 # inserts happen in a txn, so we make sure it still works inside a txn too
133 $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
134 is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
135 $seen_id{$new->artistid}++;
141 is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');
# --- LIMIT / OFFSET emulation checks (ASE has no native LIMIT) ---
# Plain LIMIT: rows/attrs partially elided, ordered by artistid.
144 my $it = $schema->resultset('Artist')->search({
145 artistid => { '>' => 0 }
148 order_by => 'artistid',
151 is( $it->count, 3, "LIMIT count ok" );
153 is( $it->next->name, "foo", "iterator->next ok" );
155 is( $it->next->name, "Artist 2", "iterator->next ok" );
156 is( $it->next, undef, "next past end of resultset ok" );
158 # now try with offset
159 $it = $schema->resultset('Artist')->search({}, {
162 order_by => 'artistid',
# With an offset the window starts at 'Artist 3' and skips every other row
# (exact rows/offset attrs elided in this excerpt).
165 is( $it->count, 3, "LIMIT with offset count ok" );
167 is( $it->next->name, "Artist 3", "iterator->next ok" );
169 is( $it->next->name, "Artist 5", "iterator->next ok" );
170 is( $it->next, undef, "next past end of resultset ok" );
172 # now try a grouped count
173 $schema->resultset('Artist')->create({ name => 'Artist 6' })
176 $it = $schema->resultset('Artist')->search({}, {
# 7 distinct groups expected after the extra 'Artist 6' insert above.
180 is( $it->count, 7, 'COUNT of GROUP_BY ok' );
182 # do an IDENTITY_INSERT
184 no warnings 'redefine';
# Capture every SQL statement via the debug callback so we can grep for the
# SET IDENTITY_INSERT toggle the storage is expected to emit.
187 local $schema->storage->{debug} = 1;
188 local $schema->storage->debugobj->{callback} = sub {
189 push @debug_out, $_[1];
# Also wrap txn_commit to count transaction use during the insert.
193 my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit;
194 local *DBIx::Class::Storage::DBI::txn_commit = sub {
# Supplying an explicit value for the IDENTITY PK forces IDENTITY_INSERT.
199 $schema->resultset('Artist')
200 ->create({ artistid => 999, name => 'mtfnpy' });
202 ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');
# NoBindVars takes a different code path, so the no-txn assertion is skipped.
205 skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
206 if $storage_type =~ /NoBindVars/i;
208 is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT';
212 # do an IDENTITY_UPDATE
# Same debug-callback capture as above, this time for an update of the PK.
215 local $schema->storage->{debug} = 1;
216 local $schema->storage->debugobj->{callback} = sub {
217 push @debug_out, $_[1];
# Changing the IDENTITY column's value requires SET IDENTITY_UPDATE on ASE.
221 $schema->resultset('Artist')
222 ->find(999)->update({ artistid => 555 });
223 ok((grep /IDENTITY_UPDATE/i, @debug_out));
224 } 'IDENTITY_UPDATE used';
# Resultset matching only the bulk-inserted fixtures created below.
228 my $bulk_rs = $schema->resultset('Artist')->search({
229 name => { -like => 'bulk artist %' }
232 # test insert_bulk using populate.
# insert_bulk needs placeholders, hence the double-negative NoBindVars guard.
234 skip 'insert_bulk not supported', 4
235 unless $storage_type !~ /NoBindVars/i;
# Three-row populate in void context triggers the insert_bulk fast path
# (per-row attrs elided in this excerpt).
238 $schema->resultset('Artist')->populate([
240 name => 'bulk artist 1',
244 name => 'bulk artist 2',
248 name => 'bulk artist 3',
252 } 'insert_bulk via populate';
254 is $bulk_rs->count, 3, 'correct number inserted via insert_bulk';
# Non-PK column values must survive the bulk path ('foo' presumably set in
# the elided row data — confirm in full source).
256 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
257 'column set correctly via insert_bulk');
# IDENTITY values must be distinct across the bulk-inserted rows.
260 @bulk_ids{map $_->artistid, $bulk_rs->all} = ();
262 is ((scalar keys %bulk_ids), 3,
263 'identities generated correctly in insert_bulk');
268 # make sure insert_bulk works a second time on the same connection
270 skip 'insert_bulk not supported', 3
271 unless $storage_type !~ /NoBindVars/i;
274 $schema->resultset('Artist')->populate([
276 name => 'bulk artist 1',
280 name => 'bulk artist 2',
284 name => 'bulk artist 3',
288 } 'insert_bulk via populate called a second time';
290 is $bulk_rs->count, 3,
291 'correct number inserted via insert_bulk';
292 # second round presumably uses charfield 'bar' (row data elided).
293 is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
294 'column set correctly via insert_bulk');
299 # test invalid insert_bulk (missing required column)
301 # There should be a rollback, reconnect and the next valid insert_bulk should
# throws_ok-style check (opening elided): populate with incomplete rows must
# die with one of the three known error shapes matched below.
304 $schema->resultset('Artist')->populate([
309 } qr/no value or default|does not allow null|placeholders/i,
310 # The second pattern is the error from fallback to regular array insert on
311 # incompatible charset.
312 # The third is for ::NoBindVars with no syb_has_blk.
313 'insert_bulk with missing required column throws error';
315 # now test insert_bulk with IDENTITY_INSERT
317 skip 'insert_bulk not supported', 3
318 unless $storage_type !~ /NoBindVars/i;
# Rows here presumably carry explicit artistid values (elided), forcing the
# IDENTITY_INSERT variant of the bulk path.
321 $schema->resultset('Artist')->populate([
324 name => 'bulk artist 1',
329 name => 'bulk artist 2',
334 name => 'bulk artist 3',
338 } 'insert_bulk with IDENTITY_INSERT via populate';
340 is $bulk_rs->count, 3,
341 'correct number inserted via insert_bulk with IDENTITY_INSERT';
343 is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
344 'column set correctly via insert_bulk with IDENTITY_INSERT');
349 # test correlated subquery
# Build an artistid column subquery (artistid > 3) and feed it to -in;
# 11 matching rows are expected given the inserts accumulated above.
350 my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
351 ->get_column('artistid')
353 my $subq_rs = $schema->resultset('Artist')->search({
354 artistid => { -in => $subq }
356 is $subq_rs->count, 11, 'correlated subquery';
358 # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
# Entire TEXT/IMAGE section is unusable under FreeTDS — skip all 22 tests.
360 skip 'TEXT/IMAGE support does not work with FreeTDS', 22
361 if $schema->storage->using_freetds;
363 my $dbh = $schema->storage->_dbh;
# Suppress the noisy warning from dropping a possibly-nonexistent table.
365 local $SIG{__WARN__} = sub {};
366 eval { $dbh->do('DROP TABLE bindtype_test') };
369 CREATE TABLE bindtype_test
371 id INT IDENTITY PRIMARY KEY,
377 ],{ RaiseError => 1, PrintError => 0 });
# Fixture data: 'small' is every byte value 1..127, 'large' is that x1024 —
# big enough to exceed default TEXT/IMAGE fetch limits.
380 my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
381 $binstr{'large'} = $binstr{'small'} x 1024;
383 my $maxloblen = length $binstr{'large'};
# Raise the fetch limits so round-trips of 'large' do not get truncated.
385 if (not $schema->storage->using_freetds) {
386 $dbh->{'LongReadLen'} = $maxloblen * 2;
388 $dbh->do("set textsize ".($maxloblen * 2));
391 my $rs = $schema->resultset('BindType');
# Round-trip every (column type x payload size) combination.
394 foreach my $type (qw(blob clob)) {
395 foreach my $size (qw(small large)) {
396 no warnings 'uninitialized';
400 $created = $rs->create( { $type => $binstr{$size} } )
401 } "inserted $size $type without dying";
403 $last_id = $created->id if $created;
# eq (string compare) is deliberate — payloads are raw bytes, not numbers.
406 ok($rs->find($last_id)->$type eq $binstr{$size})
407 } "verified inserted $size $type";
413 # blob insert with explicit PK
414 # also a good opportunity to test IDENTITY_INSERT
416 $rs->create( { id => 1, blob => $binstr{large} } )
417 } 'inserted large blob without dying with manual PK';
420 ok($rs->find(1)->blob eq $binstr{large})
421 } 'verified inserted large blob with manual PK';
424 my $new_str = $binstr{large} . 'mtfnpy';
426 # check redispatch to storage-specific update when auto-detected storage
427 if ($storage_type eq 'DBI::Sybase::ASE') {
428 DBICTest::Schema->storage_type('::DBI');
429 $schema = get_schema();
# Blob update must go through the Sybase-specific writer even after the
# generic ::DBI storage class was forced above.
433 $rs->search({ id => 1 })->update({ blob => $new_str })
434 } 'updated blob successfully';
437 ok($rs->find(1)->blob eq $new_str)
438 } 'verified updated blob';
440 # try a blob update with IDENTITY_UPDATE
442 $new_str = $binstr{large} . 'hlagh';
# Changing the PK together with the blob exercises IDENTITY_UPDATE + blob IO.
443 $rs->find(1)->update({ id => 999, blob => $new_str });
444 ok($rs->find(999)->blob eq $new_str);
445 } 'verified updated blob with IDENTITY_UPDATE';
447 ## try multi-row blob update
448 # first insert some blobs
449 $new_str = $binstr{large} . 'foo';
452 $rs->create({ blob => $binstr{large} }) for (1..2);
453 $rs->update({ blob => $new_str });
454 is((grep $_->blob eq $new_str, $rs->all), 2);
455 } 'multi-row blob update';
459 # now try insert_bulk with blobs and only blobs
460 $new_str = $binstr{large} . 'bar';
# populate() call opening elided; two rows, blob + (presumably) clob columns.
464 blob => $binstr{large},
468 blob => $binstr{large},
472 } 'insert_bulk with blobs does not die';
474 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
475 'IMAGE column set correctly via insert_bulk');
477 is((grep $_->clob eq $new_str, $rs->all), 2,
478 'TEXT column set correctly via insert_bulk');
480 # now try insert_bulk with blobs and a non-blob which also happens to be an
# (continuation elided — presumably "...identity column" per the assertions below)
483 skip 'no insert_bulk without placeholders', 4
484 if $storage_type =~ /NoBindVars/i;
487 $new_str = $binstr{large} . 'bar';
# Two rows with explicit ids 1 and 2 plus blob/clob payloads (ids elided).
493 blob => $binstr{large},
500 blob => $binstr{large},
505 } 'insert_bulk with blobs and explicit identity does NOT die';
507 is((grep $_->blob eq $binstr{large}, $rs->all), 2,
508 'IMAGE column set correctly via insert_bulk with identity');
510 is((grep $_->clob eq $new_str, $rs->all), 2,
511 'TEXT column set correctly via insert_bulk with identity');
513 is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
514 'explicit identities set correctly via insert_bulk with blobs';
# Finally verify a blob column can be set back to NULL via update.
519 $rs->create({ blob => $binstr{large} }) for (1..2);
520 $rs->update({ blob => undef });
521 is((grep !defined($_->blob), $rs->all), 2);
522 } 'blob update to NULL';
525 # test MONEY column support (and some other misc. stuff)
526 $schema->storage->dbh_do (sub {
527 my ($storage, $dbh) = @_;
528 eval { $dbh->do("DROP TABLE money_test") };
530 CREATE TABLE money_test (
531 id INT IDENTITY PRIMARY KEY,
# $999.99 is ASE MONEY literal syntax, serving as the column default below.
532 amount MONEY DEFAULT $999.99 NULL
537 my $rs = $schema->resultset('Money');
539 # test insert with defaults
# An all-defaults insert must materialize the $999.99 DEFAULT above.
542 is((grep $_->amount == 999.99, $rs->all), 1);
543 } 'insert with all defaults works';
546 # test insert transaction when there's an active cursor
# Holding an open cursor on artist while inserting into money_test used to
# deadlock/fail on ASE; the lives-style check (opening elided) guards that.
548 my $artist_rs = $schema->resultset('Artist');
551 my $row = $schema->resultset('Money')->create({ amount => 100 });
553 } 'inserted a row with an active cursor';
554 $ping_count-- if $@; # dbh_do calls ->connected
557 # test insert in an outer transaction when there's an active cursor
559 local $TODO = 'this should work once we have eager cursors';
561 # clear state, or we get a deadlock on $row->delete
562 # XXX figure out why this happens
563 $schema->storage->disconnect;
# Same active-cursor insert, but wrapped in an explicit outer txn_do.
566 $schema->txn_do(sub {
567 my $artist_rs = $schema->resultset('Artist');
569 my $row = $schema->resultset('Money')->create({ amount => 100 });
572 } 'inserted a row with an active cursor in outer txn';
573 $ping_count-- if $@; # dbh_do calls ->connected
576 # Now test money values.
# Round-trip insert/update/NULL for MONEY; cmp_ok '==' compares numerically
# since ASE returns money as a formatted value.
579 $row = $rs->create({ amount => 100 });
580 } 'inserted a money value';
582 cmp_ok eval { $rs->find($row->id)->amount }, '==', 100,
583 'money value round-trip';
586 $row->update({ amount => 200 });
587 } 'updated a money value';
589 cmp_ok eval { $rs->find($row->id)->amount }, '==', 200,
590 'updated money value round-trip';
593 $row->update({ amount => undef });
594 } 'updated a money value to NULL';
597 my $null_amount = $rs->find($row->id)->amount;
598 is $null_amount, undef;
599 } 'updated money value to NULL round-trip';
601 # Test computed columns and timestamps
602 $schema->storage->dbh_do (sub {
603 my ($storage, $dbh) = @_;
604 eval { $dbh->do("DROP TABLE computed_column_test") };
606 CREATE TABLE computed_column_test (
607 id INT IDENTITY PRIMARY KEY,
# AS getdate() is an ASE computed column — the server supplies the value.
608 a_computed_column AS getdate(),
609 a_timestamp timestamp,
610 charfield VARCHAR(20) DEFAULT 'foo'
# Register the matching result class at runtime (it is not part of the
# default DBICTest::Schema load).
615 require DBICTest::Schema::ComputedColumn;
616 $schema->register_class(
617 ComputedColumn => 'DBICTest::Schema::ComputedColumn'
620 ok (($rs = $schema->resultset('ComputedColumn')),
621 'got rs for ComputedColumn');
# Inserts/updates must not try to supply values for the computed/timestamp
# columns — an empty create and a plain update both have to survive.
623 lives_ok { $row = $rs->create({}) }
624 'empty insert for a table with computed columns survived';
627 $row->update({ charfield => 'bar' })
628 } 'update of a table with computed columns survived';
# The _ping wrapper installed near the top counted every ping; the suite is
# expected to complete without a single implicit reconnect check.
631 is $ping_count, 0, 'no pings';
# Cleanup (presumably in an END block — opening elided): best-effort drop of
# every table this test created, only if a live dbh still exists.
637 if (my $dbh = eval { $schema->storage->_dbh }) {
638 eval { $dbh->do("DROP TABLE $_") }
639 for qw/artist bindtype_test money_test computed_column_test/;