X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=t%2F746sybase.t;h=0c1130ccec69832604e3794180f4ea06a256d97b;hb=689819e14e9e6245000c64ece46ddd1bc8293bf5;hp=9e0caaec229cc2f91829f279b6003eaca8f14ba1;hpb=d867eedaa703200d7f0bc329836e99b6bd22bc39;p=dbsrgits%2FDBIx-Class.git diff --git a/t/746sybase.t b/t/746sybase.t index 9e0caae..0c1130c 100644 --- a/t/746sybase.t +++ b/t/746sybase.t @@ -6,12 +6,13 @@ use Test::More; use Test::Exception; use lib qw(t/lib); use DBICTest; -use DBIx::Class::Storage::DBI::Sybase; -use DBIx::Class::Storage::DBI::Sybase::NoBindVars; + +require DBIx::Class::Storage::DBI::Sybase; +require DBIx::Class::Storage::DBI::Sybase::NoBindVars; my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/}; -my $TESTS = 48 + 2; +my $TESTS = 51 + 2; if (not ($dsn && $user)) { plan skip_all => @@ -157,8 +158,7 @@ SQL is( $it->count, 7, 'COUNT of GROUP_BY ok' ); -# do an identity insert (which should happen with no txn when using -# placeholders.) +# do an IDENTITY_INSERT { no warnings 'redefine'; @@ -178,7 +178,7 @@ SQL $schema->resultset('Artist') ->create({ artistid => 999, name => 'mtfnpy' }); - ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT'); + ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used'); SKIP: { skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1 @@ -188,70 +188,96 @@ SQL } } -# test insert_bulk using populate, this should always pass whether or not it -# does anything Sybase specific or not. Just here to aid debugging. - lives_ok { - $schema->resultset('Artist')->populate([ - { - name => 'bulk artist 1', - charfield => 'foo', - }, - { - name => 'bulk artist 2', - charfield => 'foo', - }, - { - name => 'bulk artist 3', - charfield => 'foo', - }, - ]); - } 'insert_bulk via populate'; +# do an IDENTITY_UPDATE + { + my @debug_out; + local $schema->storage->{debug} = 1; + local $schema->storage->debugobj->{callback} = sub { + push @debug_out, $_[1]; + }; + + lives_and { + $schema->resultset('Artist') + ->find(999)->update({ artistid => 555 }); + ok((grep /IDENTITY_UPDATE/i, @debug_out)); + } 'IDENTITY_UPDATE used'; + $ping_count-- if $@; + } my $bulk_rs = $schema->resultset('Artist')->search({ name => { -like => 'bulk artist %' } }); - is $bulk_rs->count, 3, 'correct number inserted via insert_bulk'; - - is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3, - 'column set correctly via insert_bulk'); - - my %bulk_ids; - @bulk_ids{map $_->artistid, $bulk_rs->all} = (); - - is ((scalar keys %bulk_ids), 3, - 'identities generated correctly in insert_bulk'); +# test insert_bulk using populate, this should always pass whether or not it +# does anything Sybase specific or not. Just here to aid debugging. 
+  SKIP: {
+    skip 'insert_bulk not supported', 4
+      unless $schema->storage->_can_insert_bulk;
 
-  $bulk_rs->delete;
+    lives_ok {
+      $schema->resultset('Artist')->populate([
+        {
+          name => 'bulk artist 1',
+          charfield => 'foo',
+        },
+        {
+          name => 'bulk artist 2',
+          charfield => 'foo',
+        },
+        {
+          name => 'bulk artist 3',
+          charfield => 'foo',
+        },
+      ]);
+    } 'insert_bulk via populate';
+
+    is $bulk_rs->count, 3, 'correct number inserted via insert_bulk';
+
+    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
+      'column set correctly via insert_bulk');
+
+    my %bulk_ids;
+    @bulk_ids{map $_->artistid, $bulk_rs->all} = ();
+
+    is ((scalar keys %bulk_ids), 3,
+      'identities generated correctly in insert_bulk');
+
+    $bulk_rs->delete;
+  }
 
 # now test insert_bulk with IDENTITY_INSERT
-  lives_ok {
-    $schema->resultset('Artist')->populate([
-      {
-        artistid => 2001,
-        name => 'bulk artist 1',
-        charfield => 'foo',
-      },
-      {
-        artistid => 2002,
-        name => 'bulk artist 2',
-        charfield => 'foo',
-      },
-      {
-        artistid => 2003,
-        name => 'bulk artist 3',
-        charfield => 'foo',
-      },
-    ]);
-  } 'insert_bulk with IDENTITY_INSERT via populate';
-
-  is $bulk_rs->count, 3,
-    'correct number inserted via insert_bulk with IDENTITY_INSERT';
-
-  is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
-    'column set correctly via insert_bulk with IDENTITY_INSERT');
-
-  $bulk_rs->delete;
+  SKIP: {
+    skip 'insert_bulk not supported', 3
+      unless $schema->storage->_can_insert_bulk;
+
+    lives_ok {
+      $schema->resultset('Artist')->populate([
+        {
+          artistid => 2001,
+          name => 'bulk artist 1',
+          charfield => 'foo',
+        },
+        {
+          artistid => 2002,
+          name => 'bulk artist 2',
+          charfield => 'foo',
+        },
+        {
+          artistid => 2003,
+          name => 'bulk artist 3',
+          charfield => 'foo',
+        },
+      ]);
+    } 'insert_bulk with IDENTITY_INSERT via populate';
+
+    is $bulk_rs->count, 3,
+      'correct number inserted via insert_bulk with IDENTITY_INSERT';
+
+    is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
+      'column set correctly via insert_bulk with IDENTITY_INSERT');
+
+    $bulk_rs->delete;
+  }
 
 # test correlated subquery
   my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } })
@@ -264,7 +290,7 @@ SQL
 
 # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
 SKIP: {
-  skip 'TEXT/IMAGE support does not work with FreeTDS', 13
+  skip 'TEXT/IMAGE support does not work with FreeTDS', 15
     if $schema->storage->using_freetds;
 
   my $dbh = $schema->storage->_dbh;
@@ -317,20 +343,9 @@ SQL
 
 # blob insert with explicit PK
 # also a good opportunity to test IDENTITY_INSERT
-  {
-    local $SIG{__WARN__} = sub {};
-    eval { $dbh->do('DROP TABLE bindtype_test') };
-
-    $dbh->do(qq[
-      CREATE TABLE bindtype_test
-      (
-        id INT IDENTITY PRIMARY KEY,
-        bytea INT NULL,
-        blob IMAGE NULL,
-        clob TEXT NULL
-      )
-    ],{ RaiseError => 1, PrintError => 0 });
-  }
+  $rs->delete;
+
   my $created = eval { $rs->create( { id => 1, blob => $binstr{large} } ) };
   ok(!$@, "inserted large blob without dying with manual PK");
   diag $@ if $@;
@@ -359,13 +374,27 @@ SQL
     diag $@ if $@;
     ok($got eq $new_str, "verified updated blob");
 
+    # try a blob update with IDENTITY_UPDATE
+    lives_and {
+      $new_str = $binstr{large} . 'hlagh';
+      $rs->find(1)->update({ id => 999, blob => $new_str });
+      ok($rs->find(999)->blob eq $new_str);
+    } 'verified updated blob with IDENTITY_UPDATE';
+
   ## try multi-row blob update
   # first insert some blobs
-  $rs->find(1)->delete;
+  $rs->delete;
   $rs->create({ blob => $binstr{large} }) for (1..3);
 
   $new_str = $binstr{large} . 'foo';
   $rs->update({ blob => $new_str });
   is((grep $_->blob eq $new_str, $rs->all), 3, 'multi-row blob update');
+
+  # make sure impossible blob update throws
+  throws_ok {
+    $rs->update({ clob => 'foo' });
+    $rs->create({ clob => 'bar' });
+    $rs->search({ clob => 'foo' })->update({ clob => 'bar' });
+  } qr/impossible/, 'impossible blob update throws';
 }
 
 # test MONEY column support
@@ -381,10 +410,7 @@ SQL
   });
 
 # test insert transaction when there's an active cursor
-  SKIP: {
-    skip 'not testing insert with active cursor if using ::NoBindVars', 1
-      if $storage_type =~ /NoBindVars/i;
-
+  {
    my $artist_rs = $schema->resultset('Artist');
    $artist_rs->first;
    lives_ok {
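
A note on the pattern behind the new IDENTITY_INSERT/IDENTITY_UPDATE assertions in this patch: the test turns on SQL tracing, collects every statement the storage runs, and then greps the trace for the expected statement. Below is a minimal standalone sketch of that technique. It uses the public debugcb hook rather than the localized debugobj callback seen in the diff, and My::Schema plus the connection handling are placeholders for illustration only, not part of this patch.

use strict;
use warnings;
use Test::More;

# Placeholder schema class -- any DBIx::Class schema with an Artist
# source whose primary key is an IDENTITY column would do.
use My::Schema;

my ($dsn, $user, $pass) =
  @ENV{qw/DBICTEST_SYBASE_DSN DBICTEST_SYBASE_USER DBICTEST_SYBASE_PASS/};
my $schema = My::Schema->connect($dsn, $user, $pass);

# Trace every statement the storage executes into @sql_trace.
my @sql_trace;
$schema->storage->debug(1);
$schema->storage->debugcb(sub {
  my ($op, $info) = @_;   # $info carries the SQL string and bind values
  push @sql_trace, $info;
});

# Supplying an explicit identity value should make the Sybase storage
# bracket the INSERT with SET IDENTITY_INSERT <table> ON/OFF.
$schema->resultset('Artist')->create({ artistid => 999, name => 'mtfnpy' });

ok( (grep { /IDENTITY_INSERT/i } @sql_trace),
  'IDENTITY_INSERT present in traced SQL' );

done_testing;

The @debug_out blocks in the diff do the same trace-and-grep inline, with the debug flag and callback localized so the tracing only applies to the statements under test.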