diff --git a/t/746sybase.t b/t/746sybase.t
index 62a9e52..677d78a 100644
--- a/t/746sybase.t
+++ b/t/746sybase.t
@@ -1,6 +1,5 @@
 use strict;
-use warnings;
-no warnings 'uninitialized';
+use warnings;
 
 use Test::More;
 use Test::Exception;
@@ -9,220 +8,84 @@ use DBICTest;
 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
 
-my $TESTS = 29 + 2;
+plan skip_all => 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test'
+  unless ($dsn && $user);
 
-if (not ($dsn && $user)) {
-  plan skip_all =>
-    'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' .
-    "\nWarning: This test drops and creates the tables " .
-    "'artist' and 'bindtype_test'";
-} else {
-  plan tests => $TESTS*2;
-}
+plan tests => 13;
+
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
+
+# start disconnected to test reconnection
+$schema->storage->ensure_connected;
+$schema->storage->_dbh->disconnect;
+
+isa_ok( $schema->storage, 'DBIx::Class::Storage::DBI::Sybase' );
+
+my $dbh;
+lives_ok (sub {
+  $dbh = $schema->storage->dbh;
+}, 'reconnect works');
+
+$schema->storage->dbh_do (sub {
+  my ($storage, $dbh) = @_;
+  eval { $dbh->do("DROP TABLE artist") };
+  $dbh->do(<<'SQL');
 
-my @storage_types = (
-  'DBI::Sybase',
-  'DBI::Sybase::NoBindVars',
-);
-my $schema;
-my $storage_idx = -1;
-
-for my $storage_type (@storage_types) {
-  $storage_idx++;
-  $schema = DBICTest::Schema->clone;
-
-  unless ($storage_type eq 'DBI::Sybase') { # autodetect
-    $schema->storage_type("::$storage_type");
-  }
-  $schema->connection($dsn, $user, $pass, {
-    AutoCommit => 1,
-    on_connect_call => [
-      [ blob_setup => log_on_update => 1 ], # this is a safer option
-    ],
-  });
-
-  $schema->storage->ensure_connected;
-  $schema->storage->_dbh->disconnect;
-
-  if ($storage_idx == 0 &&
-      $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::NoBindVars')) {
-# no placeholders in this version of Sybase or DBD::Sybase
-    my $tb = Test::More->builder;
-    $tb->skip('no placeholders') for 1..$TESTS;
-    next;
-  }
-
-  isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" );
-
-  lives_ok (sub { $schema->storage->dbh }, 'reconnect works');
-
-  $schema->storage->dbh_do (sub {
-    my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE artist") };
-    $dbh->do(<<'SQL');
 CREATE TABLE artist (
-   artistid INT IDENTITY PRIMARY KEY,
+   artistid INT IDENTITY NOT NULL,
    name VARCHAR(100),
    rank INT DEFAULT 13 NOT NULL,
-   charfield CHAR(10) NULL
+   charfield CHAR(10) NULL,
+   primary key(artistid)
 )
+
 SQL
-  });
 
-  my %seen_id;
+});
 
-# so we start unconnected
-  $schema->storage->disconnect;
+my %seen_id;
+
+# fresh $schema so we start unconnected
+$schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
 
 # test primary key handling
-  my $new = $schema->resultset('Artist')->create({ name => 'foo' });
-  ok($new->artistid > 0, "Auto-PK worked");
+my $new = $schema->resultset('Artist')->create({ name => 'foo' });
+ok($new->artistid > 0, "Auto-PK worked");
 
-  $seen_id{$new->artistid}++;
+$seen_id{$new->artistid}++;
 
-  for (1..6) {
+# test LIMIT support
+for (1..6) {
     $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
     is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
     $seen_id{$new->artistid}++;
- }
+}
 
-# test simple count
-  is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok');
+my $it;
 
-# test LIMIT support
-  my $it = $schema->resultset('Artist')->search({
-    artistid => { '>' => 0 }
-  }, {
+$it = $schema->resultset('Artist')->search( {}, {
     rows => 3,
     order_by => 'artistid',
-  });
-
-  is( $it->count, 3, "LIMIT count ok" );
+});
 
-  is( $it->next->name, "foo", "iterator->next ok" );
-  $it->next;
-  is( $it->next->name, "Artist 2", "iterator->next ok" );
-  is( $it->next, undef, "next past end of resultset ok" );
+TODO: {
+  local $TODO = 'Sybase LIMIT emulation is currently broken';
 
-# now try with offset
-  $it = $schema->resultset('Artist')->search({}, {
-    rows => 3,
-    offset => 3,
-    order_by => 'artistid',
-  });
-
-  is( $it->count, 3, "LIMIT with offset count ok" );
-
-  is( $it->next->name, "Artist 3", "iterator->next ok" );
-  $it->next;
-  is( $it->next->name, "Artist 5", "iterator->next ok" );
-  is( $it->next, undef, "next past end of resultset ok" );
-
-# now try a grouped count
-  $schema->resultset('Artist')->create({ name => 'Artist 6' })
-    for (1..6);
-
-  $it = $schema->resultset('Artist')->search({}, {
-    group_by => 'name'
-  });
-
-  is( $it->count, 7, 'COUNT of GROUP_BY ok' );
-
-# mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
-  SKIP: {
-    skip 'Need at least version 1.09 of DBD::Sybase to test TEXT/IMAGE', 12
-      unless $DBD::Sybase::VERSION >= 1.09;
-
-    my $dbh = $schema->storage->dbh;
-    {
-      local $SIG{__WARN__} = sub {};
-      eval { $dbh->do('DROP TABLE bindtype_test') };
-
-      $dbh->do(qq[
-        CREATE TABLE bindtype_test
-        (
-          id INT IDENTITY PRIMARY KEY,
-          bytea INT NULL,
-          blob IMAGE NULL,
-          clob TEXT NULL
-        )
-      ],{ RaiseError => 1, PrintError => 0 });
-    }
-
-    my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
-    $binstr{'large'} = $binstr{'small'} x 1024;
-
-    my $maxloblen = length $binstr{'large'};
-    note
-      "Localizing LongReadLen to $maxloblen to avoid truncation of test data";
-    local $dbh->{'LongReadLen'} = $maxloblen;
-
-    my $rs = $schema->resultset('BindType');
-    my $last_id;
-
-    foreach my $type (qw(blob clob)) {
-      foreach my $size (qw(small large)) {
-        no warnings 'uninitialized';
-
-        my $created = eval { $rs->create( { $type => $binstr{$size} } ) };
-        ok(!$@, "inserted $size $type without dying");
-        diag $@ if $@;
-
-        $last_id = $created->id if $created;
-
-        my $got = eval {
-          $rs->search({ id => $last_id }, { select => [$type] })->single->$type
-        };
-        diag $@ if $@;
-        ok($got eq $binstr{$size}, "verified inserted $size $type");
-      }
-    }
-
-    # try a blob update
-    TODO: {
-      local $TODO = 'updating TEXT/IMAGE does not work yet';
-
-      my $new_str = $binstr{large} . 'foo';
-      eval { $rs->search({ id => $last_id })->update({ blob => $new_str }) };
-      ok !$@, 'updated blob successfully';
-      diag $@ if $@;
-      ok(eval {
-        $rs->search({ id => $last_id }, { select => ['blob'] })->single->blob
-      } eq $new_str, "verified updated blob" );
-      diag $@ if $@;
-    }
-
-    # blob insert with explicit PK
-    {
-      local $SIG{__WARN__} = sub {};
-      eval { $dbh->do('DROP TABLE bindtype_test') };
-
-      $dbh->do(qq[
-        CREATE TABLE bindtype_test
-        (
-          id INT PRIMARY KEY,
-          bytea INT NULL,
-          blob IMAGE NULL,
-          clob TEXT NULL
-        )
-      ],{ RaiseError => 1, PrintError => 0 });
-    }
-    my $created = eval { $rs->create( { id => 1, blob => $binstr{large} } ) };
-    ok(!$@, "inserted large blob without dying");
-    diag $@ if $@;
-
-    my $got = eval {
-      $rs->search({ id => 1 }, { select => ['blob'] })->single->blob
-    };
-    diag $@ if $@;
-    ok($got eq $binstr{large}, "verified inserted large blob");
-  }
+  is( $it->count, 3, "LIMIT count ok" );
 }
 
+# Interestingly, the iterator still returns the correct rows with rows => 3,
+# even though the generated SQL is wrong.
+
+is( $it->next->name, "foo", "iterator->next ok" );
+$it->next;
+is( $it->next->name, "Artist 2", "iterator->next ok" );
+is( $it->next, undef, "next past end of resultset ok" );
+
+
 # clean up our mess
 END {
-  if (my $dbh = eval { $schema->storage->_dbh }) {
-    $dbh->do('DROP TABLE artist');
-    eval { $dbh->do('DROP TABLE bindtype_test') };
-  }
+  my $dbh = eval { $schema->storage->_dbh };
+  $dbh->do('DROP TABLE artist') if $dbh;
 }
+
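For anyone who wants to poke at the new behaviour outside the test harness, the connection and rows-limited resultset pattern introduced by this patch can be reproduced with a short standalone script. This is a minimal sketch, not part of the patch: it assumes you run it from the root of a DBIx-Class checkout (for t/lib and the DBICTest schema), that a Sybase server is reachable through the same DBICTEST_SYBASE_* environment variables the test uses, and that the 'artist' table created by t/746sybase.t is still around.

    #!/usr/bin/env perl
    use strict;
    use warnings;

    use lib 't/lib';   # assumes the current directory is a DBIx-Class checkout
    use DBICTest;

    my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
    die "Set DBICTEST_SYBASE_DSN, _USER and _PASS first\n" unless $dsn && $user;

    # connect the same way the test does: plain DBI attributes, storage autodetected
    my $schema = DBICTest::Schema->connect($dsn, $user, $pass, { AutoCommit => 1 });
    $schema->storage->ensure_connected;
    printf "storage class: %s\n", ref $schema->storage;

    # a rows-limited, ordered resultset; its count is the part marked TODO above
    my $rs = $schema->resultset('Artist')->search({}, {
      rows     => 3,
      order_by => 'artistid',
    });

    # assumes the 'artist' table from a prior run of t/746sybase.t still exists
    while (my $artist = $rs->next) {
      printf "%d: %s\n", $artist->artistid, $artist->name;
    }

Running it with DBIC_TRACE=1 set in the environment makes it easy to compare what the iterator returns against the SQL that actually reaches the server, which is the behaviour the TODO block above is tracking.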