From: Rafael Kitover Date: Thu, 30 Jul 2009 13:46:04 +0000 (-0400) Subject: Merge 'trunk' into 'mssql_storage_minor_refactor' X-Git-Tag: v0.08109~47^2~17 X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=08d78f41d2257beb17845164f42006be0d7b3b57;hp=41308e8218a90291b3f01f01a06229a1b9fa3975;p=dbsrgits%2FDBIx-Class.git Merge 'trunk' into 'mssql_storage_minor_refactor' r6389@hlagh (orig r7140): caelum | 2009-07-30 08:46:04 -0400 update sqlite test schema --- diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 89b8eda..8a61a40 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -153,9 +153,15 @@ automatically. =item auto_nextval -Set this to a true value for a column whose value is retrieved -automatically from an oracle sequence. If you do not use an Oracle -trigger to get the nextval, you have to set sequence as well. +Set this to a true value for a column whose value is retrieved automatically +from a sequence or function (if supported by your Storage driver.) For a +sequence, if you do not use a trigger to get the nextval, you have to set the +L value as well. + +Also set this for MSSQL columns with the 'uniqueidentifier' +L whose values you want to automatically +generate using C, unless they are a primary key in which case this will +be done anyway. =item extra diff --git a/lib/DBIx/Class/SQLAHacks.pm b/lib/DBIx/Class/SQLAHacks.pm index a1e6d1c..c03adbb 100644 --- a/lib/DBIx/Class/SQLAHacks.pm +++ b/lib/DBIx/Class/SQLAHacks.pm @@ -170,7 +170,8 @@ sub _Top { $quoted_alias = $self->_quote ('column_' . (@inner_select + 1) ); } # column name seen more than once - alias it - elsif ($orig_colname && ($seen_names{$orig_colname} > 1) ) { + elsif ($orig_colname && + ($seen_names{$orig_colname} && $seen_names{$orig_colname} > 1) ) { $quoted_alias = $self->_quote ("${table}__${orig_colname}"); } diff --git a/lib/DBIx/Class/Storage/DBI/MSSQL.pm b/lib/DBIx/Class/Storage/DBI/MSSQL.pm index 3a1f868..37733f6 100644 --- a/lib/DBIx/Class/Storage/DBI/MSSQL.pm +++ b/lib/DBIx/Class/Storage/DBI/MSSQL.pm @@ -6,12 +6,152 @@ use warnings; use base qw/DBIx::Class::Storage::DBI::AmbiguousGlob DBIx::Class::Storage::DBI/; use mro 'c3'; +use List::Util(); + +__PACKAGE__->mk_group_accessors(simple => qw/ + _identity _identity_method +/); + __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::MSSQL'); -sub _dbh_last_insert_id { - my ($self, $dbh, $source, $col) = @_; - my ($id) = $dbh->selectrow_array('SELECT SCOPE_IDENTITY()'); - return $id; +sub insert_bulk { + my $self = shift; + my ($source, $cols, $data) = @_; + + my $identity_insert = 0; + + COLUMNS: + foreach my $col (@{$cols}) { + if ($source->column_info($col)->{is_auto_increment}) { + $identity_insert = 1; + last COLUMNS; + } + } + + if ($identity_insert) { + my $table = $source->from; + $self->dbh->do("SET IDENTITY_INSERT $table ON"); + } + + $self->next::method(@_); + + if ($identity_insert) { + my $table = $source->from; + $self->dbh->do("SET IDENTITY_INSERT $table OFF"); + } +} + +sub insert { + my $self = shift; + my ($source, $to_insert) = @_; + + my $updated_cols = {}; + + my %guid_cols; + my @pk_cols = $source->primary_columns; + my %pk_cols; + @pk_cols{@pk_cols} = (); + + my @pk_guids = grep { + $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i + } @pk_cols; + + my @auto_guids = grep { + $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i + && + $source->column_info($_)->{auto_nextval} + } grep { not exists $pk_cols{$_} } $source->columns; + 
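+  # @pk_guids are uniqueidentifier primary key columns; @auto_guids are
+  # non-PK uniqueidentifier columns marked auto_nextval. Any of these the
+  # caller did not supply a value for get a NEWID() value generated below.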
+ my @get_guids_for = + grep { not exists $to_insert->{$_} } (@pk_guids, @auto_guids); + + for my $guid_col (@get_guids_for) { + my ($new_guid) = $self->dbh->selectrow_array('SELECT NEWID()'); + $updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid; + } + + $updated_cols = { %$updated_cols, %{ $self->next::method(@_) } }; + + return $updated_cols; +} + +sub _prep_for_execute { + my $self = shift; + my ($op, $extra_bind, $ident, $args) = @_; + +# cast MONEY values properly + if ($op eq 'insert' || $op eq 'update') { + my $fields = $args->[0]; + my $col_info = $self->_resolve_column_info($ident, [keys %$fields]); + + for my $col (keys %$fields) { + if ($col_info->{$col}{data_type} =~ /^money\z/i) { + my $val = $fields->{$col}; + $fields->{$col} = \['CAST(? AS MONEY)', [ $col => $val ]]; + } + } + } + + my ($sql, $bind) = $self->next::method (@_); + + if ($op eq 'insert') { + $sql .= ';SELECT SCOPE_IDENTITY()'; + + my $col_info = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]); + if (List::Util::first { $_->{is_auto_increment} } (values %$col_info) ) { + + my $table = $ident->from; + my $identity_insert_on = "SET IDENTITY_INSERT $table ON"; + my $identity_insert_off = "SET IDENTITY_INSERT $table OFF"; + $sql = "$identity_insert_on; $sql; $identity_insert_off"; + } + } + + return ($sql, $bind); +} + +sub _execute { + my $self = shift; + my ($op) = @_; + + my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_); + if ($op eq 'insert') { + $self->_identity($self->_fetch_identity($sth)); + } + + return wantarray ? ($rv, $sth, @bind) : $rv; +} + +sub _fetch_identity { + my ($self, $sth) = @_; + my ($identity) = $sth->fetchrow_array; + $sth->finish; + + if ((not defined $identity) && $self->_identity_method && + $self->_identity_method eq '@@identity') { + ($identity) = $self->_dbh->selectrow_array('select @@identity'); + } + + return $identity; +} + +sub last_insert_id { shift->_identity } + +# savepoint syntax is the same as in Sybase ASE + +sub _svp_begin { + my ($self, $name) = @_; + + $self->dbh->do("SAVE TRANSACTION $name"); +} + +# A new SAVE TRANSACTION with the same name releases the previous one. +sub _svp_release { 1 } + +sub _svp_rollback { + my ($self, $name) = @_; + + $self->dbh->do("ROLLBACK TRANSACTION $name"); } sub build_datetime_parser { @@ -25,49 +165,51 @@ sub build_datetime_parser { sub sqlt_type { 'SQLServer' } sub _sql_maker_opts { - my ( $self, $opts ) = @_; + my ( $self, $opts ) = @_; - if ( $opts ) { - $self->{_sql_maker_opts} = { %$opts }; - } + if ( $opts ) { + $self->{_sql_maker_opts} = { %$opts }; + } - return { limit_dialect => 'Top', %{$self->{_sql_maker_opts}||{}} }; + return { limit_dialect => 'Top', %{$self->{_sql_maker_opts}||{}} }; } 1; =head1 NAME -DBIx::Class::Storage::DBI::MSSQL - Storage::DBI subclass for MSSQL +DBIx::Class::Storage::DBI::MSSQL - Base Class for Microsoft SQL Server support +in DBIx::Class =head1 SYNOPSIS -This subclass supports MSSQL, and can in theory be used directly -via the C mechanism: +This is the base class for Microsoft SQL Server support, used by +L and +L. - $schema->storage_type('::DBI::MSSQL'); - $schema->connect_info('dbi:....', ...); +=head1 IMPLEMENTATION NOTES -However, as there is no L, you will probably want to use -one of the other DBD-specific MSSQL classes, such as -L. These classes will -merge this class with a DBD-specific class to obtain fully -correct behavior for your scenario. 
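To illustrate the INSERT handling that the hunks above move into the MSSQL
base class, here is a rough sketch (not part of the patch; the Artist table
and $schema come from the test scripts later in this diff) of what the
rewritten statements look like:

  # a plain create() against a table with an IDENTITY primary key ...
  my $new_artist = $schema->resultset('Artist')->create({ name => 'foo' });

  # ... is prepared by the new _prep_for_execute roughly as
  #
  #   INSERT INTO artist (name) VALUES (?);SELECT SCOPE_IDENTITY()
  #
  # and last_insert_id() returns the identity value captured by _execute.
  # If a value is supplied for an is_auto_increment column, the statement is
  # additionally wrapped in SET IDENTITY_INSERT artist ON / OFF, and MONEY
  # values are bound as CAST(? AS MONEY).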
+Microsoft SQL Server supports three methods of retrieving the IDENTITY
+value for an inserted row: IDENT_CURRENT, @@IDENTITY, and SCOPE_IDENTITY().
+SCOPE_IDENTITY is used here because it is the safest. However, it must
+be called in the same execute statement, not just the same connection.

-=head1 METHODS

+So, this implementation appends a SELECT SCOPE_IDENTITY() statement
+onto each INSERT to accommodate that requirement.

-=head2 last_insert_id

+C<SELECT @@IDENTITY> will
+be used instead, which on SQL Server 2005 and later will return erroneous
+results on tables which have an on insert trigger that inserts into another
+table with an C<IDENTITY> column.
+
+=cut
+
+sub connect_call_use_dynamic_cursors {
+  my $self = shift;
+
+  if (ref($self->_dbi_connect_info->[0]) eq 'CODE') {
+    croak 'cannot set DBI attributes on a CODE ref connect_info';
   }

-  $self->next::method(@_);
+  my $dbi_attrs = $self->_dbi_connect_info->[-1];

-  if ($identity_insert) {
-    my $table = $source->from;
-    $self->dbh->do("SET IDENTITY_INSERT $table OFF");
+  unless (ref($dbi_attrs) && Scalar::Util::reftype($dbi_attrs) eq 'HASH') {
+    $dbi_attrs = {};
+    push @{ $self->_dbi_connect_info }, $dbi_attrs;
   }
+
+  if (not exists $dbi_attrs->{odbc_cursortype}) {
+    # turn on support for multiple concurrent statements, unless overridden
+    $dbi_attrs->{odbc_cursortype} = 2;
+    my $connected = defined $self->_dbh;
+    $self->disconnect;
+    $self->ensure_connected if $connected;
+    $self->_set_dynamic_cursors;
+  }
+}
+
+sub _set_dynamic_cursors {
+  my $self = shift;
+  $self->_using_dynamic_cursors(1);
+  $self->_identity_method('@@identity');
 }

-sub _prep_for_execute {
+sub _rebless {
+  no warnings 'uninitialized';
   my $self = shift;
-  my ($op, $extra_bind, $ident, $args) = @_;
-
-# cast MONEY values properly
-  if ($op eq 'insert' || $op eq 'update') {
-    my $fields = $args->[0];
-    my $col_info = $self->_resolve_column_info($ident, [keys %$fields]);
-
-    for my $col (keys %$fields) {
-      if ($col_info->{$col}{data_type} =~ /^money\z/i) {
-        my $val = $fields->{$col};
-        $fields->{$col} = \['CAST(? AS MONEY)', [ $col => $val ]];
-      }
-    }
+
+  if (ref($self->_dbi_connect_info->[0]) ne 'CODE' &&
+      eval { $self->_dbi_connect_info->[-1]{odbc_cursortype} } == 2) {
+    $self->_set_dynamic_cursors;
+    return;
   }

-  my ($sql, $bind) = $self->next::method (@_);
+  $self->_using_dynamic_cursors(0);
+}
+
+=head2 connect_call_use_server_cursors

-  if ($op eq 'insert') {
-    $sql .= ';SELECT SCOPE_IDENTITY()';
+Use as:

-    my $col_info = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]);
-    if (List::Util::first { $_->{is_auto_increment} } (values %$col_info) ) {
+  on_connect_call => 'use_server_cursors'

-      my $table = $ident->from;
-      my $identity_insert_on = "SET IDENTITY_INSERT $table ON";
-      my $identity_insert_off = "SET IDENTITY_INSERT $table OFF";
-      $sql = "$identity_insert_on; $sql; $identity_insert_off";
-    }
-  }
+May allow multiple active select statements. See
+L<DBD::ODBC> for more information.

-  return ($sql, $bind);
-}
+Takes an optional parameter for the value to set the attribute to, default is
+C<2>.

-sub _execute {
-  my $self = shift;
-  my ($op) = @_;
+B<WARNING>: this does not work on all versions of SQL Server, and may lock up
+your database!

-  my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
-  if ($op eq 'insert') {
-    $self->{_scope_identity} = $sth->fetchrow_array;
-    $sth->finish;
-  }
+=cut
+
+sub connect_call_use_server_cursors {
+  my $self = shift;
+  my $sql_rowset_size = shift || 2;

-  return wantarray ?
($rv, $sth, @bind) : $rv; + $self->_dbh->{odbc_SQL_ROWSET_SIZE} = $sql_rowset_size; } -sub last_insert_id { shift->{_scope_identity} } +=head2 connect_call_use_mars -1; +Use as: -__END__ + on_connect_call => 'use_mars' -=head1 NAME +Use to enable a feature of SQL Server 2005 and later, "Multiple Active Result +Sets". See L +for more information. -DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server - Support specific -to Microsoft SQL Server over ODBC +B: This has implications for the way transactions are handled. -=head1 DESCRIPTION +=cut -This class implements support specific to Microsoft SQL Server over ODBC, -including auto-increment primary keys and SQL::Abstract::Limit dialect. It -is loaded automatically by by DBIx::Class::Storage::DBI::ODBC when it -detects a MSSQL back-end. +sub connect_call_use_mars { + my $self = shift; -=head1 IMPLEMENTATION NOTES + my $dsn = $self->_dbi_connect_info->[0]; -Microsoft SQL Server supports three methods of retrieving the IDENTITY -value for inserted row: IDENT_CURRENT, @@IDENTITY, and SCOPE_IDENTITY(). -SCOPE_IDENTITY is used here because it is the safest. However, it must -be called is the same execute statement, not just the same connection. + if (ref($dsn) eq 'CODE') { + croak 'cannot change the DBI DSN on a CODE ref connect_info'; + } -So, this implementation appends a SELECT SCOPE_IDENTITY() statement -onto each INSERT to accommodate that requirement. + if ($dsn !~ /MARS_Connection=/) { + $self->_dbi_connect_info->[0] = "$dsn;MARS_Connection=Yes"; + my $connected = defined $self->_dbh; + $self->disconnect; + $self->ensure_connected if $connected; + } +} + +1; -=head1 AUTHORS +=head1 AUTHOR -Marc Mims C<< >> +See L. =head1 LICENSE diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm b/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm index be57610..af2a98f 100644 --- a/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm +++ b/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm @@ -27,6 +27,19 @@ sub _ping { return $@ ? 0 : 1; } +sub _placeholders_supported { + my $self = shift; + my $dbh = $self->_dbh; + + return eval { +# There's also $dbh->{syb_dynamic_supported} but it can be inaccurate for this +# purpose. + local $dbh->{PrintError} = 0; +# this specifically tests a bind that is NOT a string + $dbh->selectrow_array('select 1 where 1 = ?', {}, 1); + }; +} + 1; =head1 AUTHORS diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm b/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm index 600db7a..5e53118 100644 --- a/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm +++ b/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm @@ -5,35 +5,53 @@ use warnings; use base qw/ DBIx::Class::Storage::DBI::Sybase::Base - DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server + DBIx::Class::Storage::DBI::MSSQL /; use mro 'c3'; +sub _rebless { + my $self = shift; + my $dbh = $self->_dbh; + + if (not $self->_placeholders_supported) { + bless $self, + 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars'; + $self->_rebless; + } + +# LongReadLen doesn't work with MSSQL through DBD::Sybase, and the default is +# huge on some versions of SQL server and can cause memory problems, so we +# fix it up here. 
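+# Any LongReadLen passed in the user's DBI connect attributes is applied
+# server-side with "set textsize" instead, falling back to the DBD::Sybase
+# default below.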
+ my $text_size = eval { $self->_dbi_connect_info->[-1]->{LongReadLen} } || + 32768; # the DBD::Sybase default + + $dbh->do("set textsize $text_size"); +} + 1; =head1 NAME -DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server - Storage::DBI subclass for MSSQL via -DBD::Sybase +DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server - Support for Microsoft +SQL Server via DBD::Sybase =head1 SYNOPSIS This subclass supports MSSQL server connections via L. -=head1 CAVEATS - -This storage driver uses L as a base. -This means that bind variables will be interpolated (properly quoted of course) -into the SQL query itself, without using bind placeholders. +=head1 DESCRIPTION -More importantly this means that caching of prepared statements is explicitly -disabled, as the interpolation renders it useless. +This driver tries to determine whether your version of L and +supporting libraries (usually FreeTDS) support using placeholders, if not the +storage will be reblessed to +L. -=head1 AUTHORS +The MSSQL specific functionality is provided by +L. -Brandon L Black +=head1 AUTHOR -Justin Hunter +See L. =head1 LICENSE diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server/NoBindVars.pm b/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server/NoBindVars.pm new file mode 100644 index 0000000..16db6d1 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server/NoBindVars.pm @@ -0,0 +1,53 @@ +package DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars; + +use strict; +use warnings; + +use base qw/ + DBIx::Class::Storage::DBI::NoBindVars + DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server +/; +use mro 'c3'; + +sub _rebless { + my $self = shift; + + $self->disable_sth_caching(1); +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars - Support for Microsoft +SQL Server via DBD::Sybase without placeholders + +=head1 SYNOPSIS + +This subclass supports MSSQL server connections via DBD::Sybase when ? style +placeholders are not available. + +=head1 DESCRIPTION + +If you are using this driver then your combination of L and +libraries (most likely FreeTDS) does not support ? style placeholders. + +This storage driver uses L as a base. +This means that bind variables will be interpolated (properly quoted of course) +into the SQL query itself, without using bind placeholders. + +More importantly this means that caching of prepared statements is explicitly +disabled, as the interpolation renders it useless. + +In all other respects, it is a subclass of +L. + +=head1 AUTHOR + +See L. + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
+ +=cut diff --git a/t/746mssql.t b/t/746mssql.t index fa8f137..f9cbf85 100644 --- a/t/746mssql.t +++ b/t/746mssql.t @@ -12,8 +12,9 @@ my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PA plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test' unless ($dsn && $user); -plan tests => 33; +plan tests => 39; +DBICTest::Schema->load_classes('ArtistGUID'); my $schema = DBICTest::Schema->connect($dsn, $user, $pass); { @@ -33,7 +34,6 @@ $schema->storage->dbh_do (sub { my ($storage, $dbh) = @_; eval { $dbh->do("DROP TABLE artist") }; $dbh->do(<<'SQL'); - CREATE TABLE artist ( artistid INT IDENTITY NOT NULL, name VARCHAR(100), @@ -41,19 +41,27 @@ CREATE TABLE artist ( charfield CHAR(10) NULL, primary key(artistid) ) - SQL - }); my %seen_id; -# fresh $schema so we start unconnected -$schema = DBICTest::Schema->connect($dsn, $user, $pass); +my @opts = ( + { on_connect_call => 'use_dynamic_cursors' }, + {}, +); +my $new; -# test primary key handling -my $new = $schema->resultset('Artist')->create({ name => 'foo' }); -ok($new->artistid > 0, "Auto-PK worked"); +# test Auto-PK with different options +for my $opts (@opts) { + $schema = DBICTest::Schema->clone; + $schema->connection($dsn, $user, $pass, $opts); + + $schema->resultset('Artist')->search({ name => 'foo' })->delete; + + $new = $schema->resultset('Artist')->create({ name => 'foo' }); + ok($new->artistid > 0, "Auto-PK worked"); +} $seen_id{$new->artistid}++; @@ -75,6 +83,49 @@ $it->next; is( $it->next->name, "Artist 2", "iterator->next ok" ); is( $it->next, undef, "next past end of resultset ok" ); +# test GUID columns + +$schema->storage->dbh_do (sub { + my ($storage, $dbh) = @_; + eval { $dbh->do("DROP TABLE artist") }; + $dbh->do(<<'SQL'); +CREATE TABLE artist ( + artistid UNIQUEIDENTIFIER NOT NULL, + name VARCHAR(100), + rank INT NOT NULL DEFAULT '13', + charfield CHAR(10) NULL, + a_guid UNIQUEIDENTIFIER, + primary key(artistid) +) +SQL +}); + +my $row; +lives_ok { + $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' }) +} 'created a row with a GUID'; + +ok( + eval { $row->artistid }, + 'row has GUID PK col populated', +); +diag $@ if $@; + +ok( + eval { $row->a_guid }, + 'row has a GUID col with auto_nextval populated', +); +diag $@ if $@; + +my $row_from_db = $schema->resultset('ArtistGUID') + ->search({ name => 'mtfnpy' })->first; + +is $row_from_db->artistid, $row->artistid, + 'PK GUID round trip'; + +is $row_from_db->a_guid, $row->a_guid, + 'NON-PK GUID round trip'; + # test MONEY type $schema->storage->dbh_do (sub { my ($storage, $dbh) = @_; @@ -92,7 +143,6 @@ SQL my $rs = $schema->resultset('Money'); -my $row; lives_ok { $row = $rs->create({ amount => 100 }); } 'inserted a money value'; @@ -116,8 +166,6 @@ $schema->storage->dbh_do (sub { eval { $dbh->do("DROP TABLE Owners") }; eval { $dbh->do("DROP TABLE Books") }; $dbh->do(<<'SQL'); - - CREATE TABLE Books ( id INT IDENTITY (1, 1) NOT NULL, source VARCHAR(100), @@ -130,7 +178,6 @@ CREATE TABLE Owners ( id INT IDENTITY (1, 1) NOT NULL, name VARCHAR(100), ) - SQL }); @@ -268,11 +315,9 @@ $schema->storage->_sql_maker->{name_sep} = '.'; # clean up our mess END { - if (my $dbh = eval { $schema->storage->_dbh }) { - $dbh->do('DROP TABLE artist'); - $dbh->do('DROP TABLE money_test'); - $dbh->do('DROP TABLE Books'); - $dbh->do('DROP TABLE Owners'); - } + if (my $dbh = eval { $schema->storage->_dbh }) { + eval { $dbh->do("DROP TABLE $_") } + for qw/artist money_test Books Owners/; + } } # vim:sw=2 sts=2 diff --git 
a/t/74mssql.t b/t/74mssql.t index cbaffc0..55d599f 100644 --- a/t/74mssql.t +++ b/t/74mssql.t @@ -18,104 +18,128 @@ my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/}; plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test' unless ($dsn); -plan tests => 13; +my $TESTS = 13; -my $schema = DBICTest::Schema->clone; -$schema->connection($dsn, $user, $pass); +plan tests => $TESTS * 2; -# start disconnected to test reconnection -$schema->storage->ensure_connected; -$schema->storage->_dbh->disconnect; +my @storage_types = ( + 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server', + 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars', +); +my $storage_idx = -1; +my $schema; + +for my $storage_type (@storage_types) { + $storage_idx++; -isa_ok($schema->storage, 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server'); + $schema = DBICTest::Schema->clone; + + if ($storage_idx != 0) { # autodetect + $schema->storage_type($storage_type); + } -my $dbh; -lives_ok (sub { - $dbh = $schema->storage->dbh; -}, 'reconnect works'); + $schema->connection($dsn, $user, $pass); -$dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL - DROP TABLE artist"); -$dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL - DROP TABLE cd"); + $schema->storage->ensure_connected; -$dbh->do("CREATE TABLE artist (artistid INT IDENTITY PRIMARY KEY, name VARCHAR(100), rank INT DEFAULT '13', charfield CHAR(10) NULL);"); -$dbh->do("CREATE TABLE cd (cdid INT IDENTITY PRIMARY KEY, artist INT, title VARCHAR(100), year VARCHAR(100), genreid INT NULL, single_track INT NULL);"); + if ($storage_idx == 0 && ref($schema->storage) =~ /NoBindVars\z/) { + my $tb = Test::More->builder; + $tb->skip('no placeholders') for 1..$TESTS; + next; + } + + isa_ok($schema->storage, $storage_type); + +# start disconnected to test reconnection + $schema->storage->_dbh->disconnect; + + my $dbh; + lives_ok (sub { + $dbh = $schema->storage->dbh; + }, 'reconnect works'); + + $dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL + DROP TABLE artist"); + $dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL + DROP TABLE cd"); + + $dbh->do("CREATE TABLE artist (artistid INT IDENTITY PRIMARY KEY, name VARCHAR(100), rank INT DEFAULT '13', charfield CHAR(10) NULL);"); + $dbh->do("CREATE TABLE cd (cdid INT IDENTITY PRIMARY KEY, artist INT, title VARCHAR(100), year VARCHAR(100), genreid INT NULL, single_track INT NULL);"); # Just to test compat shim, Auto is in Core -$schema->class('Artist')->load_components('PK::Auto::MSSQL'); + $schema->class('Artist')->load_components('PK::Auto::MSSQL'); # Test PK -my $new = $schema->resultset('Artist')->create( { name => 'foo' } ); -ok($new->artistid, "Auto-PK worked"); + my $new = $schema->resultset('Artist')->create( { name => 'foo' } ); + ok($new->artistid, "Auto-PK worked"); # Test LIMIT -for (1..6) { - $schema->resultset('Artist')->create( { name => 'Artist ' . $_, rank => $_ } ); -} + for (1..6) { + $schema->resultset('Artist')->create( { name => 'Artist ' . $_, rank => $_ } ); + } -my $it = $schema->resultset('Artist')->search( { }, - { rows => 3, - offset => 2, - order_by => 'artistid' - } -); + my $it = $schema->resultset('Artist')->search( { }, + { rows => 3, + offset => 2, + order_by => 'artistid' + } + ); # Test ? in data don't get treated as placeholders -my $cd = $schema->resultset('CD')->create( { - artist => 1, - title => 'Does this break things?', - year => 2007, -} ); -ok($cd->id, 'Not treating ? 
in data as placeholders'); - -is( $it->count, 3, "LIMIT count ok" ); -ok( $it->next->name, "iterator->next ok" ); -$it->next; -$it->next; -is( $it->next, undef, "next past end of resultset ok" ); + my $cd = $schema->resultset('CD')->create( { + artist => 1, + title => 'Does this break things?', + year => 2007, + } ); + ok($cd->id, 'Not treating ? in data as placeholders'); + + is( $it->count, 3, "LIMIT count ok" ); + ok( $it->next->name, "iterator->next ok" ); + $it->next; + $it->next; + is( $it->next, undef, "next past end of resultset ok" ); # test MONEY column support -$schema->storage->dbh_do (sub { - my ($storage, $dbh) = @_; - eval { $dbh->do("DROP TABLE money_test") }; - $dbh->do(<<'SQL'); + $schema->storage->dbh_do (sub { + my ($storage, $dbh) = @_; + eval { $dbh->do("DROP TABLE money_test") }; + $dbh->do(<<'SQL'); -CREATE TABLE money_test ( - id INT IDENTITY PRIMARY KEY, - amount MONEY NULL -) + CREATE TABLE money_test ( + id INT IDENTITY PRIMARY KEY, + amount MONEY NULL + ) SQL -}); + }); -my $rs = $schema->resultset('Money'); + my $rs = $schema->resultset('Money'); -my $row; -lives_ok { - $row = $rs->create({ amount => 100 }); -} 'inserted a money value'; + my $row; + lives_ok { + $row = $rs->create({ amount => 100 }); + } 'inserted a money value'; -is $rs->find($row->id)->amount, 100, 'money value round-trip'; + is $rs->find($row->id)->amount, 100, 'money value round-trip'; -lives_ok { - $row->update({ amount => 200 }); -} 'updated a money value'; + lives_ok { + $row->update({ amount => 200 }); + } 'updated a money value'; -is $rs->find($row->id)->amount, 200, 'updated money value round-trip'; + is $rs->find($row->id)->amount, 200, 'updated money value round-trip'; -lives_ok { - $row->update({ amount => undef }); -} 'updated a money value to NULL'; + lives_ok { + $row->update({ amount => undef }); + } 'updated a money value to NULL'; -is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip'; + is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip'; +} # clean up our mess END { - $dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL DROP TABLE artist") - if $dbh; - $dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL DROP TABLE cd") - if $dbh; - $dbh->do("IF OBJECT_ID('money_test', 'U') IS NOT NULL DROP TABLE money_test") - if $dbh; + if (my $dbh = eval { $schema->storage->dbh }) { + $dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL DROP TABLE artist"); + $dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL DROP TABLE cd"); + $dbh->do("IF OBJECT_ID('money_test', 'U') IS NOT NULL DROP TABLE money_test"); + } } diff --git a/t/inflate/datetime_mssql.t b/t/inflate/datetime_mssql.t new file mode 100644 index 0000000..bc85fdc --- /dev/null +++ b/t/inflate/datetime_mssql.t @@ -0,0 +1,85 @@ +use strict; +use warnings; + +use Test::More; +use Test::Exception; +use lib qw(t/lib); +use DBICTest; + +my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/}; + +if (not ($dsn && $user)) { + plan skip_all => + 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test' . 
+ "\nWarning: This test drops and creates a table called 'track'"; +} else { + eval "use DateTime; use DateTime::Format::Strptime;"; + if ($@) { + plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing'; + } + else { + plan tests => 4 * 2; # (tests * dt_types) + } +} + +my $schema = DBICTest::Schema->clone; + +$schema->connection($dsn, $user, $pass); +$schema->storage->ensure_connected; + +# coltype, column, datehash +my @dt_types = ( + ['DATETIME', + 'last_updated_at', + { + year => 2004, + month => 8, + day => 21, + hour => 14, + minute => 36, + second => 48, + nanosecond => 500000000, + }], + ['SMALLDATETIME', # minute precision + 'small_dt', + { + year => 2004, + month => 8, + day => 21, + hour => 14, + minute => 36, + }], +); + +for my $dt_type (@dt_types) { + my ($type, $col, $sample_dt) = @$dt_type; + + eval { $schema->storage->dbh->do("DROP TABLE track") }; + $schema->storage->dbh->do(<<"SQL"); +CREATE TABLE track ( + trackid INT IDENTITY PRIMARY KEY, + cd INT, + position INT, + $col $type, +) +SQL + ok(my $dt = DateTime->new($sample_dt)); + + my $row; + ok( $row = $schema->resultset('Track')->create({ + $col => $dt, + cd => 1, + })); + ok( $row = $schema->resultset('Track') + ->search({ trackid => $row->trackid }, { select => [$col] }) + ->first + ); + is( $row->$col, $dt, 'DateTime roundtrip' ); +} + +# clean up our mess +END { + if (my $dbh = eval { $schema->storage->_dbh }) { + $dbh->do('DROP TABLE track'); + } +} diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index 4966800..a6de595 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -30,6 +30,10 @@ __PACKAGE__->add_columns( data_type => 'datetime', is_nullable => 1 }, + small_dt => { # for mssql and sybase DT tests + data_type => 'smalldatetime', + is_nullable => 1 + }, ); __PACKAGE__->set_primary_key('trackid');