X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI%2FSybase.pm;h=eeb4f01edf2a3806495f10925641a7d166d036b6;hb=e6dd7b42b418053f02424d3ab2703dc97e2afbde;hp=e8b90527e0d51b4e54c723cab2af9bd2b83ec0bf;hpb=6a9765c19a33187ad323e0eb685d38c08a97deaf;p=dbsrgits%2FDBIx-Class.git

diff --git a/lib/DBIx/Class/Storage/DBI/Sybase.pm b/lib/DBIx/Class/Storage/DBI/Sybase.pm
index e8b9052..eeb4f01 100644
--- a/lib/DBIx/Class/Storage/DBI/Sybase.pm
+++ b/lib/DBIx/Class/Storage/DBI/Sybase.pm
@@ -9,8 +9,9 @@ use base qw/
 /;
 use mro 'c3';
 use Carp::Clan qw/^DBIx::Class/;
-use List::Util ();
-use Sub::Name ();
+use List::Util();
+use Sub::Name();
+use Data::Dumper::Concise();
 
 __PACKAGE__->mk_group_accessors('simple' =>
     qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
@@ -277,7 +278,8 @@ sub _prep_for_execute {
     );
   }
 
-  if ($op eq 'insert' && (not $bound_identity_col) && $identity_col) {
+  if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
+      (not $self->{insert_bulk})) {
     $sql =
       "$sql\n" .
       $self->_fetch_identity_sql($ident, $identity_col);
@@ -449,6 +451,8 @@ sub update {
   # it is originally put by _remove_blob_cols .)
   my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;
 
+# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
+
   $self->next::method($source, \%blobs_to_empty, $where, @rest);
 
   # Now update the blobs before the other columns in case the update of other
@@ -505,20 +509,14 @@ EOF
   if (not $use_bulk_api) {
     my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);
 
-    my $dumb_last_insert_id =
-         $identity_col
-      && (not $is_identity_insert)
-      && ($self->_identity_method||'') ne '@@IDENTITY';
+# _execute_array uses a txn anyway, but it ends too early in case we need to
+# select max(col) to get the identity for inserting blobs.
+    ($self, my $guard) = $self->{transaction_depth} == 0 ?
+      ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
+      :
+      ($self, undef);
 
-    ($self, my ($guard)) = do {
-      if ($self->{transaction_depth} == 0 && $blob_cols &&
-          $dumb_last_insert_id) {
-        ($self->_writer_storage, $self->_writer_storage->txn_scope_guard);
-      }
-      else {
-        ($self, undef);
-      }
-    };
+    local $self->{insert_bulk} = 1;
 
     $self->next::method(@_);
 
@@ -547,6 +545,7 @@ EOF
     }
 
     $guard->commit if $guard;
+
     return;
   }
 
@@ -631,7 +630,9 @@ EOF
     };
 
     $self->_execute_array(
-      $source, $sth, \@bind, \@source_columns, \@new_data, $guard
+      $source, $sth, \@bind, \@source_columns, \@new_data, sub {
+        $guard->commit
+      }
     );
 
     $bulk->_query_end($sql);
@@ -661,6 +662,15 @@ EOF
   }
 }
 
+sub _dbh_execute_array {
+  my ($self, $sth, $tuple_status, $cb) = @_;
+
+  my $rv = $self->next::method($sth, $tuple_status);
+  $cb->() if $cb;
+
+  return $rv;
+}
+
 # Make sure blobs are not bound as placeholders, and return any non-empty ones
 # as a hash.
 sub _remove_blob_cols {
@@ -770,7 +780,7 @@ sub _insert_blobs {
 
       $self->throw_exception(
           "Could not find row in table '$table' for blob update:\n"
-        . $self->_pretty_print (\%where)
+        . Data::Dumper::Concise::Dumper (\%where)
      );
     }
 
@@ -852,7 +862,7 @@ C<SMALLDATETIME> columns only have minute precision.
 
 sub connect_call_datetime_setup {
   my $self = shift;
-  my $dbh = $self->_dbh;
+  my $dbh = $self->_get_dbh;
 
   if ($dbh->can('syb_date_fmt')) {
     # amazingly, this works with FreeTDS
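
The central mechanism in the insert_bulk hunks above is that the transaction
guard is no longer handed to _execute_array directly; instead a closure
(sub { $guard->commit }) is threaded down to the new _dbh_execute_array
override, which fires it only after execute_array() has actually run. The
standalone Perl sketch below is not part of the patch: it illustrates that
commit-after-bulk-execute pattern with plain DBI against an in-memory SQLite
handle (assuming DBD::SQLite is installed), and execute_array_with_callback
is a hypothetical helper standing in for the patched _dbh_execute_array.

#!/usr/bin/env perl
use strict;
use warnings;

use DBI;

# Hypothetical stand-in for the _dbh_execute_array override in the patch:
# run the bulk execute, then fire the optional callback.  The patch passes
# sub { $guard->commit } here so the guard commits only once the bulk
# insert has really happened.
sub execute_array_with_callback {
  my ($sth, $columns, $cb) = @_;

  my @tuple_status;
  my $rv = $sth->execute_array(
    { ArrayTupleStatus => \@tuple_status },
    @$columns,                 # one arrayref of values per placeholder
  );

  $cb->() if $cb;
  return $rv;
}

my $dbh = DBI->connect('dbi:SQLite:dbname=:memory:', '', '', {
  RaiseError => 1,
  AutoCommit => 1,
});

$dbh->do('CREATE TABLE artist (name TEXT, rank INTEGER)');

my $sth = $dbh->prepare('INSERT INTO artist (name, rank) VALUES (?, ?)');

$dbh->begin_work;              # rough analogue of the txn_scope_guard above

execute_array_with_callback(
  $sth,
  [ [ qw/foo bar baz/ ], [ 1, 2, 3 ] ],  # column-major, as execute_array expects
  sub { $dbh->commit },                  # analogous to sub { $guard->commit }
);

print 'rows inserted: ',
  $dbh->selectrow_array('SELECT COUNT(*) FROM artist'), "\n";

$dbh->disconnect;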