X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI%2FSybase%2FASE.pm;h=017709c999d7a8a3240ae6a051deb30536a9cb9a;hb=514b84f6b60b566d75d2ff2ddd08659c4cf7b427;hp=eceef2016cc4c4e12b97d307ff963402e30238b0;hpb=5529838f7afff91467ef2664087999ab222da48d;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm index eceef20..017709c 100644 --- a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm +++ b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm @@ -11,12 +11,9 @@ use base qw/ use mro 'c3'; use DBIx::Class::Carp; use Scalar::Util qw/blessed weaken/; -use List::Util 'first'; -use Sub::Name(); -use Data::Dumper::Concise 'Dumper'; use Try::Tiny; use Context::Preserve 'preserve_context'; -use DBIx::Class::_Util 'sigwarn_silencer'; +use DBIx::Class::_Util qw( sigwarn_silencer dbic_internal_try dump_value scope_guard set_subname ); use namespace::clean; __PACKAGE__->sql_limit_dialect ('GenericSubQ'); @@ -166,7 +163,7 @@ for my $method (@also_proxy_to_extra_storages) { my $replaced = __PACKAGE__->can($method); - *{$method} = Sub::Name::subname $method => sub { + *{$method} = set_subname $method => sub { my $self = shift; $self->_writer_storage->$replaced(@_) if $self->_writer_storage; $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage; @@ -256,23 +253,6 @@ sub _is_lob_column { sub _prep_for_execute { my ($self, $op, $ident, $args) = @_; - # -### This is commented out because all tests pass. However I am leaving it -### here as it may prove necessary (can't think through all combinations) -### BTW it doesn't currently work exactly - need better sensitivity to - # currently set value - # - #my ($op, $ident) = @_; - # - # inherit these from the parent for the duration of _prep_for_execute - # Don't know how to make a localizing loop with if's, otherwise I would - #local $self->{_autoinc_supplied_for_op} - # = $self->_parent_storage->_autoinc_supplied_for_op - #if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage; - #local $self->{_perform_autoinc_retrieval} - # = $self->_parent_storage->_perform_autoinc_retrieval - #if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage; - my $limit; # extract and use shortcut on limit without offset if ($op eq 'select' and ! $args->[4] and $limit = $args->[3]) { $args = [ @$args ]; @@ -353,10 +333,12 @@ sub insert { my $columns_info = $source->columns_info; - my $identity_col = - (first { $columns_info->{$_}{is_auto_increment} } - keys %$columns_info ) - || ''; + my ($identity_col) = grep + { $columns_info->{$_}{is_auto_increment} } + keys %$columns_info + ; + + $identity_col = '' if ! defined $identity_col; # FIXME - this is duplication from DBI.pm. When refactored towards # the LobWriter this can be folded back where it belongs. @@ -364,10 +346,10 @@ sub insert { ? 1 : 0 ; - local $self->{_perform_autoinc_retrieval} = - ($identity_col and ! exists $to_insert->{$identity_col}) - ? $identity_col - : undef + + local $self->{_perform_autoinc_retrieval} = $self->{_autoinc_supplied_for_op} + ? undef + : $identity_col ; # check for empty insert @@ -391,53 +373,42 @@ sub insert { my $blob_cols = $self->_remove_blob_cols($source, $to_insert); - # do we need the horrific SELECT MAX(COL) hack? 
- my $need_dumb_last_insert_id = ( - $self->_perform_autoinc_retrieval - && - ($self->_identity_method||'') ne '@@IDENTITY' - ); - - my $next = $self->next::can; - - # we are already in a transaction, or there are no blobs - # and we don't need the PK - just (try to) do it - if ($self->{transaction_depth} - || (!$blob_cols && !$need_dumb_last_insert_id) + # if a new txn is needed - it must happen on the _writer/new connection (for now) + my $guard; + if ( + ! $self->transaction_depth + and + ( + $blob_cols + or + # do we need the horrific SELECT MAX(COL) hack? + ( + $self->_perform_autoinc_retrieval + and + ( ($self->_identity_method||'') ne '@@IDENTITY' ) + ) + ) ) { - return $self->_insert ( - $next, $source, $to_insert, $blob_cols, $identity_col - ); + $self = $self->_writer_storage; + $guard = $self->txn_scope_guard; } - # otherwise use the _writer_storage to do the insert+transaction on another - # connection - my $guard = $self->_writer_storage->txn_scope_guard; - - my $updated_cols = $self->_writer_storage->_insert ( - $next, $source, $to_insert, $blob_cols, $identity_col - ); - - $self->_identity($self->_writer_storage->_identity); - - $guard->commit; - - return $updated_cols; -} - -sub _insert { - my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_; + my $updated_cols = $self->next::method ($source, $to_insert); - my $updated_cols = $self->$next ($source, $to_insert); - - my $final_row = { - ($identity_col ? - ($identity_col => $self->last_insert_id($source, $identity_col)) : ()), - %$to_insert, - %$updated_cols, - }; + $self->_insert_blobs ( + $source, + $blob_cols, + { + ( $identity_col + ? ( $identity_col => $self->last_insert_id($source, $identity_col) ) + : () + ), + %$to_insert, + %$updated_cols, + }, + ) if $blob_cols; - $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols; + $guard->commit if $guard; return $updated_cols; } @@ -474,10 +445,10 @@ sub update { if (keys %$fields) { # Now set the identity update flags for the actual update - local $self->{_autoinc_supplied_for_op} = (first + local $self->{_autoinc_supplied_for_op} = grep { $_->{is_auto_increment} } values %{ $source->columns_info([ keys %$fields ]) } - ) ? 1 : 0; + ; my $next = $self->next::can; my $args = \@_; @@ -492,10 +463,10 @@ sub update { } else { # Set the identity update flags for the actual update - local $self->{_autoinc_supplied_for_op} = (first + local $self->{_autoinc_supplied_for_op} = grep { $_->{is_auto_increment} } values %{ $source->columns_info([ keys %$fields ]) } - ) ? 1 : 0; + ; return $self->next::method(@_); } @@ -507,17 +478,14 @@ sub _insert_bulk { my $columns_info = $source->columns_info; - my $identity_col = - first { $columns_info->{$_}{is_auto_increment} } + my ($identity_col) = + grep { $columns_info->{$_}{is_auto_increment} } keys %$columns_info; # FIXME - this is duplication from DBI.pm. When refactored towards # the LobWriter this can be folded back where it belongs. - local $self->{_autoinc_supplied_for_op} = - (first { $_ eq $identity_col } @$cols) - ? 1 - : 0 - ; + local $self->{_autoinc_supplied_for_op} + = grep { $_ eq $identity_col } @$cols; my $use_bulk_api = $self->_bulk_storage && @@ -535,10 +503,10 @@ sub _insert_bulk { # next::method uses a txn anyway, but it ends too early in case we need to # select max(col) to get the identity for inserting blobs. - ($self, my $guard) = $self->{transaction_depth} == 0 ? 
- ($self->_writer_storage, $self->_writer_storage->txn_scope_guard) - : - ($self, undef); + ($self, my $guard) = $self->transaction_depth + ? ($self, undef) + : ($self->_writer_storage, $self->_writer_storage->txn_scope_guard) + ; $self->next::method(@_); @@ -580,7 +548,7 @@ sub _insert_bulk { my @source_columns = $source->columns; # bcp identity index is 1-based - my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns); + my ($identity_idx) = grep { $source_columns[$_] eq $identity_col } (0..$#source_columns); $identity_idx = defined $identity_idx ? $identity_idx + 1 : 0; my @new_data; @@ -607,7 +575,7 @@ sub _insert_bulk { # This ignores any data conversion errors detected by the client side libs, as # they are usually harmless. my $orig_cslib_cb = DBD::Sybase::set_cslib_cb( - Sub::Name::subname _insert_bulk_cslib_errhandler => sub { + set_subname _insert_bulk_cslib_errhandler => sub { my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_; return 1 if $errno == 36; @@ -622,7 +590,7 @@ sub _insert_bulk { }); my $exception = ''; - try { + dbic_internal_try { my $bulk = $self->_bulk_storage; my $guard = $bulk->txn_scope_guard; @@ -678,7 +646,7 @@ sub _insert_bulk { if ($exception =~ /-Y option/) { my $w = 'Sybase bulk API operation failed due to character set incompatibility, ' - . 'reverting to regular array inserts. Try unsetting the LANG environment variable' + . 'reverting to regular array inserts. Try unsetting the LC_ALL environment variable' ; $w .= "\n$exception" if $self->debug; carp $w; @@ -709,7 +677,8 @@ sub _remove_blob_cols { } else { $fields->{$col} = \"''"; - $blob_cols{$col} = $blob_val unless $blob_val eq ''; + $blob_cols{$col} = $blob_val + if length $blob_val; } } } @@ -735,7 +704,7 @@ sub _remove_blob_cols_array { else { $data->[$j][$i] = \"''"; $blob_cols[$j][$i] = $blob_val - unless $blob_val eq ''; + if length $blob_val; } } } @@ -747,7 +716,7 @@ sub _remove_blob_cols_array { sub _update_blobs { my ($self, $source, $blob_cols, $where) = @_; - my @primary_cols = try + my @primary_cols = dbic_internal_try { $source->_pri_cols_or_die } catch { $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_") @@ -757,7 +726,7 @@ sub _update_blobs { if ( ref $where eq 'HASH' and - @primary_cols == grep { defined $where->{$_} } @primary_cols + ! grep { ! defined $where->{$_} } @primary_cols ) { my %row_to_update; @row_to_update{@primary_cols} = @{$where}{@primary_cols}; @@ -776,26 +745,29 @@ sub _update_blobs { } sub _insert_blobs { - my ($self, $source, $blob_cols, $row) = @_; - my $dbh = $self->_get_dbh; + my ($self, $source, $blob_cols, $row_data) = @_; my $table = $source->name; - my %row = %$row; - my @primary_cols = try + my @primary_cols = dbic_internal_try { $source->_pri_cols_or_die } catch { $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_") }; $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values') - if ((grep { defined $row{$_} } @primary_cols) != @primary_cols); + if grep { ! 
defined $row_data->{$_} } @primary_cols;
+
+  # if we are 2-phase inserting a blob - there is nothing to retrieve anymore,
+  # regardless of the previous state of the flag
+  local $self->{_perform_autoinc_retrieval}
+    if $self->_perform_autoinc_retrieval;
+
+  my %where = map {( $_ => $row_data->{$_} )} @primary_cols;
 
   for my $col (keys %$blob_cols) {
     my $blob = $blob_cols->{$col};
 
-    my %where = map { ($_, $row{$_}) } @primary_cols;
-
     my $cursor = $self->select ($source, [$col], \%where, {});
     $cursor->next;
     my $sth = $cursor->sth;
@@ -803,11 +775,17 @@ sub _insert_blobs {
     if (not $sth) {
       $self->throw_exception(
           "Could not find row in table '$table' for blob update:\n"
-        . (Dumper \%where)
+        . dump_value \%where
       );
     }
 
-    try {
+    # FIXME - it is not clear if this is needed at all. But it's been
+    # there since 2009 ( d867eedaa ), might as well let sleeping dogs
+    # lie... sigh.
+    weaken( my $wsth = $sth );
+    my $g = scope_guard { $wsth->finish if $wsth };
+
+    dbic_internal_try {
       do {
         $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
       } while $sth->fetch;
@@ -835,9 +813,6 @@ sub _insert_blobs {
       else {
         $self->throw_exception($_);
       }
-    }
-    finally {
-      $sth->finish if $sth;
     };
   }
 }
@@ -1130,7 +1105,7 @@ L<populate|DBIx::Class::ResultSet/populate> call, eg.:
 
 B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns> calls in your C<Result> classes B<must> list columns in database order for this
-to work. Also, you may have to unset the C<LANG> environment variable before
+to work. Also, you may have to unset the C<LC_ALL> environment variable before
 loading your app, as C<SET LANG> is not yet supported in DBD::Sybase .
 
 When inserting IMAGE columns using this method, you'll need to use
@@ -1196,13 +1171,13 @@ bulk_insert using prepare_cached
 (see comments.)
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-# vim:sts=2 sw=2:
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
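
For orientation, here is a minimal sketch of how application code exercises the paths reworked by this patch. The schema class, the Document resultset and its column names below are hypothetical illustrations, not identifiers taken from the patch itself; only the connect-time blob_setup call and the populate/create behaviour come from this module's documentation.

  use strict;
  use warnings;

  # Hypothetical application schema - illustration only.
  use MyApp::Schema;

  my $schema = MyApp::Schema->connect(
    'dbi:Sybase:server=SYBASE;database=myapp',
    'username', 'password',
    { on_connect_call => 'blob_setup' },   # connect_call_blob_setup, needed for IMAGE/TEXT writes
  );

  my $image_bytes = "...raw IMAGE/TEXT payload...";

  # A row with an auto-increment PK plus an IMAGE column goes through the
  # consolidated insert() path above: the INSERT, the identity retrieval and
  # the follow-up _insert_blobs() all run under one txn_scope_guard on the
  # writer connection when no outer transaction is already open.
  my $doc = $schema->resultset('Document')->create({
    name => 'spec.pdf',
    body => $image_bytes,   # blob column, written in the second phase
  });

  # populate() in void context may use the DBD::Sybase bulk API; on a character
  # set failure it now falls back to array inserts and suggests unsetting LC_ALL.
  $schema->resultset('Document')->populate([
    [ qw(name body) ],
    map { [ "doc_$_.pdf", $image_bytes ] } 1 .. 100,
  ]);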