From: Rafael Kitover
Date: Fri, 18 Sep 2009 09:10:16 +0000 (+0000)
Subject: support for blobs in insert_bulk fallback
X-Git-Tag: v0.08112~14^2~1^2
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=905e6f073b89facb367cdedecd42ddbc5d770d61;p=dbsrgits%2FDBIx-Class.git

support for blobs in insert_bulk fallback
---

diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index 8c7f647..1ce71fc 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -1337,18 +1337,52 @@ sub insert_bulk {
   }
 
   my %colvalues;
-  my $table = $source->from;
   @colvalues{@$cols} = (0..$#$cols);
 
+  # bind literal sql if it's the same in all slices
+  for my $i (0..$#$cols) {
+    my $first_val = $data->[0][$i];
+    next unless (Scalar::Util::reftype($first_val)||'') eq 'SCALAR';
+
+    $colvalues{ $cols->[$i] } = $first_val
+      if (grep {
+        (Scalar::Util::reftype($_)||'') eq 'SCALAR' &&
+        $$_ eq $$first_val
+      } map $data->[$_][$i], (1..$#$data)) == (@$data - 1);
+  }
+
   my ($sql, $bind) = $self->_prep_for_execute (
     'insert', undef, $source, [\%colvalues]
   );
-  my @bind = @$bind
-    or croak 'Cannot insert_bulk without support for placeholders';
+  my @bind = @$bind;
+
+  my $empty_bind = 1 if (not @bind) &&
+    (grep { (Scalar::Util::reftype($_)||'') eq 'SCALAR' } values %colvalues)
+    == @$cols;
+
+  if ((not @bind) && (not $empty_bind)) {
+    croak 'Cannot insert_bulk without support for placeholders';
+  }
 
   $self->_query_start( $sql, @bind );
   my $sth = $self->sth($sql, 'insert', $sth_attr);
 
+  if ($empty_bind) {
+    # bind_param_array doesn't work if there are no binds
+    eval {
+      local $self->_get_dbh->{RaiseError} = 1;
+      local $self->_get_dbh->{PrintError} = 0;
+      foreach (0..$#$data) {
+        $sth->execute;
+        $sth->fetchall_arrayref;
+      }
+    };
+    my $exception = $@;
+    $sth->finish;
+    $self->throw_exception($exception) if $exception;
+    return;
+  }
+
 #  @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args
 
   ## This must be an arrayref, else nothing works!
@@ -1375,6 +1409,7 @@ sub insert_bulk {
     $sth->bind_param_array( $placeholder_index, [@data], $attributes );
     $placeholder_index++;
   }
+
   my $rv = eval { $sth->execute_array({ArrayTupleStatus => $tuple_status}) };
   $sth->finish;
   if (my $err = $@ || $sth->errstr) {
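
The fallback path above hinges on one check: a column may be inlined as
literal SQL only when every row carries a scalar ref with byte-identical SQL
text. A minimal standalone sketch of that check (the $cols/$data shapes match
what insert_bulk receives; the column names and the GETDATE() literal are
invented for illustration):

  use strict;
  use warnings;
  use Scalar::Util ();

  # Two rows; the 'created' column holds the same literal SQL in each slice.
  my $cols = [ 'name', 'created' ];
  my $data = [
    [ 'foo', \'GETDATE()' ],
    [ 'bar', \'GETDATE()' ],
  ];

  for my $i (0 .. $#$cols) {
    my $first_val = $data->[0][$i];
    next unless (Scalar::Util::reftype($first_val) || '') eq 'SCALAR';

    # count how many of the remaining rows hold an identical literal
    my $matches = grep {
      (Scalar::Util::reftype($_) || '') eq 'SCALAR' && $$_ eq $$first_val
    } map { $data->[$_][$i] } 1 .. $#$data;

    print "$cols->[$i]: inline as literal SQL\n"
      if $matches == @$data - 1;
  }

If every column ends up inlined this way, the prepared statement has no
placeholders left at all, which is why the new $empty_bind branch simply
re-executes the all-literal statement once per row instead of going through
bind_param_array.
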
diff --git a/lib/DBIx/Class/Storage/DBI/Sybase.pm b/lib/DBIx/Class/Storage/DBI/Sybase.pm
index 27d9f37..219ca56 100644
--- a/lib/DBIx/Class/Storage/DBI/Sybase.pm
+++ b/lib/DBIx/Class/Storage/DBI/Sybase.pm
@@ -520,8 +520,13 @@ sub insert_bulk {
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
+  my $identity_col = List::Util::first
+    { $source->column_info($_)->{is_auto_increment} }
+    $source->columns;
+
   my $is_identity_insert = (List::Util::first
-    { $source->column_info ($_)->{is_auto_increment} } @{$cols}
+    { $source->column_info ($_)->{is_auto_increment} }
+    @{$cols}
   ) ? 1 : 0;
 
   my @source_columns = $source->columns;
@@ -541,16 +546,52 @@ EOF
   }
 
   if (not $use_bulk_api) {
-    if ($is_identity_insert) {
-       $self->_set_identity_insert ($source->name);
-    }
+    my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);
+
+    my $dumb_last_insert_id =
+         $identity_col
+      && (not $is_identity_insert)
+      && ($self->_identity_method||'') ne '@@IDENTITY';
+
+    ($self, my ($guard)) = do {
+      if ($self->{transaction_depth} == 0 &&
+          ($blob_cols || $dumb_last_insert_id)) {
+        ($self->_writer_storage, $self->_writer_storage->txn_scope_guard);
+      }
+      else {
+        ($self, undef);
+      }
+    };
+    $self->_set_identity_insert ($source->name) if $is_identity_insert;
 
     $self->next::method(@_);
 
+    $self->_unset_identity_insert ($source->name) if $is_identity_insert;
-    if ($is_identity_insert) {
-       $self->_unset_identity_insert ($source->name);
+    if ($blob_cols) {
+      if ($is_identity_insert) {
+        $self->_insert_blobs_array ($source, $blob_cols, $cols, $data);
+      }
+      else {
+        my @cols_with_identities = (@$cols, $identity_col);
+
+        ## calculate identities
+        # XXX This assumes identities always increase by 1, which may or may not
+        # be true.
+        my ($last_identity) =
+          $self->_dbh->selectrow_array (
+            $self->_fetch_identity_sql($source, $identity_col)
+          );
+        my @identities = (($last_identity - @$data + 1) .. $last_identity);
+
+        my @data_with_identities = map [@$_, shift @identities], @$data;
+
+        $self->_insert_blobs_array (
+          $source, $blob_cols, \@cols_with_identities, \@data_with_identities
+        );
+      }
     }
 
+    $guard->commit if $guard;
     return;
   }
 
@@ -575,9 +616,6 @@ EOF
     push @new_data, $new_datum;
   }
 
-  my $identity_col = List::Util::first
-    { $source->column_info($_)->{is_auto_increment} } @source_columns;
-
   # bcp identity index is 1-based
   my $identity_idx = exists $new_idx{$identity_col} ?
     $new_idx{$identity_col} + 1 : 0;
@@ -727,6 +765,33 @@ sub _remove_blob_cols {
 
   return keys %blob_cols ? \%blob_cols : undef;
 }
 
+# same for insert_bulk
+sub _remove_blob_cols_array {
+  my ($self, $source, $cols, $data) = @_;
+
+  my @blob_cols;
+
+  for my $i (0..$#$cols) {
+    my $col = $cols->[$i];
+
+    if ($self->_is_lob_type($source->column_info($col)->{data_type})) {
+      for my $j (0..$#$data) {
+        my $blob_val = delete $data->[$j][$i];
+        if (not defined $blob_val) {
+          $data->[$j][$i] = \'NULL';
+        }
+        else {
+          $data->[$j][$i] = \"''";
+          $blob_cols[$j][$i] = $blob_val
+            unless $blob_val eq '';
+        }
+      }
+    }
+  }
+
+  return @blob_cols ? \@blob_cols : undef;
+}
+
 sub _update_blobs {
   my ($self, $source, $blob_cols, $where) = @_;
@@ -829,6 +894,26 @@ sub _insert_blobs {
   }
 }
 
+sub _insert_blobs_array {
+  my ($self, $source, $blob_cols, $cols, $data) = @_;
+
+  for my $i (0..$#$data) {
+    my $datum = $data->[$i];
+
+    my %row;
+    @row{ @$cols } = @$datum;
+
+    my %blob_vals;
+    for my $j (0..$#$cols) {
+      if (exists $blob_cols->[$i][$j]) {
+        $blob_vals{ $cols->[$j] } = $blob_cols->[$i][$j];
+      }
+    }
+
+    $self->_insert_blobs ($source, \%blob_vals, \%row);
+  }
+}
+
 =head2 connect_call_datetime_setup
 
 Used as:
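
The non-bulk path above back-fills blob rows by recomputing their identity
values from the last identity seen after the insert. A small sketch of that
arithmetic with made-up numbers (the XXX comment in the patch already flags
the increment-by-1 assumption):

  use strict;
  use warnings;

  my $last_identity = 103;              # what _fetch_identity_sql would return
  my $data = [ ['a'], ['b'], ['c'] ];   # the three rows just inserted

  # walk backwards from the last identity, one value per row
  my @identities = (($last_identity - @$data + 1) .. $last_identity);
  my @data_with_identities = map { [ @$_, shift @identities ] } @$data;

  # yields ['a', 101], ['b', 102], ['c', 103]

This read-back is also why the whole sequence runs under a txn_scope_guard on
the separate _writer_storage connection whenever blobs or the identity
retrieval are in play: the identity query has to see exactly the rows this
insert produced.
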
diff --git a/t/746sybase.t b/t/746sybase.t
index cc43de6..430d5a4 100644
--- a/t/746sybase.t
+++ b/t/746sybase.t
@@ -245,30 +245,35 @@ SQL
   }
 
 # make sure insert_bulk works a second time on the same connection
-  lives_ok {
-    $schema->resultset('Artist')->populate([
-      {
-        name => 'bulk artist 1',
-        charfield => 'bar',
-      },
-      {
-        name => 'bulk artist 2',
-        charfield => 'bar',
-      },
-      {
-        name => 'bulk artist 3',
-        charfield => 'bar',
-      },
-    ]);
-  } 'insert_bulk via populate called a second time';
+  SKIP: {
+    skip 'insert_bulk not supported', 3
+      unless $schema->storage->_can_insert_bulk;
+
+    lives_ok {
+      $schema->resultset('Artist')->populate([
+        {
+          name => 'bulk artist 1',
+          charfield => 'bar',
+        },
+        {
+          name => 'bulk artist 2',
+          charfield => 'bar',
+        },
+        {
+          name => 'bulk artist 3',
+          charfield => 'bar',
+        },
+      ]);
+    } 'insert_bulk via populate called a second time';
 
-  is $bulk_rs->count, 3,
-    'correct number inserted via insert_bulk';
+    is $bulk_rs->count, 3,
+      'correct number inserted via insert_bulk';
 
-  is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
-    'column set correctly via insert_bulk');
+    is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
+      'column set correctly via insert_bulk');
 
-  $bulk_rs->delete;
+    $bulk_rs->delete;
+  }
 
 # test invalid insert_bulk (missing required column)
 #
@@ -280,9 +285,10 @@ SQL
         charfield => 'foo',
       }
     ]);
-  } qr/no value or default|does not allow null/i,
+  } qr/no value or default|does not allow null|placeholders/i,
 # The second pattern is the error from fallback to regular array insert on
 # incompatible charset.
+# The third is for ::NoBindVars with no syb_has_blk.
   'insert_bulk with missing required column throws error';
 
 # now test insert_bulk with IDENTITY_INSERT
@@ -342,7 +348,7 @@ SQL
 CREATE TABLE bindtype_test
 (
   id    INT   IDENTITY PRIMARY KEY,
-  bytea INT   NULL,
+  bytea IMAGE NULL,
   blob  IMAGE NULL,
   clob  TEXT  NULL
 )
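
For context, the behavior these tests pin down is driven from ordinary
populate() calls. Something along these lines (assuming a DBICTest-style
'BindType' source mapped to the bindtype_test table above, with $binary_data
standing in for real IMAGE content) now works on the non-bulk fallback path:

  my $binary_data = "\x00\x01\x02" x 1024;   # stand-in blob payload

  $schema->resultset('BindType')->populate([
    { bytea => $binary_data, blob => $binary_data, clob => 'clob 1' },
    { bytea => $binary_data, blob => $binary_data, clob => 'clob 2' },
  ]);

The blob values never ride through bind_param_array: they are stripped out by
_remove_blob_cols_array, replaced with NULL or '' literals for the bulk
statement, and written back row by row via _insert_blobs_array afterwards.
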