my ($source, $fields, $where) = @_;
my $wantarray = wantarray;
+
my $blob_cols = $self->_remove_blob_cols($source, $fields);
+ my $table = $source->name;
+
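+  # find the identity (auto-increment) column, if any; setting it explicitly
+  # requires IDENTITY_UPDATE to be switched on for the table during the UPDATE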
+ my $identity_col = List::Util::first
+ { $source->column_info($_)->{is_auto_increment} }
+ $source->columns;
+
+ my $is_identity_update = $identity_col && defined $fields->{$identity_col};
+
if (not $blob_cols) {
+    $self->_set_identity_insert($table, 'update') if $is_identity_update;
-    return $self->next::method(@_);
+    my @res = $wantarray ? $self->next::method(@_)
+                         : scalar $self->next::method(@_);
+    $self->_unset_identity_insert($table, 'update') if $is_identity_update;
+    return $wantarray ? @res : $res[0];
}
+# check if condition and fields allow for a 2-step update
+ $self->_assert_blob_update_possible($source, $fields, $where);
+
# update+blob update(s) done atomically on separate connection
$self = $self->_writer_storage;
my $guard = $self->txn_scope_guard;
+ $self->_set_identity_insert($table, 'update') if $is_identity_update;
+
my @res;
  if ($wantarray) {
    @res    = $self->next::method(@_);
  }
  elsif (defined $wantarray) {
    $res[0] = $self->next::method(@_);
  }
  else {
    $self->next::method(@_);
  }
- $self->_update_blobs($source, $blob_cols, $where);
+ $self->_unset_identity_insert($table, 'update') if $is_identity_update;
+
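+  # the UPDATE may have changed columns used in the condition, so rebuild the
+  # WHERE clause from the new values before locating rows for the blob update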
+  my %new_where = map {
+    $_ => (exists $fields->{$_} ? $fields->{$_} : $where->{$_})
+  } keys %$where;
+
+ $self->_update_blobs($source, $blob_cols, \%new_where);
$guard->commit;
return $wantarray ? @res : $res[0];
}
+sub _assert_blob_update_possible {
+ my ($self, $source, $fields, $where) = @_;
+
+ my $table = $source->name;
+
+# If the $where condition shares no columns with $fields (what gets updated),
+# the 2-step update is safe.
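+# (e.g. a condition on the primary key while only non-key columns are being
+# set leaves the condition usable for re-selecting the rows afterwards)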
+ my %count;
+ $count{$_}++ foreach keys %$where, keys %$fields;
+ return 1 unless List::Util::first { $_ == 2 } values %count;
+
+# Otherwise require that the updated columns include a full primary key or a
+# full unique constraint, so the rows can be re-located by their new values.
+ my (@primary_cols) = $source->primary_columns;
+  return 1 if @primary_cols
+    and (grep exists $fields->{$_}, @primary_cols) == @primary_cols;
+
+ my %unique_constraints = $source->unique_constraints;
+ for my $uniq_constr (values %unique_constraints) {
+ return 1 if (grep exists $fields->{$_}, @$uniq_constr) == @$uniq_constr;
+ }
+
+# otherwise throw exception
+ require Data::Dumper;
+ local $Data::Dumper::Terse = 1;
+ local $Data::Dumper::Indent = 1;
+ local $Data::Dumper::Useqq = 1;
+ local $Data::Dumper::Quotekeys = 0;
+ local $Data::Dumper::Sortkeys = 1;
+
+  croak sprintf
+    "2-step TEXT/IMAGE update on table '%s' impossible for condition:\n%s\n" .
+    "Setting columns:\n%s\n",
+    $table,
+    Data::Dumper::Dumper($where),
+    Data::Dumper::Dumper($fields);
+}
+
### the insert_bulk stuff stolen from DBI/MSSQL.pm
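+# ASE only accepts an explicit value for an identity column while
+# IDENTITY_INSERT (for INSERTs) or IDENTITY_UPDATE (for UPDATEs) is ON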
sub _set_identity_insert {
- my ($self, $table) = @_;
+ my ($self, $table, $op) = @_;
my $sql = sprintf (
- 'SET IDENTITY_INSERT %s ON',
+ 'SET IDENTITY_%s %s ON',
+    uc($op || 'insert'),
$self->sql_maker->_quote ($table),
);
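+  # run the SET statement through the query tracer so it shows up in debug
+  # output (the tests grep the trace for IDENTITY_INSERT / IDENTITY_UPDATE)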
+ $self->_query_start($sql);
+
my $dbh = $self->_get_dbh;
eval { $dbh->do ($sql) };
- if ($@) {
+ my $exception = $@;
+
+ $self->_query_end($sql);
+
+ if ($exception) {
$self->throw_exception (sprintf "Error executing '%s': %s",
$sql,
      $dbh->errstr,
    );
  }
}
sub _unset_identity_insert {
- my ($self, $table) = @_;
+ my ($self, $table, $op) = @_;
my $sql = sprintf (
- 'SET IDENTITY_INSERT %s OFF',
+ 'SET IDENTITY_%s %s OFF',
+    uc($op || 'insert'),
$self->sql_maker->_quote ($table),
);
+ $self->_query_start($sql);
+
my $dbh = $self->_get_dbh;
$dbh->do ($sql);
+
+ $self->_query_end($sql);
}
# XXX this should use the DBD::Sybase bulk API, where possible
my ($self, $source, $blob_cols, $row) = @_;
my $dbh = $self->_get_dbh;
- my $table = $source->from;
+ my $table = $source->name;
my %row = %$row;
my (@primary_cols) = $source->primary_columns;
$cursor->next;
my $sth = $cursor->sth;
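+  # no statement handle means the re-select did not find the row the blob
+  # values should be attached to; report the failed condition verbosely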
+ if (not $sth) {
+ require Data::Dumper;
+ local $Data::Dumper::Terse = 1;
+ local $Data::Dumper::Indent = 1;
+ local $Data::Dumper::Useqq = 1;
+ local $Data::Dumper::Quotekeys = 0;
+ local $Data::Dumper::Sortkeys = 1;
+
+ croak "\nCould not find row in table '$table' for blob update:\n".
+ Data::Dumper::Dumper(\%where)."\n";
+ }
+
eval {
do {
$sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
use Test::More;
use Test::Exception;
use lib qw(t/lib);
+
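+# give BindType a plain integer column; the tests below use it to build an
+# update whose condition cannot survive the 2-step TEXT/IMAGE update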
+BEGIN {
+ require DBICTest::Schema::BindType;
+ DBICTest::Schema::BindType->add_column(
+ anint => { data_type => 'integer' }
+ );
+}
+
use DBICTest;
-use DBIx::Class::Storage::DBI::Sybase;
-use DBIx::Class::Storage::DBI::Sybase::NoBindVars;
+
+require DBIx::Class::Storage::DBI::Sybase;
+require DBIx::Class::Storage::DBI::Sybase::NoBindVars;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
-my $TESTS = 48 + 2;
+my $TESTS = 51 + 2;
if (not ($dsn && $user)) {
plan skip_all =>
is( $it->count, 7, 'COUNT of GROUP_BY ok' );
-# do an identity insert (which should happen with no txn when using
-# placeholders.)
+# do an IDENTITY_INSERT
{
no warnings 'redefine';
$schema->resultset('Artist')
->create({ artistid => 999, name => 'mtfnpy' });
- ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT');
+ ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used');
SKIP: {
skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1
}
}
+# do an IDENTITY_UPDATE
+ {
+ my @debug_out;
+ local $schema->storage->{debug} = 1;
+ local $schema->storage->debugobj->{callback} = sub {
+ push @debug_out, $_[1];
+ };
+
+ lives_and {
+ $schema->resultset('Artist')
+ ->find(999)->update({ artistid => 555 });
+ ok((grep /IDENTITY_UPDATE/i, @debug_out));
+ } 'IDENTITY_UPDATE used';
+ $ping_count-- if $@;
+ }
+
+
# test insert_bulk using populate, this should always pass whether or not it
# does anything Sybase specific or not. Just here to aid debugging.
lives_ok {
# mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t
SKIP: {
- skip 'TEXT/IMAGE support does not work with FreeTDS', 13
+ skip 'TEXT/IMAGE support does not work with FreeTDS', 16
if $schema->storage->using_freetds;
my $dbh = $schema->storage->_dbh;
id INT IDENTITY PRIMARY KEY,
bytea INT NULL,
blob IMAGE NULL,
- clob TEXT NULL
+ clob TEXT NULL,
+ anint INT NULL
)
],{ RaiseError => 1, PrintError => 0 });
}
# blob insert with explicit PK
# also a good opportunity to test IDENTITY_INSERT
- {
- local $SIG{__WARN__} = sub {};
- eval { $dbh->do('DROP TABLE bindtype_test') };
- $dbh->do(qq[
- CREATE TABLE bindtype_test
- (
- id INT IDENTITY PRIMARY KEY,
- bytea INT NULL,
- blob IMAGE NULL,
- clob TEXT NULL
- )
- ],{ RaiseError => 1, PrintError => 0 });
- }
+ $rs->delete;
+
my $created = eval { $rs->create( { id => 1, blob => $binstr{large} } ) };
ok(!$@, "inserted large blob without dying with manual PK");
diag $@ if $@;
diag $@ if $@;
ok($got eq $new_str, "verified updated blob");
+ # try a blob update with IDENTITY_UPDATE
+ lives_and {
+ $new_str = $binstr{large} . 'hlagh';
+ $rs->find(1)->update({ id => 999, blob => $new_str });
+ ok($rs->find(999)->blob eq $new_str);
+ } 'verified updated blob with IDENTITY_UPDATE';
+
## try multi-row blob update
# first insert some blobs
- $rs->find(1)->delete;
+ $rs->delete;
$rs->create({ blob => $binstr{large} }) for (1..3);
$new_str = $binstr{large} . 'foo';
$rs->update({ blob => $new_str });
is((grep $_->blob eq $new_str, $rs->all), 3, 'multi-row blob update');
+
+ # make sure impossible blob update throws
+ throws_ok {
+ $rs->update({ anint => 5 });
+ $rs->create({ anint => 6 });
+ $rs->search({ anint => 5 })->update({ blob => $new_str, anint => 6 });
+ } qr/impossible/, 'impossible blob update throws';
}
# test MONEY column support