__PACKAGE__->mk_group_accessors('simple' =>
qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
_bulk_storage _is_bulk_storage _began_bulk_work
- _bulk_disabled_due_to_coderef_connect_info_warned
_identity_method/
);
my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;
# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
-
$self->next::method($source, \%blobs_to_empty, $where, @rest);
# Now update the blobs before the other columns in case the update of other
my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;
- my @source_columns = $source->columns;
-
my $use_bulk_api =
$self->_bulk_storage &&
$self->_get_dbh->{syb_has_blk};
- if ((not $use_bulk_api)
- &&
- (ref($self->_dbi_connect_info->[0]) eq 'CODE')
- &&
- (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
- carp <<'EOF';
-Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
-regular array inserts.
-EOF
- $self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
+ if (! $use_bulk_api and ref($self->_dbi_connect_info->[0]) eq 'CODE') {
+ carp_unique( join ' ',
+ 'Bulk API support disabled due to use of a CODEREF connect_info.',
+ 'Reverting to regular array inserts.',
+ );
}
if (not $use_bulk_api) {
# otherwise, use the bulk API
# rearrange @$data so that columns are in database order
- my %orig_idx;
- @orig_idx{@$cols} = 0..$#$cols;
+# and so we submit a full column list
+ my %orig_order = map { $cols->[$_] => $_ } 0..$#$cols;
- my %new_idx;
- @new_idx{@source_columns} = 0..$#source_columns;
+ my @source_columns = $source->columns;
+
+ # bcp identity index is 1-based
+ my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns);
+ $identity_idx = defined $identity_idx ? $identity_idx + 1 : 0;
my @new_data;
- for my $datum (@$data) {
- my $new_datum = [];
- for my $col (@source_columns) {
-# identity data will be 'undef' if not $is_identity_insert
-# columns with defaults will also be 'undef'
- $new_datum->[ $new_idx{$col} ] =
- exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
- }
- push @new_data, $new_datum;
+ for my $slice_idx (0..$#$data) {
+ push @new_data, [map {
+ # identity data will be 'undef' if not $is_identity_insert
+ # columns with defaults will also be 'undef'
+ exists $orig_order{$_}
+ ? $data->[$slice_idx][$orig_order{$_}]
+ : undef
+ } @source_columns];
}
-# bcp identity index is 1-based
- my $identity_idx = exists $new_idx{$identity_col} ?
- $new_idx{$identity_col} + 1 : 0;
+ my $proto_bind = $self->_resolve_bindattrs(
+ $source,
+ [map {
+ [ { dbic_colname => $source_columns[$_], _bind_data_slice_idx => $_ }
+ => $new_data[0][$_] ]
+    } (0 .. $#source_columns) ],
+ $columns_info
+ );
## Set a client-side conversion error handler, straight from DBD::Sybase docs.
# This ignores any data conversion errors detected by the client side libs, as
my $guard = $bulk->txn_scope_guard;
+## FIXME - once this is done - address the FIXME on finish() below
## XXX get this to work instead of our own $sth
## will require SQLA or *Hacks changes for ordered columns
# $bulk->next::method($source, \@source_columns, \@new_data, {
}
);
- my @bind = map { [ $source_columns[$_] => $_ ] } (0 .. $#source_columns);
+ {
+ # FIXME the $sth->finish in _execute_array does a rollback for some
+ # reason. Disable it temporarily until we fix the SQLMaker thing above
+ no warnings 'redefine';
+ no strict 'refs';
+ local *{ref($sth).'::finish'} = sub {};
- $self->_execute_array(
- $source, $sth, \@bind, \@source_columns, \@new_data, sub {
- $guard->commit
- }
- );
+ $self->_execute_array(
+ $source, $sth, $proto_bind, \@source_columns, \@new_data
+ );
+ }
+
+ $guard->commit;
$bulk->_query_end($sql);
} catch {
}
}
-sub _dbh_execute_array {
-  my ($self, $sth, $tuple_status, $cb) = @_;
-
-  my $rv = $self->next::method($sth, $tuple_status);
-  $cb->() if $cb;
-
-  return $rv;
-}
-
# Make sure blobs are not bound as placeholders, and return any non-empty ones
# as a hash.
sub _remove_blob_cols {
use lib qw(t/lib);
use DBICTest;
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
+if (not ($dsn && $user)) {
+ plan skip_all => join ' ',
+ 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test.',
+ 'Warning: This test drops and creates the tables:',
+ "'artist', 'money_test' and 'bindtype_test'",
+ ;
+}
+
plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_ase')
unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_ase');
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
+# first run the test without the lang variable set
+# it is important to do this before module load, hence
+# the subprocess before the optdep check
+if ($ENV{LANG} and $ENV{LANG} ne 'C') {
+ my $oldlang = $ENV{LANG};
+ local $ENV{LANG} = 'C';
-my $TESTS = 66 + 2;
+ pass ("Your lang is set to $oldlang - testing with C first");
-if (not ($dsn && $user)) {
- plan skip_all =>
- 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' .
- "\nWarning: This test drops and creates the tables " .
- "'artist', 'money_test' and 'bindtype_test'";
-} else {
- plan tests => $TESTS*2 + 1;
+ my @cmd = ($^X, __FILE__);
+
+ # this is cheating, and may even hang here and there (testing on windows passed fine)
+ # will be replaced with Test::SubExec::Noninteractive in due course
+ require IPC::Open2;
+ IPC::Open2::open2(my $out, my $in, @cmd);
+ while (my $ln = <$out>) {
+ print " $ln";
+ }
+
+ wait;
+ ok (! $?, "Wstat $? from: @cmd");
}
my @storage_types = (
if ($storage_idx == 0 &&
$schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')) {
-# no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
- my $tb = Test::More->builder;
- $tb->skip('no placeholders') for 1..$TESTS;
+ # no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
+ skip "Skipping entire test for $storage_type - no placeholder support", 1;
next;
}
is $ping_count, 0, 'no pings';
+done_testing;
+
# clean up our mess
END {
if (my $dbh = eval { $schema->storage->_dbh }) {