__PACKAGE__->sql_limit_dialect ('RowCountOrGenericSubQ');
__PACKAGE__->sql_quote_char ([qw/[ ]/]);
-__PACKAGE__->datetime_parser_type('DateTime::Format::Sybase');
+__PACKAGE__->datetime_parser_type(
+ 'DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format'
+);
__PACKAGE__->mk_group_accessors('simple' =>
qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
_bulk_storage _is_bulk_storage _began_bulk_work
- _bulk_disabled_due_to_coderef_connect_info_warned
_identity_method/
);
+
my @also_proxy_to_extra_storages = qw/
connect_call_set_auto_cast auto_cast connect_call_blob_setup
connect_call_datetime_setup
my $no_bind_vars = __PACKAGE__ . '::NoBindVars';
if ($self->using_freetds) {
- carp <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};
+ carp_once <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};
You are using FreeTDS with Sybase.
keys %$columns_info
;
- if (($op eq 'insert' && $bound_identity_col) ||
- ($op eq 'update' && exists $args->[0]{$identity_col})) {
+ if (
+ ($bound_identity_col and $op eq 'insert')
+ or
+ (
+ $op eq 'update'
+ and
+ defined $identity_col
+ and
+ exists $args->[0]{$identity_col}
+ )
+ ) {
$sql = join ("\n",
$self->_set_table_identity_sql($op => $table, 'on'),
$sql,
);
}
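    # Illustration only (nothing here is verbatim output): for an insert with
    # an explicitly bound identity value the generated statement presumably
    # gets wrapped roughly as
    #   SET IDENTITY_INSERT <table> ON
    #   INSERT INTO <table> ...
    # with the corresponding 'off' call issued outside of this hunk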
- if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
- (not $self->{insert_bulk})) {
+ if (
+ (not $bound_identity_col)
+ and
+ $identity_col
+ and
+ (not $self->{insert_bulk})
+ and
+ $op eq 'insert'
+ ) {
$sql =
"$sql\n" .
$self->_fetch_identity_sql($ident, $identity_col);
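    # Sketch (identifiers hypothetical): when the identity is not bound
    # explicitly, the freshly generated value is fetched back in the same
    # batch - per the comment further down this appears to boil down to a
    # select max() on the identity column, e.g.
    #   INSERT INTO artist ( name ) VALUES ( ? )
    #   SELECT MAX(artistid) FROM artist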
my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;
# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
-
$self->next::method($source, \%blobs_to_empty, $where, @rest);
# Now update the blobs before the other columns in case the update of other
# columns makes the search condition invalid.
my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;
- my @source_columns = $source->columns;
-
my $use_bulk_api =
$self->_bulk_storage &&
$self->_get_dbh->{syb_has_blk};
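  # Note: the BLK path therefore requires both a dedicated bulk-login storage
  # and a DBD::Sybase built with the BLK API (advertised through the
  # syb_has_blk handle attribute)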
- if ((not $use_bulk_api)
- &&
- (ref($self->_dbi_connect_info->[0]) eq 'CODE')
- &&
- (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
- carp <<'EOF';
-Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
-regular array inserts.
-EOF
- $self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
+ if (! $use_bulk_api and ref($self->_dbi_connect_info->[0]) eq 'CODE') {
+ carp_unique( join ' ',
+ 'Bulk API support disabled due to use of a CODEREF connect_info.',
+ 'Reverting to regular array inserts.',
+ );
}
if (not $use_bulk_api) {
my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);
-# _execute_array uses a txn anyway, but it ends too early in case we need to
+# next::method uses a txn anyway, but it ends too early in case we need to
# select max(col) to get the identity for inserting blobs.
($self, my $guard) = $self->{transaction_depth} == 0 ?
($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
# otherwise, use the bulk API
# rearrange @$data so that columns are in database order
- my %orig_idx;
- @orig_idx{@$cols} = 0..$#$cols;
+# and so we submit a full column list
+ my %orig_order = map { $cols->[$_] => $_ } 0..$#$cols;
- my %new_idx;
- @new_idx{@source_columns} = 0..$#source_columns;
+ my @source_columns = $source->columns;
+
+ # bcp identity index is 1-based
+ my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns);
+ $identity_idx = defined $identity_idx ? $identity_idx + 1 : 0;
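    # e.g. with hypothetical @source_columns = qw/artistid name rank/ and
    # artistid being the identity, $identity_idx ends up as 1; a value of 0
    # presumably tells the bcp layer that no identity column is in play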
my @new_data;
- for my $datum (@$data) {
- my $new_datum = [];
- for my $col (@source_columns) {
-# identity data will be 'undef' if not $is_identity_insert
-# columns with defaults will also be 'undef'
- $new_datum->[ $new_idx{$col} ] =
- exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
- }
- push @new_data, $new_datum;
+ for my $slice_idx (0..$#$data) {
+ push @new_data, [map {
+ # identity data will be 'undef' if not $is_identity_insert
+ # columns with defaults will also be 'undef'
+ exists $orig_order{$_}
+ ? $data->[$slice_idx][$orig_order{$_}]
+ : undef
+ } @source_columns];
}
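    # Illustration (column names hypothetical): with @source_columns of
    # (artistid, name, rank) and $cols = [qw/rank name/], an incoming slice
    # [ 3, 'foo' ] becomes [ undef, 'foo', 3 ]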
-# bcp identity index is 1-based
- my $identity_idx = exists $new_idx{$identity_col} ?
- $new_idx{$identity_col} + 1 : 0;
+ my $proto_bind = $self->_resolve_bindattrs(
+ $source,
+ [map {
+ [ { dbic_colname => $source_columns[$_], _bind_data_slice_idx => $_ }
+ => $new_data[0][$_] ]
+ } (0 ..$#source_columns) ],
+ $columns_info
+ );
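  # Shape-wise (column name hypothetical) each entry handed to
  # _resolve_bindattrs above pairs a bind-metadata hash with the value from
  # the first data slice:
  #   [ { dbic_colname => 'name', _bind_data_slice_idx => 1 } => 'foo' ]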
## Set a client-side conversion error handler, straight from DBD::Sybase docs.
# This ignores any data conversion errors detected by the client side libs, as
my $guard = $bulk->txn_scope_guard;
+## FIXME - once this is done - address the FIXME on finish() below
## XXX get this to work instead of our own $sth
## will require SQLA or *Hacks changes for ordered columns
# $bulk->next::method($source, \@source_columns, \@new_data, {
}
);
- my @bind = map { [ $source_columns[$_] => $_ ] } (0 .. $#source_columns);
+ {
+ # FIXME the $sth->finish in _execute_array does a rollback for some
+ # reason. Disable it temporarily until we fix the SQLMaker thing above
+ no warnings 'redefine';
+ no strict 'refs';
+ local *{ref($sth).'::finish'} = sub {};
- $self->_execute_array(
- $source, $sth, \@bind, \@source_columns, \@new_data, sub {
- $guard->commit
- }
- );
+ $self->_dbh_execute_for_fetch(
+ $source, $sth, $proto_bind, \@source_columns, \@new_data
+ );
+ }
+
+ $guard->commit;
$bulk->_query_end($sql);
} catch {
}
}
-sub _dbh_execute_array {
- my ($self, $sth, $tuple_status, $cb) = @_;
-
- my $rv = $self->next::method($sth, $tuple_status);
- $cb->() if $cb;
-
- return $rv;
-}
-
# Make sure blobs are not bound as placeholders, and return any non-empty ones
# as a hash.
sub _remove_blob_cols {
$self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
};
-# check if we're updating a single row by PK
- my $pk_cols_in_where = 0;
- for my $col (@primary_cols) {
- $pk_cols_in_where++ if defined $where->{$col};
- }
- my @rows;
-
- if ($pk_cols_in_where == @primary_cols) {
+ my @pks_to_update;
+ if (
+ ref $where eq 'HASH'
+ and
+ @primary_cols == grep { defined $where->{$_} } @primary_cols
+ ) {
my %row_to_update;
@row_to_update{@primary_cols} = @{$where}{@primary_cols};
- @rows = \%row_to_update;
- } else {
+ @pks_to_update = \%row_to_update;
+ }
+ else {
my $cursor = $self->select ($source, \@primary_cols, $where, {});
- @rows = map {
+ @pks_to_update = map {
my %row; @row{@primary_cols} = @$_; \%row
} $cursor->all;
}
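  # Illustration (names hypothetical): an update with $where = { artistid => 42 }
  # on a single-column PK skips the extra select above, while any more complex
  # condition first resolves the affected PKs via $self->select before the
  # per-row blob insert below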
- for my $row (@rows) {
- $self->_insert_blobs($source, $blob_cols, $row);
+ for my $ident (@pks_to_update) {
+ $self->_insert_blobs($source, $blob_cols, $ident);
}
}
$dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
$dbh->do('set dateformat mdy'); # input fmt: 08/13/1979 18:08:55.080
-On connection for use with L<DBIx::Class::InflateColumn::DateTime>, using
-L<DateTime::Format::Sybase>, which you will need to install.
-
This works for both C<DATETIME> and C<SMALLDATETIME> columns; note that
C<SMALLDATETIME> columns only have minute precision.
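
A minimal sketch (class, table and column names hypothetical) of a Result
class taking advantage of this:

  package MyApp::Schema::Result::Event;
  use base 'DBIx::Class::Core';

  __PACKAGE__->load_components(qw/InflateColumn::DateTime/);
  __PACKAGE__->table('event');
  __PACKAGE__->add_columns(
    id        => { data_type => 'int', is_auto_increment => 1 },
    starts_at => { data_type => 'datetime' },
  );
  __PACKAGE__->set_primary_key('id');

  # $event->starts_at now inflates to a DateTime object on read and is
  # deflated through the storage's datetime formatter when written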
'Your DBD::Sybase is too old to support '
.'DBIx::Class::InflateColumn::DateTime, please upgrade!';
+ # FIXME - in retrospect this is a rather bad US-centric choice
+ # of format. Not changing as a bugwards compat, though in reality
+ # the only piece that sees the results of $dt object formatting
+ # (as opposed to parsing) is the database itself, so theoretically
+ # changing both this SET command and the formatter definition of
+ # ::S::D::Sybase::ASE::DateTime::Format below should be safe and
+ # transparent
+
$dbh->do('SET DATEFORMAT mdy');
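      # Illustration: with 'mdy' in effect the server accepts literals of the
      # form 08/13/1979 18:08:55.080 (cf. the input fmt noted in the POD
      # above), which is exactly what the formatter pattern below produces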
- 1;
}
}
-sub _dbh_begin_work {
+sub _exec_txn_begin {
my $self = shift;
# bulkLogin=1 connections are always in a transaction, and can only call BEGIN
# savepoint support using ASE syntax
-sub _svp_begin {
+sub _exec_svp_begin {
my ($self, $name) = @_;
- $self->_get_dbh->do("SAVE TRANSACTION $name");
+ $self->_dbh->do("SAVE TRANSACTION $name");
}
# A new SAVE TRANSACTION with the same name releases the previous one.
-sub _svp_release { 1 }
+sub _exec_svp_release { 1 }
-sub _svp_rollback {
+sub _exec_svp_rollback {
my ($self, $name) = @_;
- $self->_get_dbh->do("ROLLBACK TRANSACTION $name");
+ $self->_dbh->do("ROLLBACK TRANSACTION $name");
+}
+
+package # hide from PAUSE
+ DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format;
+
+my $datetime_parse_format = '%Y-%m-%dT%H:%M:%S.%3NZ';
+my $datetime_format_format = '%m/%d/%Y %H:%M:%S.%3N';
+
+my ($datetime_parser, $datetime_formatter);
+
+sub parse_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_parse_format,
+ on_error => 'croak',
+ );
+ return $datetime_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_formatter ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_format_format,
+ on_error => 'croak',
+ );
+ return $datetime_formatter->format_datetime(shift);
}
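
# Round-trip sketch (value borrowed from the ISO_strict example in the POD),
# assuming DateTime::Format::Strptime honors the %3N fractional-second token
# as documented:
#
#   my $dt = DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format
#     ->parse_datetime('2004-08-21T14:36:48.080Z');
#
#   DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format
#     ->format_datetime($dt);   # '08/21/2004 14:36:48.080'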
1;