use Data::Dumper::Concise 'Dumper';
use Try::Tiny;
use Context::Preserve 'preserve_context';
+use DBIx::Class::_Util qw( sigwarn_silencer dbic_internal_try );
use namespace::clean;
__PACKAGE__->sql_limit_dialect ('GenericSubQ');
# Even though we call $sth->finish for uses of the bulk API, there's still an
# "active statement" warning on disconnect, which we throw away here.
-# This is due to the bug described in insert_bulk.
+# This is due to the bug described in _insert_bulk.
# Currently a noop because 'prepare' is used instead of 'prepare_cached'.
- local $SIG{__WARN__} = sub {
- warn $_[0] unless $_[0] =~ /active statement/i;
- } if $self->_is_bulk_storage;
+ local $SIG{__WARN__} = sigwarn_silencer(qr/active statement/i)
+ if $self->_is_bulk_storage;
# so that next transaction gets a dbh
$self->_began_bulk_work(0) if $self->_is_bulk_storage;
C<1>, but C<0> is better if your database is configured for it.
See
-L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.
+L<DBD::Sybase/Handling IMAGE/TEXT data with syb_ct_get_data()/syb_ct_send_data()>.
=cut
sub _prep_for_execute {
my ($self, $op, $ident, $args) = @_;
- #
-### This is commented out because all tests pass. However I am leaving it
-### here as it may prove necessary (can't think through all combinations)
-### BTW it doesn't currently work exactly - need better sensitivity to
- # currently set value
- #
- #my ($op, $ident) = @_;
- #
- # inherit these from the parent for the duration of _prep_for_execute
- # Don't know how to make a localizing loop with if's, otherwise I would
- #local $self->{_autoinc_supplied_for_op}
- # = $self->_parent_storage->_autoinc_supplied_for_op
- #if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage;
- #local $self->{_perform_autoinc_retrieval}
- # = $self->_parent_storage->_perform_autoinc_retrieval
- #if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage;
-
my $limit; # extract and use shortcut on limit without offset
if ($op eq 'select' and ! $args->[4] and $limit = $args->[3]) {
$args = [ @$args ];
my $columns_info = $source->columns_info;
- my $identity_col =
- (first { $columns_info->{$_}{is_auto_increment} }
- keys %$columns_info )
- || '';
+ my ($identity_col) = grep
+ { $columns_info->{$_}{is_auto_increment} }
+ keys %$columns_info
+ ;
+
+ $identity_col = '' if ! defined $identity_col;
# FIXME - this is duplication from DBI.pm. When refactored towards
# the LobWriter this can be folded back where it belongs.
? 1
: 0
;
- local $self->{_perform_autoinc_retrieval} =
- ($identity_col and ! exists $to_insert->{$identity_col})
- ? $identity_col
- : undef
+
+ local $self->{_perform_autoinc_retrieval} = $self->{_autoinc_supplied_for_op}
+ ? undef
+ : $identity_col
;
# check for empty insert
my $blob_cols = $self->_remove_blob_cols($source, $to_insert);
- # do we need the horrific SELECT MAX(COL) hack?
- my $need_dumb_last_insert_id = (
- $self->_perform_autoinc_retrieval
- &&
- ($self->_identity_method||'') ne '@@IDENTITY'
- );
-
- my $next = $self->next::can;
-
- # we are already in a transaction, or there are no blobs
- # and we don't need the PK - just (try to) do it
- if ($self->{transaction_depth}
- || (!$blob_cols && !$need_dumb_last_insert_id)
+ # if a new txn is needed - it must happen on the _writer/new connection (for now)
+ my $guard;
+ if (
+ ! $self->transaction_depth
+ and
+ (
+ $blob_cols
+ or
+ # do we need the horrific SELECT MAX(COL) hack?
+ (
+ $self->_perform_autoinc_retrieval
+ and
+ ( ($self->_identity_method||'') ne '@@IDENTITY' )
+ )
+ )
) {
- return $self->_insert (
- $next, $source, $to_insert, $blob_cols, $identity_col
- );
+ $self = $self->_writer_storage;
+ $guard = $self->txn_scope_guard;
}
- # otherwise use the _writer_storage to do the insert+transaction on another
- # connection
- my $guard = $self->_writer_storage->txn_scope_guard;
-
- my $updated_cols = $self->_writer_storage->_insert (
- $next, $source, $to_insert, $blob_cols, $identity_col
- );
-
- $self->_identity($self->_writer_storage->_identity);
+ my $updated_cols = $self->next::method ($source, $to_insert);
- $guard->commit;
-
- return $updated_cols;
-}
-
-sub _insert {
- my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;
-
- my $updated_cols = $self->$next ($source, $to_insert);
-
- my $final_row = {
- ($identity_col ?
- ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
- %$to_insert,
- %$updated_cols,
- };
+ $self->_insert_blobs (
+ $source,
+ $blob_cols,
+ {
+ ( $identity_col
+ ? ( $identity_col => $self->last_insert_id($source, $identity_col) )
+ : ()
+ ),
+ %$to_insert,
+ %$updated_cols,
+ },
+ ) if $blob_cols;
- $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;
+ $guard->commit if $guard;
return $updated_cols;
}
}
}
-sub insert_bulk {
+sub _insert_bulk {
my $self = shift;
my ($source, $cols, $data) = @_;
# next::method uses a txn anyway, but it ends too early in case we need to
# select max(col) to get the identity for inserting blobs.
- ($self, my $guard) = $self->{transaction_depth} == 0 ?
- ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
- :
- ($self, undef);
+ ($self, my $guard) = $self->transaction_depth
+ ? ($self, undef)
+ : ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
+ ;
$self->next::method(@_);
# This ignores any data conversion errors detected by the client side libs, as
# they are usually harmless.
my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
- Sub::Name::subname insert_bulk => sub {
+ Sub::Name::subname _insert_bulk_cslib_errhandler => sub {
my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
return 1 if $errno == 36;
});
my $exception = '';
- try {
+ dbic_internal_try {
my $bulk = $self->_bulk_storage;
my $guard = $bulk->txn_scope_guard;
if ($exception =~ /-Y option/) {
my $w = 'Sybase bulk API operation failed due to character set incompatibility, '
- . 'reverting to regular array inserts. Try unsetting the LANG environment variable'
+ . 'reverting to regular array inserts. Try unsetting the LC_ALL environment variable'
;
$w .= "\n$exception" if $self->debug;
carp $w;
$self->_bulk_storage(undef);
unshift @_, $self;
- goto \&insert_bulk;
+ goto \&_insert_bulk;
}
elsif ($exception) {
# rollback makes the bulkLogin connection unusable
}
else {
$fields->{$col} = \"''";
- $blob_cols{$col} = $blob_val unless $blob_val eq '';
+ $blob_cols{$col} = $blob_val
+ if length $blob_val;
}
}
}
return %blob_cols ? \%blob_cols : undef;
}
-# same for insert_bulk
+# same for _insert_bulk
sub _remove_blob_cols_array {
my ($self, $source, $cols, $data) = @_;
else {
$data->[$j][$i] = \"''";
$blob_cols[$j][$i] = $blob_val
- unless $blob_val eq '';
+ if length $blob_val;
}
}
}
sub _update_blobs {
my ($self, $source, $blob_cols, $where) = @_;
- my @primary_cols = try
- { $source->_pri_cols }
+ my @primary_cols = dbic_internal_try
+ { $source->_pri_cols_or_die }
catch {
$self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
};
if (
ref $where eq 'HASH'
and
- @primary_cols == grep { defined $where->{$_} } @primary_cols
+ ! grep { ! defined $where->{$_} } @primary_cols
) {
my %row_to_update;
@row_to_update{@primary_cols} = @{$where}{@primary_cols};
}
sub _insert_blobs {
- my ($self, $source, $blob_cols, $row) = @_;
- my $dbh = $self->_get_dbh;
+ my ($self, $source, $blob_cols, $row_data) = @_;
my $table = $source->name;
- my %row = %$row;
- my @primary_cols = try
- { $source->_pri_cols }
+ my @primary_cols = dbic_internal_try
+ { $source->_pri_cols_or_die }
catch {
$self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
};
$self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
- if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
+ if grep { ! defined $row_data->{$_} } @primary_cols;
+
+ # if we are 2-phase inserting a blob - there is nothing to retrieve anymore,
+ # regardless of the previous state of the flag
+ local $self->{_perform_autoinc_retrieval}
+ if $self->_perform_autoinc_retrieval;
+
+ my %where = map {( $_ => $row_data->{$_} )} @primary_cols;
for my $col (keys %$blob_cols) {
my $blob = $blob_cols->{$col};
- my %where = map { ($_, $row{$_}) } @primary_cols;
-
my $cursor = $self->select ($source, [$col], \%where, {});
$cursor->next;
my $sth = $cursor->sth;
);
}
- try {
+ dbic_internal_try {
do {
$sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
} while $sth->fetch;
$schema->txn_do(sub {
my $rs = $schema->resultset('Book');
- while (my $row = $rs->next) {
+ while (my $result = $rs->next) {
$schema->resultset('MetaData')->create({
- book_id => $row->id,
+ book_id => $result->id,
...
});
}
=head1 LIMITED QUERIES
-Because ASE does not have a good way to limit results in SQL that works for all
-types of queries, the limit dialect is set to
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ>.
+Because ASE does not have a good way to limit results in SQL that works for
+all types of queries, the limit dialect is set to
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ>.
Fortunately, ASE and L<DBD::Sybase> support cursors properly, so when
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ> is too slow you can use
-the L<software_limit|DBIx::Class::ResultSet/software_limit>
-L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping over
-records.
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ> is too slow
+you can use the L<software_limit|DBIx::Class::ResultSet/software_limit>
+L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping
+over records.
=head1 TEXT/IMAGE COLUMNS
B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns>
calls in your C<Result> classes B<must> list columns in database order for this
-to work. Also, you may have to unset the C<LANG> environment variable before
+to work. Also, you may have to unset the C<LC_ALL> environment variable before
loading your app, as C<BCP -Y> is not yet supported in DBD::Sybase .
When inserting IMAGE columns using this method, you'll need to use
=back
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-# vim:sts=2 sw=2:
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.