use Data::Dumper::Concise 'Dumper';
use Try::Tiny;
use Context::Preserve 'preserve_context';
+use DBIx::Class::_Util 'sigwarn_silencer';
use namespace::clean;
__PACKAGE__->sql_limit_dialect ('GenericSubQ');
# Even though we call $sth->finish for uses of the bulk API, there's still an
# "active statement" warning on disconnect, which we throw away here.
-# This is due to the bug described in insert_bulk.
+# This is due to the bug described in _insert_bulk.
# Currently a noop because 'prepare' is used instead of 'prepare_cached'.
- local $SIG{__WARN__} = sub {
- warn $_[0] unless $_[0] =~ /active statement/i;
- } if $self->_is_bulk_storage;
+ local $SIG{__WARN__} = sigwarn_silencer(qr/active statement/i)
+ if $self->_is_bulk_storage;
# so that next transaction gets a dbh
$self->_began_bulk_work(0) if $self->_is_bulk_storage;
}
}
-sub insert_bulk {
+sub _insert_bulk {
my $self = shift;
my ($source, $cols, $data) = @_;
# This ignores any data conversion errors detected by the client side libs, as
# they are usually harmless.
my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
- Sub::Name::subname insert_bulk => sub {
+ Sub::Name::subname _insert_bulk_cslib_errhandler => sub {
my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
return 1 if $errno == 36;
$self->_bulk_storage(undef);
unshift @_, $self;
- goto \&insert_bulk;
+ goto \&_insert_bulk;
}
elsif ($exception) {
# rollback makes the bulkLogin connection unusable
return %blob_cols ? \%blob_cols : undef;
}
-# same for insert_bulk
+# same for _insert_bulk
sub _remove_blob_cols_array {
my ($self, $source, $cols, $data) = @_;