use Data::Dumper::Concise 'Dumper';
use Try::Tiny;
use Context::Preserve 'preserve_context';
+use DBIx::Class::_Util 'sigwarn_silencer';
use namespace::clean;
-__PACKAGE__->sql_limit_dialect ('RowCountOrGenericSubQ');
+__PACKAGE__->sql_limit_dialect ('GenericSubQ');
__PACKAGE__->sql_quote_char ([qw/[ ]/]);
__PACKAGE__->datetime_parser_type(
'DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format'
# Even though we call $sth->finish for uses of the bulk API, there's still an
# "active statement" warning on disconnect, which we throw away here.
-# This is due to the bug described in insert_bulk.
+# This is due to the bug described in _insert_bulk.
# Currently a noop because 'prepare' is used instead of 'prepare_cached'.
- local $SIG{__WARN__} = sub {
- warn $_[0] unless $_[0] =~ /active statement/i;
- } if $self->_is_bulk_storage;
+ local $SIG{__WARN__} = sigwarn_silencer(qr/active statement/i)
+ if $self->_is_bulk_storage;
# so that next transaction gets a dbh
$self->_began_bulk_work(0) if $self->_is_bulk_storage;
C<1>, but C<0> is better if your database is configured for it.
See
-L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.
+L<DBD::Sybase/Handling IMAGE/TEXT data with syb_ct_get_data()/syb_ct_send_data()>.
=cut
}
sub _prep_for_execute {
- my $self = shift;
- my ($op, $ident) = @_;
+ my ($self, $op, $ident, $args) = @_;
#
### This is commented out because all tests pass. However I am leaving it
### BTW it doesn't currently work exactly - need better sensitivity to
# currently set value
#
+ #my ($op, $ident) = @_;
+ #
# inherit these from the parent for the duration of _prep_for_execute
# Don't know how to make a localizing loop with if's, otherwise I would
#local $self->{_autoinc_supplied_for_op}
# = $self->_parent_storage->_perform_autoinc_retrieval
#if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage;
- my ($sql, $bind) = $self->next::method (@_);
+ my $limit; # extract and use shortcut on limit without offset
+ if ($op eq 'select' and ! $args->[4] and $limit = $args->[3]) {
+ $args = [ @$args ];
+ $args->[3] = undef;
+ }
+
+ my ($sql, $bind) = $self->next::method($op, $ident, $args);
+
+ # $limit is already sanitized by now
+ $sql = join( "\n",
+ "SET ROWCOUNT $limit",
+ $sql,
+ "SET ROWCOUNT 0",
+ ) if $limit;
if (my $identity_col = $self->_perform_autoinc_retrieval) {
$sql .= "\n" . $self->_fetch_identity_sql($ident, $identity_col)
sub _execute {
my $self = shift;
- my ($op) = @_;
-
my ($rv, $sth, @bind) = $self->next::method(@_);
$self->_identity( ($sth->fetchall_arrayref)->[0][0] )
}
}
-sub insert_bulk {
+sub _insert_bulk {
my $self = shift;
my ($source, $cols, $data) = @_;
# This ignores any data conversion errors detected by the client side libs, as
# they are usually harmless.
my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
- Sub::Name::subname insert_bulk => sub {
+ Sub::Name::subname _insert_bulk_cslib_errhandler => sub {
my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
return 1 if $errno == 36;
$self->_bulk_storage(undef);
unshift @_, $self;
- goto \&insert_bulk;
+ goto \&_insert_bulk;
}
elsif ($exception) {
# rollback makes the bulkLogin connection unusable
return %blob_cols ? \%blob_cols : undef;
}
-# same for insert_bulk
+# same for _insert_bulk
sub _remove_blob_cols_array {
my ($self, $source, $cols, $data) = @_;
my ($self, $source, $blob_cols, $where) = @_;
my @primary_cols = try
- { $source->_pri_cols }
+ { $source->_pri_cols_or_die }
catch {
$self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
};
my %row = %$row;
my @primary_cols = try
- { $source->_pri_cols }
+ { $source->_pri_cols_or_die }
catch {
$self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
};
$schema->txn_do(sub {
my $rs = $schema->resultset('Book');
- while (my $row = $rs->next) {
+ while (my $result = $rs->next) {
$schema->resultset('MetaData')->create({
- book_id => $row->id,
+ book_id => $result->id,
...
});
}
=head1 LIMITED QUERIES
-Because ASE does not have a good way to limit results in SQL that works for all
-types of queries, the limit dialect is set to
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ>.
+Because ASE does not have a good way to limit results in SQL that works for
+all types of queries, the limit dialect is set to
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ>.
Fortunately, ASE and L<DBD::Sybase> support cursors properly, so when
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ> is too slow you can use
-the L<software_limit|DBIx::Class::ResultSet/software_limit>
-L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping over
-records.
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ> is too slow
+you can use the L<software_limit|DBIx::Class::ResultSet/software_limit>
+L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping
+over records.
=head1 TEXT/IMAGE COLUMNS