$self->store_column($auto_pri[$_] => $ids[$_]) for 0 .. $#ids;
}
+ # get non-PK auto-incs
+ {
+ my %pk;
+ @pk{ $self->primary_columns } = ();
+
+ my @non_pk_autoincs = grep {
+ (not exists $pk{$_})
+ && $self->column_info($_)->{is_auto_increment}
+ } $self->columns;
+
+ if (@non_pk_autoincs) {
+ my @ids = $self->result_source->storage->last_insert_id(
+ $self->result_source,
+ @non_pk_autoincs
+ );
+
+ if (@ids == @non_pk_autoincs) {
+ $self->store_column($non_pk_autoincs[$_] => $ids[$_]) for 0 .. $#ids;
+ }
+ }
+ }
$self->{_dirty_columns} = {};
$self->{related_resultsets} = {};
);
}
+# Firebird specific limit, reverse of _SkipFirst for Informix
+sub _FirstSkip {
+ my ($self, $sql, $order, $rows, $offset) = @_;
+
+ $sql =~ s/^ \s* SELECT \s+ //ix
+ or croak "Unrecognizable SELECT: $sql";
+
+ return sprintf ('SELECT %s%s%s%s',
+ sprintf ('FIRST %d ', $rows),
+ $offset
+ ? sprintf ('SKIP %d ', $offset)
+ : ''
+ ,
+ $sql,
+ $self->_order_by ($order),
+ );
+}
+
# Crappy Top based Limit/Offset support. Legacy from MSSQL.
sub _Top {
my ( $self, $sql, $order, $rows, $offset ) = @_;
package DBIx::Class::Storage::DBI::InterBase;
-# mostly stolen from DBIx::Class::Storage::DBI::MSSQL
+# partly stolen from DBIx::Class::Storage::DBI::MSSQL
use strict;
use warnings;
-
use base qw/DBIx::Class::Storage::DBI/;
use mro 'c3';
-
use List::Util();
__PACKAGE__->mk_group_accessors(simple => qw/
- _identity
+ _fb_auto_incs
/);
-sub insert_bulk {
- my $self = shift;
- my ($source, $cols, $data) = @_;
-
- my $is_identity_insert = (List::Util::first
- { $source->column_info ($_)->{is_auto_increment} }
- (@{$cols})
- )
- ? 1
- : 0;
-
- $self->next::method(@_);
-}
-
-
sub _prep_for_execute {
my $self = shift;
my ($op, $extra_bind, $ident, $args) = @_;
my ($sql, $bind) = $self->next::method (@_);
if ($op eq 'insert') {
- $sql .= 'RETURNING "Id"';
+ my $quote_char = $self->sql_maker->quote_char || '"';
+
+ my @auto_inc_cols =
+ grep $ident->column_info($_)->{is_auto_increment}, $ident->columns;
+ if (@auto_inc_cols) {
+ my $auto_inc_cols =
+ join ', ',
+# XXX quoting the columns breaks ODBC
+# map qq{${quote_char}${_}${quote_char}},
+ @auto_inc_cols;
+
+ $sql .= " RETURNING ($auto_inc_cols)";
+
+ $self->_fb_auto_incs([]);
+ $self->_fb_auto_incs->[0] = \@auto_inc_cols;
+ }
}
return ($sql, $bind);
my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
if ($op eq 'insert') {
-
- # this should bring back the result of SELECT SCOPE_IDENTITY() we tacked
- # on in _prep_for_execute above
local $@;
- my ($identity) = eval { $sth->fetchrow_array };
-
- $self->_identity($identity);
+ my (@auto_incs) = eval {
+ local $SIG{__WARN__} = sub {};
+ $sth->fetchrow_array
+ };
+ $self->_fb_auto_incs->[1] = \@auto_incs;
$sth->finish;
}
return wantarray ? ($rv, $sth, @bind) : $rv;
}
-sub last_insert_id { shift->_identity }
+sub last_insert_id {
+ my ($self, $source, @cols) = @_;
+ my @result;
-1;
+ my %auto_incs;
+ @auto_incs{ @{ $self->_fb_auto_incs->[0] } } =
+ @{ $self->_fb_auto_incs->[1] };
+
+ push @result, $auto_incs{$_} for @cols;
+
+ return @result;
+}
+
+# this sub stolen from DB2
+sub _sql_maker_opts {
+ my ( $self, $opts ) = @_;
+
+ if ( $opts ) {
+ $self->{_sql_maker_opts} = { %$opts };
+ }
+
+ return { limit_dialect => 'FirstSkip', %{$self->{_sql_maker_opts}||{}} };
+}
+
+1;
[ $dsn2, $user2, $pass2 ],
);
-my @handles_to_clean;
+my $schema;
foreach my $info (@info) {
my ($dsn, $user, $pass) = @$info;
next unless $dsn;
- my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass);
my $dbh = $schema->storage->dbh;
- push @handles_to_clean, $dbh;
-
my $sg = Scope::Guard->new(\&cleanup);
eval { $dbh->do("DROP TABLE artist") };
for (1..2) {
push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
}
- $ars->populate (\@pop);
+ # XXX why does insert_bulk not work here?
+ my @foo = $ars->populate (\@pop);
});
# count what we did so far
}
# test blobs (stolen from 73oracle.t)
- eval { $dbh->do('DROP TABLE bindtype_test') };
- $dbh->do(q[
- CREATE TABLE bindtype_test
- (
- id INT NOT NULL PRIMARY KEY,
- bytea INT,
- blob BLOB,
- clob CLOB
- )
- ]);
+ SKIP: {
+ eval { $dbh->do('DROP TABLE bindtype_test') };
+ $dbh->do(q[
+ CREATE TABLE bindtype_test
+ (
+ id INT PRIMARY KEY,
+ bytea INT,
+ a_blob BLOB,
+ a_clob BLOB SUB_TYPE TEXT
+ )
+ ]);
+
+ last SKIP; # XXX blob ops cause segfaults!
- my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
- $binstr{'large'} = $binstr{'small'} x 1024;
+ my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
+ $binstr{'large'} = $binstr{'small'} x 1024;
- my $maxloblen = length $binstr{'large'};
- local $dbh->{'LongReadLen'} = $maxloblen;
+ my $maxloblen = length $binstr{'large'};
+ local $dbh->{'LongReadLen'} = $maxloblen;
- my $rs = $schema->resultset('BindType');
- my $id = 0;
+ my $rs = $schema->resultset('BindType');
+ my $id = 0;
- foreach my $type (qw( blob clob )) {
- foreach my $size (qw( small large )) {
- $id++;
+ foreach my $type (qw( a_blob a_clob )) {
+ foreach my $size (qw( small large )) {
+ $id++;
# turn off horrendous binary DBIC_TRACE output
- local $schema->storage->{debug} = 0;
+ local $schema->storage->{debug} = 0;
- lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
- "inserted $size $type without dying";
+ lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
+ "inserted $size $type without dying";
- ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
+ ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
+ }
}
}
}
# clean up our mess
sub cleanup {
- foreach my $dbh (@handles_to_clean) {
- eval { $dbh->do('DROP TRIGGER artist_bi') };
- diag $@ if $@;
- eval { $dbh->do('DROP GENERATOR gen_artist_artistid') };
- diag $@ if $@;
- foreach my $table (qw/artist bindtype_test/) {
- $dbh->do("DROP TABLE $table");
- diag $@ if $@;
- }
+ my $dbh;
+ eval {
+ $schema->storage->disconnect; # to avoid object FOO is in use errors
+ $dbh = $schema->storage->dbh;
+ };
+ return unless $dbh;
+
+ eval { $dbh->do('DROP TRIGGER artist_bi') };
+ diag $@ if $@;
+
+ eval { $dbh->do('DROP GENERATOR gen_artist_artistid') };
+ diag $@ if $@;
+
+ foreach my $table (qw/artist bindtype_test/) {
+ eval { $dbh->do("DROP TABLE $table") };
+ #diag $@ if $@;
}
}