'DateTime::Format::Strptime' => 0,
) : ()
,
+ grep $_, @ENV{qw/DBICTEST_FIREBIRD_DSN DBICTEST_FIREBIRD_ODBC_DSN/}
+ ? (
+ 'DateTime::Format::Strptime' => 0,
+ ) : ()
+ ,
);
#************************************************************************#
# Make ABSOLUTELY SURE that nothing on the list above is a real require, #
$self->store_column($auto_pri[$_] => $ids[$_]) for 0 .. $#ids;
}
+ # get non-PK auto-incs
+ # (a RETURNING-capable storage may hand back generated values for
+ # auto-increment columns outside the primary key as well)
+ {
+ my $rsrc = $self->result_source;
+ my %pk;
+ @pk{ $rsrc->primary_columns } = ();
+
+ # auto-increment columns that are not part of the PK
+ my @non_pk_autoincs = grep {
+ (not exists $pk{$_})
+ && $rsrc->column_info($_)->{is_auto_increment}
+ } $rsrc->columns;
+
+ if (@non_pk_autoincs) {
+ my @ids = $rsrc->storage->last_insert_id($rsrc, @non_pk_autoincs);
+
+ # only trust the values if the storage returned exactly one per column
+ if (@ids == @non_pk_autoincs) {
+ $self->store_column($non_pk_autoincs[$_] => $ids[$_]) for 0 .. $#ids;
+ }
+ }
+ }
$self->{_dirty_columns} = {};
$self->{related_resultsets} = {};
);
}
+# Firebird specific limit, reverse of _SkipFirst for Informix
+sub _FirstSkip {
+  my ($self, $sql, $order, $rows, $offset) = @_;
+
+  # FIRST/SKIP must appear directly after the SELECT keyword, so peel
+  # the keyword off before re-assembling the statement
+  $sql =~ s/^ \s* SELECT \s+ //ix
+    or croak "Unrecognizable SELECT: $sql";
+
+  my $first = sprintf 'FIRST %d ', $rows;
+  my $skip  = $offset ? sprintf('SKIP %d ', $offset) : '';
+
+  return 'SELECT ' . $first . $skip . $sql . $self->_order_by($order);
+}
+
# Crappy Top based Limit/Offset support. Legacy from MSSQL.
sub _Top {
my ( $self, $sql, $order, $rows, $offset ) = @_;
--- /dev/null
+package DBIx::Class::Storage::DBI::InterBase;
+
+# partly stolen from DBIx::Class::Storage::DBI::MSSQL
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+use List::Util();
+
+__PACKAGE__->mk_group_accessors(simple => qw/
+ _auto_incs
+/);
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::InterBase - Driver for the Firebird RDBMS
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for Firebird using C<RETURNING>, sets the
+limit dialect to C<FIRST X SKIP X> and provides preliminary
+L<DBIx::Class::InflateColumn::DateTime> support.
+
+For ODBC support, see L<DBIx::Class::Storage::DBI::ODBC::Firebird>.
+
+To turn on L<DBIx::Class::InflateColumn::DateTime> support, add:
+
+ on_connect_call => 'datetime_setup'
+
+to your L<DBIx::Class::Storage::DBI/connect_info>.
+
+=cut
+
+# For INSERTs, collect the auto-increment (and primary key) columns the
+# caller did not supply a value for, and ask the SQL maker to emit a
+# RETURNING clause for them so last_insert_id() works without a second
+# round-trip.
+sub _prep_for_execute {
+ my $self = shift;
+ my ($op, $extra_bind, $ident, $args) = @_;
+
+ if ($op eq 'insert') {
+ my @pk = $ident->primary_columns;
+ my %pk;
+ @pk{@pk} = ();
+
+ # a column qualifies when it is marked is_auto_increment or belongs to
+ # the PK, AND the INSERT either omits it or sets it to a literal NULL
+ my @auto_inc_cols = grep {
+ my $inserting = $args->[0]{$_};
+
+ ($ident->column_info($_)->{is_auto_increment}
+ || exists $pk{$_})
+ && (
+ (not defined $inserting)
+ ||
+ (ref $inserting eq 'SCALAR' && $$inserting =~ /^null\z/i)
+ )
+ } $ident->columns;
+
+ if (@auto_inc_cols) {
+ $args->[1]{returning} = \@auto_inc_cols;
+
+ # remember the column list; _execute fills slot [1] with the values
+ # fetched from the RETURNING result row
+ $self->_auto_incs([]);
+ $self->_auto_incs->[0] = \@auto_inc_cols;
+ }
+ }
+
+ return $self->next::method(@_);
+}
+
+sub _execute {
+ my $self = shift;
+ my ($op) = @_;
+
+ my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
+
+ # after an INSERT with a RETURNING clause (set up in _prep_for_execute)
+ # the generated values come back as a result row on the same handle
+ if ($op eq 'insert' && $self->_auto_incs) {
+ local $@;
+ my (@auto_incs) = eval {
+ # suppress driver warnings about fetching from a non-SELECT handle
+ local $SIG{__WARN__} = sub {};
+ $sth->fetchrow_array
+ };
+ $self->_auto_incs->[1] = \@auto_incs;
+ $sth->finish;
+ }
+
+ return wantarray ? ($rv, $sth, @bind) : $rv;
+}
+
+# Report the values captured from the last INSERT ... RETURNING, in the
+# order of the requested @cols. Returns an empty list when no RETURNING
+# insert has run yet, instead of dying on an undef dereference.
+sub last_insert_id {
+ my ($self, $source, @cols) = @_;
+ my @result;
+
+ # _auto_incs is only populated by _prep_for_execute/_execute for
+ # inserts that used a RETURNING clause
+ my $auto_incs = $self->_auto_incs
+   or return @result;
+
+ # map the RETURNING column list to the values fetched in _execute
+ my %auto_incs;
+ @auto_incs{ @{ $auto_incs->[0] } } =
+   @{ $auto_incs->[1] || [] };
+
+ push @result, $auto_incs{$_} for @cols;
+
+ return @result;
+}
+
+# this sub stolen from DB2
+
+sub _sql_maker_opts {
+  my ($self, $opts) = @_;
+
+  # remember any options we were handed
+  $self->{_sql_maker_opts} = { %$opts } if $opts;
+
+  # default to Firebird's FIRST x SKIP x limit dialect, but let any
+  # stored options override it
+  return {
+    limit_dialect => 'FirstSkip',
+    %{ $self->{_sql_maker_opts} || {} },
+  };
+}
+
+sub _svp_begin {
+  my $self = shift;
+  my ($name) = @_;
+
+  # Firebird supports SQL-standard savepoints
+  $self->_get_dbh->do("SAVEPOINT $name");
+}
+
+sub _svp_release {
+  my $self = shift;
+  my ($name) = @_;
+
+  $self->_get_dbh->do("RELEASE SAVEPOINT $name");
+}
+
+sub _svp_rollback {
+  my $self = shift;
+  my ($name) = @_;
+
+  $self->_get_dbh->do("ROLLBACK TO SAVEPOINT $name");
+}
+
+sub _ping {
+  my $self = shift;
+
+  my $dbh = $self->_dbh
+    or return 0;
+
+  local $dbh->{RaiseError} = 1;
+
+  # cheap liveness probe against the always-present system table
+  my $alive = eval {
+    $dbh->do('select 1 from rdb$database');
+    1;
+  };
+
+  return $alive ? 1 : 0;
+}
+
+# We want dialect 3 for new features and quoting to work, DBD::InterBase uses
+# dialect 1 (interbase compat) by default.
+# NOTE(review): does not chain to next::method -- confirm no superclass
+# _init work is intentionally being skipped.
+sub _init {
+ my $self = shift;
+ $self->_set_sql_dialect(3);
+}
+
+# Append ib_dialect to the DSN unless one is already present, then cycle
+# the connection (if we were connected) so the new dialect takes effect.
+sub _set_sql_dialect {
+ my $self = shift;
+ my $val = shift || 3;
+
+ my $dsn = $self->_dbi_connect_info->[0];
+
+ # a coderef DSN builds its own $dbh -- nothing we can modify here
+ return if ref($dsn) eq 'CODE';
+
+ if ($dsn !~ /ib_dialect=/) {
+ $self->_dbi_connect_info->[0] = "$dsn;ib_dialect=$val";
+ # reconnect so the modified DSN is actually used
+ my $connected = defined $self->_dbh;
+ $self->disconnect;
+ $self->ensure_connected if $connected;
+ }
+}
+
+# softcommit makes savepoints work
+sub _run_connection_actions {
+ my $self = shift;
+
+ # set on every fresh connection, before user-supplied connection actions
+ $self->_dbh->{ib_softcommit} = 1;
+
+ $self->next::method(@_);
+}
+
+=head2 connect_call_datetime_setup
+
+Used as:
+
+ on_connect_call => 'datetime_setup'
+
+In L<DBIx::Class::Storage::DBI/connect_info> to set the date and timestamp
+formats using:
+
+ $dbh->{ib_time_all} = 'ISO';
+
+See L<DBD::InterBase> for more details.
+
+The C<TIMESTAMP> data type supports up to 4 digits after the decimal point for
+second precision. The full precision is used.
+
+You will need the L<DateTime::Format::Strptime> module for inflation to work.
+
+For L<DBIx::Class::Storage::DBI::ODBC::Firebird>, this is a noop and sub-second
+precision is not currently available.
+
+=cut
+
+sub connect_call_datetime_setup {
+  my ($self) = @_;
+
+  # ask DBD::InterBase for ISO-formatted date/time/timestamp values
+  my $dbh = $self->_get_dbh;
+  $dbh->{ib_time_all} = 'ISO';
+}
+
+
+# from MSSQL
+
+# Returns a DateTime::Format::Strptime parser matching the ISO format
+# configured in connect_call_datetime_setup, including the 4 digits of
+# fractional-second precision Firebird TIMESTAMPs carry.
+sub build_datetime_parser {
+ my $self = shift;
+ my $type = "DateTime::Format::Strptime";
+ eval "use ${type}";
+ $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ return $type->new(
+ pattern => '%Y-%m-%d %H:%M:%S.%4N', # %F %T
+ on_error => 'croak',
+ );
+}
+
+1;
+
+=head1 CAVEATS
+
+=over 4
+
+=item *
+
+C<last_insert_id> support only works for Firebird versions 2 or greater. To
+work with earlier versions, we'll need to figure out how to retrieve the bodies
+of C<BEFORE INSERT> triggers and parse them for the C<GENERATOR> name.
+
+=back
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::ODBC::Firebird;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI::InterBase/;
+use mro 'c3';
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::ODBC::Firebird - Driver for using the Firebird RDBMS
+through ODBC
+
+=head1 SYNOPSIS
+
+Most functionality is provided by L<DBIx::Class::Storage::DBI::InterBase>, see
+that module for details.
+
+To build the ODBC driver for Firebird on Linux for unixODBC, see:
+
+L<http://www.firebirdnews.org/?p=1324>
+
+=cut
+
+# XXX seemingly no equivalent to ib_time_all in DBD::InterBase via ODBC
+# deliberate no-op (returns true) so on_connect_call => 'datetime_setup'
+# remains usable with the ODBC driver
+sub connect_call_datetime_setup { 1 }
+
+# from MSSQL
+
+# Same as the DBD::InterBase parser but without fractional seconds --
+# sub-second precision is not available through this ODBC driver.
+sub build_datetime_parser {
+ my $self = shift;
+ my $type = "DateTime::Format::Strptime";
+ eval "use ${type}";
+ $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ return $type->new(
+ pattern => '%Y-%m-%d %H:%M:%S', # %F %T
+ on_error => 'croak',
+ );
+}
+
+# we don't need DBD::InterBase-specific initialization
+# (the parent class would otherwise force ib_dialect via the DSN)
+sub _init { 1 }
+
+# ODBC uses dialect 3 by default, good
+# override the parent's DSN-rewriting with a no-op
+sub _set_sql_dialect { 1 }
+
+1;
+
+=head1 CAVEATS
+
+=over 4
+
+=item *
+
+This driver (unlike L<DBD::InterBase>) does not currently support reading or
+writing C<TIMESTAMP> values with sub-second precision.
+
+=back
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use Scope::Guard ();
+
+# tests stolen from 749sybase_asa.t
+
+# connection credentials come from the environment: native DBD::InterBase
+# first, ODBC second
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_FIREBIRD_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
+
+plan skip_all => <<'EOF' unless $dsn || $dsn2;
+Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},
+_USER and _PASS to run these tests
+EOF
+
+# one entry per driver; either may be missing
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+my $schema;
+
+# run the whole battery once per configured connection
+# (index 0: DBD::InterBase, index 1: ODBC)
+foreach my $conn_idx (0..1) {
+ my ($dsn, $user, $pass) = @{ $info[$conn_idx] || [] };
+
+ next unless $dsn;
+
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ auto_savepoint => 1,
+ quote_char => q["],
+ name_sep => q[.],
+ });
+ my $dbh = $schema->storage->dbh;
+
+ # drop everything we created even if a test dies mid-way
+ my $sg = Scope::Guard->new(\&cleanup);
+
+ eval { $dbh->do(q[DROP TABLE "artist"]) };
+ $dbh->do(<<EOF);
+ CREATE TABLE "artist" (
+ "artistid" INT PRIMARY KEY,
+ "name" VARCHAR(255),
+ "charfield" CHAR(10),
+ "rank" INT DEFAULT 13
+ )
+EOF
+ # emulate an auto-increment PK with a generator + BEFORE INSERT trigger
+ eval { $dbh->do(q[DROP GENERATOR "gen_artist_artistid"]) };
+ $dbh->do('CREATE GENERATOR "gen_artist_artistid"');
+ eval { $dbh->do('DROP TRIGGER "artist_bi"') };
+ $dbh->do(<<EOF);
+ CREATE TRIGGER "artist_bi" FOR "artist"
+ ACTIVE BEFORE INSERT POSITION 0
+ AS
+ BEGIN
+ IF (NEW."artistid" IS NULL) THEN
+ NEW."artistid" = GEN_ID("gen_artist_artistid",1);
+ END
+EOF
+
+ my $ars = $schema->resultset('Artist');
+ is ( $ars->count, 0, 'No rows at first' );
+
+# test primary key handling
+ my $new = $ars->create({ name => 'foo' });
+ ok($new->artistid, "Auto-PK worked");
+
+# test savepoints
+ eval {
+ $schema->txn_do(sub {
+ eval {
+ $schema->txn_do(sub {
+ $ars->create({ name => 'in_savepoint' });
+ die "rolling back savepoint";
+ });
+ };
+ ok ((not $ars->search({ name => 'in_savepoint' })->first),
+ 'savepoint rolled back');
+ $ars->create({ name => 'in_outer_txn' });
+ die "rolling back outer txn";
+ });
+ };
+ ok ((not $ars->search({ name => 'in_outer_txn' })->first),
+ 'outer txn rolled back');
+
+# test explicit key spec
+ $new = $ars->create ({ name => 'bar', artistid => 66 });
+ is($new->artistid, 66, 'Explicit PK worked');
+ $new->discard_changes;
+ is($new->artistid, 66, 'Explicit PK assigned');
+
+ lives_ok {
+ $new->update({ name => 'baz' })
+ } 'update survived';
+ $new->discard_changes;
+ is $new->name, 'baz', 'row updated';
+
+# test populate
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_$_" };
+ }
+ $ars->populate (\@pop);
+ });
+
+# test populate with explicit key
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
+ }
+ # XXX why does insert_bulk not work here?
+ my @foo = $ars->populate (\@pop);
+ });
+
+# count what we did so far
+ is ($ars->count, 6, 'Simple count works');
+
+# test UPDATE
+ lives_ok {
+ $schema->resultset('Artist')
+ ->search({name => 'foo'})
+ ->update({rank => 4 });
+ } 'Can update a column';
+
+ my ($updated) = $schema->resultset('Artist')->search({name => 'foo'});
+ is $updated->rank, 4, 'and the update made it to the database';
+
+
+# test LIMIT support
+ my $lim = $ars->search( {},
+ {
+ rows => 3,
+ offset => 4,
+ order_by => 'artistid'
+ }
+ );
+ is( $lim->count, 2, 'ROWS+OFFSET count ok' );
+ is( $lim->all, 2, 'Number of ->all objects matches count' );
+
+# test iterator
+ $lim->reset;
+ is( $lim->next->artistid, 101, "iterator->next ok" );
+ is( $lim->next->artistid, 102, "iterator->next ok" );
+ is( $lim->next, undef, "next past end of resultset ok" );
+
+# test empty insert
+ {
+ local $ars->result_source->column_info('artistid')->{is_auto_increment} = 0;
+
+ lives_ok { $ars->create({}) }
+ 'empty insert works';
+ }
+
+# test blobs (stolen from 73oracle.t)
+ SKIP: {
+ eval { $dbh->do('DROP TABLE "bindtype_test2"') };
+ $dbh->do(q[
+ CREATE TABLE "bindtype_test2"
+ (
+ "id" INT PRIMARY KEY,
+ "bytea" INT,
+ "a_blob" BLOB,
+ "a_clob" BLOB SUB_TYPE TEXT
+ )
+ ]);
+
+ my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
+ $binstr{'large'} = $binstr{'small'} x 1024;
+
+ my $maxloblen = length $binstr{'large'};
+ local $dbh->{'LongReadLen'} = $maxloblen;
+
+ my $rs = $schema->resultset('BindType2');
+ my $id = 0;
+
+ foreach my $type (qw( a_blob a_clob )) {
+ foreach my $size (qw( small large )) {
+ $id++;
+
+# turn off horrendous binary DBIC_TRACE output
+ local $schema->storage->{debug} = 0;
+
+ lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
+ "inserted $size $type without dying";
+
+ ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
+ }
+ }
+ }
+}
+
+done_testing;
+
+# clean up our mess
+
+sub cleanup {
+  my $dbh;
+  eval {
+    $schema->storage->disconnect; # to avoid object FOO is in use errors
+    $dbh = $schema->storage->dbh;
+  };
+  return unless $dbh;
+
+  eval { $dbh->do('DROP TRIGGER "artist_bi"') };
+  diag $@ if $@;
+
+  eval { $dbh->do('DROP GENERATOR "gen_artist_artistid"') };
+  diag $@ if $@;
+
+  # the blob tests above create "bindtype_test2" (not "bindtype_test"),
+  # so that is the table we must drop here
+  foreach my $table (qw/artist bindtype_test2/) {
+    eval { $dbh->do(qq[DROP TABLE "$table"]) };
+    #diag $@ if $@;
+  }
+}
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use Scope::Guard ();
+
+# XXX we're only testing TIMESTAMP here
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_FIREBIRD_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
+
+if (not ($dsn || $dsn2)) {
+  # no Firebird connection configured -- skip the whole test.
+  # (message cleaned up: the original carried stray quote/concat
+  # leftovers from a string-to-heredoc conversion)
+  plan skip_all => <<'EOF';
+Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN}
+_USER and _PASS to run this test.
+Warning: This test drops and creates a table called 'event'.
+EOF
+} else {
+  # DateTime inflation needs these modules at runtime
+  eval "use DateTime; use DateTime::Format::Strptime;";
+  if ($@) {
+    plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
+  }
+}
+
+# one entry per driver; either may be missing
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+# file-scoped so the cleanup() guard can reach the active connection
+my $schema;
+
+# run the roundtrip once per configured connection
+# (index 0: DBD::InterBase, index 1: ODBC)
+foreach my $conn_idx (0..$#info) {
+ my ($dsn, $user, $pass) = @{ $info[$conn_idx] };
+
+ next unless $dsn;
+
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ on_connect_call => [ 'datetime_setup' ],
+ });
+
+ # drop the event table even if a test dies mid-way
+ my $sg = Scope::Guard->new(\&cleanup);
+
+ eval { $schema->storage->dbh->do("DROP TABLE event") };
+ $schema->storage->dbh->do(<<"SQL");
+ CREATE TABLE event (
+ id INT PRIMARY KEY,
+ created_on TIMESTAMP
+ )
+SQL
+ my $now = DateTime->now;
+ $now->set_nanosecond(555600000);
+ my $row;
+ ok( $row = $schema->resultset('Event')->create({
+ id => 1,
+ created_on => $now,
+ }));
+ ok( $row = $schema->resultset('Event')
+ ->search({ id => 1 }, { select => ['created_on'] })
+ ->first
+ );
+ is $row->created_on, $now, 'DateTime roundtrip';
+
+ if ($conn_idx == 0) { # skip for ODBC
+ cmp_ok $row->created_on->nanosecond, '==', 555600000,
+ 'fractional part of a second survived';
+ }
+}
+
+done_testing;
+
+# clean up our mess
+sub cleanup {
+ my $dbh;
+ eval {
+ $schema->storage->disconnect; # to avoid object FOO is in use errors
+ $dbh = $schema->storage->dbh;
+ };
+ # nothing to drop if we could not (re)connect
+ return unless $dbh;
+
+ eval { $dbh->do("DROP TABLE $_") } for qw/event/;
+}
Artist
SequenceTest
BindType
+ BindType2
Employee
CD
FileColumn
--- /dev/null
+package # hide from PAUSE
+ DBICTest::Schema::BindType2;
+
+use base qw/DBICTest::BaseResult/;
+
+# Result class for the Firebird blob tests; the underlying table is
+# created by the test script itself rather than by deploy()
+__PACKAGE__->table('bindtype_test2');
+
+__PACKAGE__->add_columns(
+ 'id' => {
+ data_type => 'integer',
+ is_auto_increment => 1,
+ },
+ 'bytea' => {
+ data_type => 'bytea',
+ is_nullable => 1,
+ },
+ 'a_blob' => {
+ data_type => 'blob',
+ is_nullable => 1,
+ },
+ 'a_clob' => {
+ data_type => 'clob',
+ is_nullable => 1,
+ },
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Sat Jan 30 19:18:55 2010
+-- Created on Sat Feb 6 08:01:11 2010
--
;
);
--
+-- Table: bindtype_test2
+--
+CREATE TABLE bindtype_test2 (
+ id INTEGER PRIMARY KEY NOT NULL,
+ bytea blob,
+ a_blob blob,
+ a_clob clob
+);
+
+--
-- Table: collection
--
CREATE TABLE collection (