L</sequence> value as well.
Also set this for MSSQL columns with the 'uniqueidentifier'
-L<DBIx::Class::ResultSource/data_type> whose values you want to automatically
-generate using C<NEWID()>, unless they are a primary key in which case this will
-be done anyway.
+L<data_type|DBIx::Class::ResultSource/data_type> whose values you want to
+automatically generate using C<NEWID()>, unless they are a primary key in which
+case this will be done anyway.
=item extra
$self->store_column($auto_pri[$_] => $ids[$_]) for 0 .. $#ids;
}
-
$self->{_dirty_columns} = {};
$self->{related_resultsets} = {};
);
}
+# Firebird specific limit, reverse of _SkipFirst for Informix
+sub _FirstSkip {
+ my ($self, $sql, $order, $rows, $offset) = @_;
+
+ # Strip the leading SELECT keyword so it can be re-prepended together
+ # with the FIRST/SKIP limit clause.
+ $sql =~ s/^ \s* SELECT \s+ //ix
+ or croak "Unrecognizable SELECT: $sql";
+
+ # Emit: SELECT FIRST <rows> [SKIP <offset>] <rest-of-select> <order-by>
+ return sprintf ('SELECT %s%s%s%s',
+ sprintf ('FIRST %d ', $rows),
+ $offset
+ ? sprintf ('SKIP %d ', $offset)
+ : ''
+ ,
+ $sql,
+ $self->_order_by ($order),
+ );
+}
+
# Crappy Top based Limit/Offset support. Legacy from MSSQL.
sub _Top {
my ( $self, $sql, $order, $rows, $offset ) = @_;
$self->_dbh_rollback unless $self->_dbh_autocommit;
+ %{ $self->_dbh->{CachedKids} } = ();
$self->_dbh->disconnect;
$self->_dbh(undef);
$self->{_dbh_gen}++;
if ( $col_info->{auto_nextval} ) {
$updated_cols->{$col} = $to_insert->{$col} = $self->_sequence_fetch(
'nextval',
- $col_info->{sequence} ||
- $self->_dbh_get_autoinc_seq($self->_get_dbh, $source)
+ $col_info->{sequence} ||=
+ $self->_dbh_get_autoinc_seq($self->_get_dbh, $source, $col)
);
}
}
my @data = map { $_->[$data_index] } @$data;
- $sth->bind_param_array( $placeholder_index, [@data], $attributes );
+ $sth->bind_param_array(
+ $placeholder_index,
+ [@data],
+ (%$attributes ? $attributes : ()),
+ );
$placeholder_index++;
}
--- /dev/null
+package DBIx::Class::Storage::DBI::InterBase;
+
+# partly stolen from DBIx::Class::Storage::DBI::MSSQL
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+use List::Util();
+
+__PACKAGE__->mk_group_accessors(simple => qw/
+ _auto_incs
+/);
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::InterBase - Driver for the Firebird RDBMS
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for Firebird using C<RETURNING> as well
+as L<auto_nextval|DBIx::Class::ResultSource/auto_nextval>, sets the limit
+dialect to C<FIRST X SKIP X>, and provides
+L<DBIx::Class::InflateColumn::DateTime> support.
+
+You need to use either the
+L<disable_sth_caching|DBIx::Class::Storage::DBI/disable_sth_caching> option or
+L</connect_call_use_softcommit> (see L</CAVEATS>) for your code to function
+correctly with this driver. Otherwise you will likely get bizarre error messages
+such as C<no statement executing>.
+
+For ODBC support, see L<DBIx::Class::Storage::DBI::ODBC::Firebird>.
+
+To turn on L<DBIx::Class::InflateColumn::DateTime> support, see
+L</connect_call_datetime_setup>.
+
+=cut
+
+# Intercept INSERTs to find auto-increment / primary-key columns the caller
+# did not supply a value for (or supplied as literal NULL), and request them
+# back via Firebird's RETURNING clause. The column list is stashed in
+# _auto_incs->[0]; _execute later fills _auto_incs->[1] with the values.
+sub _prep_for_execute {
+ my $self = shift;
+ my ($op, $extra_bind, $ident, $args) = @_;
+
+ if ($op eq 'insert') {
+ $self->_auto_incs([]);
+
+ # lexical declaration was missing -- bare %pk is fatal under 'use strict'
+ my %pk;
+ @pk{$ident->primary_columns} = ();
+
+ my @auto_inc_cols = grep {
+ my $inserting = $args->[0]{$_};
+
+ ($ident->column_info($_)->{is_auto_increment}
+ || exists $pk{$_})
+ && (
+ (not defined $inserting)
+ ||
+ (ref $inserting eq 'SCALAR' && $$inserting =~ /^null\z/i)
+ )
+ } $ident->columns;
+
+ if (@auto_inc_cols) {
+ $args->[1]{returning} = \@auto_inc_cols;
+
+ $self->_auto_incs->[0] = \@auto_inc_cols;
+ }
+ }
+
+ return $self->next::method(@_);
+}
+
+sub _execute {
+ my $self = shift;
+ my ($op) = @_;
+
+ my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
+
+ # For INSERTs, fetch the values produced by the RETURNING clause that
+ # _prep_for_execute requested, and stash them for last_insert_id/insert.
+ if ($op eq 'insert' && $self->_auto_incs) {
+ local $@;
+ my (@auto_incs) = eval {
+ local $SIG{__WARN__} = sub {}; # best-effort fetch; silence driver noise
+ $sth->fetchrow_array
+ };
+ $self->_auto_incs->[1] = \@auto_incs;
+ $sth->finish;
+ }
+
+ return wantarray ? ($rv, $sth, @bind) : $rv;
+}
+
+# Fetch the next value from a Firebird generator (sequence) via
+# GEN_ID(<generator>, 1). Only the 'nextval' operation is supported.
+sub _sequence_fetch {
+ my ($self, $nextval, $sequence) = @_;
+
+ if ($nextval ne 'nextval') {
+ $self->throw_exception("Can only fetch 'nextval' for a sequence");
+ }
+
+ $self->throw_exception('No sequence to fetch') unless $sequence;
+
+ my ($val) = $self->_get_dbh->selectrow_array(
+'SELECT GEN_ID(' . $self->sql_maker->_quote($sequence) .
+', 1) FROM rdb$database');
+
+ return $val;
+}
+
+# Reverse-engineer the generator (sequence) backing an auto-increment column
+# by parsing the table's user-defined BEFORE INSERT trigger source out of the
+# rdb$triggers system table. Returns the generator name (upper-cased when it
+# appeared unquoted), or undef if no matching trigger is found.
+sub _dbh_get_autoinc_seq {
+ my ($self, $dbh, $source, $col) = @_;
+
+ my $table_name = $source->from;
+ $table_name = $$table_name if ref $table_name;
+ # unquoted identifiers are stored upper-cased in the system tables
+ $table_name = $self->sql_maker->quote_char ? $table_name : uc($table_name);
+
+ # trigger source is a BLOB; make sure it can be read in full
+ local $dbh->{LongReadLen} = 100000;
+ local $dbh->{LongTruncOk} = 1;
+
+ my $sth = $dbh->prepare(<<'EOF');
+SELECT t.rdb$trigger_source
+FROM rdb$triggers t
+WHERE t.rdb$relation_name = ?
+AND t.rdb$system_flag = 0 -- user defined
+AND t.rdb$trigger_type = 1 -- BEFORE INSERT
+EOF
+ $sth->execute($table_name);
+
+ while (my ($trigger) = $sth->fetchrow_array) {
+ my @trig_cols = map {
+ # was uc($1): when the inner match fails, $1 still holds the stale
+ # capture from the outer //g match, so every unquoted column name
+ # came back as a copy of the last captured one -- use $_ instead
+ /^"([^"]+)/ ? $1 : uc($_)
+ } $trigger =~ /new\.("?\w+"?)/ig;
+
+ my ($quoted, $generator) = $trigger =~
+/(?:gen_id\s* \( \s* |next \s* value \s* for \s*)(")?(\w+)/ix;
+
+ if ($generator) {
+ $generator = uc $generator unless $quoted;
+
+ return $generator
+ if List::Util::first {
+ $self->sql_maker->quote_char ? ($_ eq $col) : (uc($_) eq uc($col))
+ } @trig_cols;
+ }
+ }
+
+ return undef;
+}
+
+# Map the RETURNING column names captured in _prep_for_execute to the values
+# captured in _execute, in the order the caller asked for them.
+# NOTE(review): assumes an insert has populated _auto_incs beforehand -- a
+# call without one would die dereferencing undef; confirm callers guarantee
+# this.
+sub last_insert_id {
+ my ($self, $source, @cols) = @_;
+ my @result;
+
+ my %auto_incs;
+ @auto_incs{ @{ $self->_auto_incs->[0] } } =
+ @{ $self->_auto_incs->[1] };
+
+ push @result, $auto_incs{$_} for @cols;
+
+ return @result;
+}
+
+# Merge the RETURNING-derived auto-increment values into the column/value
+# hash reported back to the caller after a normal insert.
+sub insert {
+ my $self = shift;
+
+ my $updated_cols = $self->next::method(@_);
+
+ if ($self->_auto_incs->[0]) {
+ my %auto_incs;
+ @auto_incs{ @{ $self->_auto_incs->[0] } } = @{ $self->_auto_incs->[1] };
+
+ $updated_cols = { %$updated_cols, %auto_incs };
+ }
+
+ return $updated_cols;
+}
+
+# this sub stolen from DB2
+
+# Accessor for SQL::Abstract maker options; always injects the Firebird
+# 'FirstSkip' limit dialect as a default, letting user-supplied opts win.
+sub _sql_maker_opts {
+ my ( $self, $opts ) = @_;
+
+ if ( $opts ) {
+ $self->{_sql_maker_opts} = { %$opts };
+ }
+
+ return { limit_dialect => 'FirstSkip', %{$self->{_sql_maker_opts}||{}} };
+}
+
+# Savepoint support: Firebird uses plain SQL SAVEPOINT / RELEASE SAVEPOINT /
+# ROLLBACK TO SAVEPOINT statements.
+sub _svp_begin {
+ my ($self, $name) = @_;
+
+ $self->_get_dbh->do("SAVEPOINT $name");
+}
+
+sub _svp_release {
+ my ($self, $name) = @_;
+
+ $self->_get_dbh->do("RELEASE SAVEPOINT $name");
+}
+
+sub _svp_rollback {
+ my ($self, $name) = @_;
+
+ $self->_get_dbh->do("ROLLBACK TO SAVEPOINT $name")
+}
+
+# Connection liveness check: issue a trivial query against the always-present
+# rdb$database relation; any exception means the handle is dead.
+sub _ping {
+ my $self = shift;
+
+ my $dbh = $self->_dbh or return 0;
+
+ local $dbh->{RaiseError} = 1;
+
+ eval {
+ $dbh->do('select 1 from rdb$database');
+ };
+
+ return $@ ? 0 : 1;
+}
+
+# We want dialect 3 for new features and quoting to work, DBD::InterBase uses
+# dialect 1 (interbase compat) by default.
+sub _init {
+ my $self = shift;
+ $self->_set_sql_dialect(3);
+}
+
+# Append ib_dialect=<n> to the DSN unless already present, then reconnect if
+# a connection was live so the new dialect takes effect.
+sub _set_sql_dialect {
+ my $self = shift;
+ my $val = shift || 3;
+
+ my $dsn = $self->_dbi_connect_info->[0];
+
+ # a coderef connect_info carries no DSN string to rewrite
+ return if ref($dsn) eq 'CODE';
+
+ if ($dsn !~ /ib_dialect=/) {
+ $self->_dbi_connect_info->[0] = "$dsn;ib_dialect=$val";
+ my $connected = defined $self->_dbh;
+ $self->disconnect;
+ $self->ensure_connected if $connected;
+ }
+}
+
+=head2 connect_call_use_softcommit
+
+Used as:
+
+ on_connect_call => 'use_softcommit'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set the
+L<DBD::InterBase> C<ib_softcommit> option.
+
+You need either this option or C<< disable_sth_caching => 1 >> for
+L<DBIx::Class> code to function correctly (otherwise you may get C<no statement
+executing> errors.)
+
+The downside of using this option is that your process will B<NOT> see UPDATEs,
+INSERTs and DELETEs from other processes for already open statements.
+
+=cut
+
+sub connect_call_use_softcommit {
+ my $self = shift;
+
+ $self->_dbh->{ib_softcommit} = 1;
+}
+
+=head2 connect_call_datetime_setup
+
+Used as:
+
+ on_connect_call => 'datetime_setup'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set the date and
+timestamp formats using:
+
+ $dbh->{ib_time_all} = 'ISO';
+
+See L<DBD::InterBase> for more details.
+
+The C<TIMESTAMP> data type supports up to 4 digits after the decimal point for
+second precision. The full precision is used.
+
+The C<DATE> data type stores the date portion only, and it B<MUST> be declared
+with:
+
+ data_type => 'date'
+
+in your Result class.
+
+Timestamp columns can be declared with either C<datetime> or C<timestamp>.
+
+You will need the L<DateTime::Format::Strptime> module for inflation to work.
+
+For L<DBIx::Class::Storage::DBI::ODBC::Firebird>, this is a noop and sub-second
+precision is not currently available.
+
+=cut
+
+sub connect_call_datetime_setup {
+ my $self = shift;
+
+ $self->_get_dbh->{ib_time_all} = 'ISO';
+}
+
+sub datetime_parser_type {
+ 'DBIx::Class::Storage::DBI::InterBase::DateTime::Format'
+}
+
+package # hide from PAUSE
+ DBIx::Class::Storage::DBI::InterBase::DateTime::Format;
+
+my $timestamp_format = '%Y-%m-%d %H:%M:%S.%4N'; # %F %T
+my $date_format = '%Y-%m-%d';
+
+my ($timestamp_parser, $date_parser);
+
+# Lazily-built DateTime::Format::Strptime parsers. TIMESTAMP values carry up
+# to 4 fractional-second digits (%4N); DATE values are date-only.
+sub parse_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $timestamp_parser ||= DateTime::Format::Strptime->new(
+ pattern => $timestamp_format,
+ on_error => 'croak',
+ );
+ return $timestamp_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $timestamp_parser ||= DateTime::Format::Strptime->new(
+ pattern => $timestamp_format,
+ on_error => 'croak',
+ );
+ return $timestamp_parser->format_datetime(shift);
+}
+
+sub parse_date {
+ shift;
+ require DateTime::Format::Strptime;
+ $date_parser ||= DateTime::Format::Strptime->new(
+ pattern => $date_format,
+ on_error => 'croak',
+ );
+ return $date_parser->parse_datetime(shift);
+}
+
+sub format_date {
+ shift;
+ require DateTime::Format::Strptime;
+ $date_parser ||= DateTime::Format::Strptime->new(
+ pattern => $date_format,
+ on_error => 'croak',
+ );
+ return $date_parser->format_datetime(shift);
+}
+
+1;
+
+=head1 CAVEATS
+
+=over 4
+
+=item *
+
+with L</connect_call_use_softcommit>, you will not be able to see changes made
+to data in other processes. If this is an issue, use
+L<disable_sth_caching|DBIx::Class::Storage::DBI/disable_sth_caching> as a
+workaround for the C<no statement executing> errors, this of course adversely
+affects performance.
+
+=item *
+
+C<last_insert_id> support by default only works for Firebird versions 2 or
+greater, L<auto_nextval|DBIx::Class::ResultSource/auto_nextval> however should
+work with earlier versions.
+
+=item *
+
+Sub-second precision for TIMESTAMPs is not currently available with ODBC.
+
+=back
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::ODBC::Firebird;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI::InterBase/;
+use mro 'c3';
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::ODBC::Firebird - Driver for using the Firebird RDBMS
+through ODBC
+
+=head1 SYNOPSIS
+
+Most functionality is provided by L<DBIx::Class::Storage::DBI::InterBase>, see
+that module for details.
+
+To build the ODBC driver for Firebird on Linux for unixODBC, see:
+
+L<http://www.firebirdnews.org/?p=1324>
+
+=cut
+
+# XXX seemingly no equivalent to ib_time_all from DBD::InterBase via ODBC
+sub connect_call_datetime_setup { 1 }
+
+# we don't need DBD::InterBase-specific initialization
+sub _init { 1 }
+
+# ODBC uses dialect 3 by default, good
+sub _set_sql_dialect { 1 }
+
+# releasing savepoints doesn't work, but that shouldn't matter
+sub _svp_release { 1 }
+
+sub datetime_parser_type {
+ 'DBIx::Class::Storage::DBI::ODBC::Firebird::DateTime::Format'
+}
+
+package # hide from PAUSE
+ DBIx::Class::Storage::DBI::ODBC::Firebird::DateTime::Format;
+
+# inherit parse/format date
+our @ISA = 'DBIx::Class::Storage::DBI::InterBase::DateTime::Format';
+
+my $timestamp_format = '%Y-%m-%d %H:%M:%S'; # %F %T, no fractional part
+my $timestamp_parser;
+
+# Same lazily-built parser scheme as the InterBase formats, but ODBC
+# timestamps carry no fractional seconds.
+sub parse_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $timestamp_parser ||= DateTime::Format::Strptime->new(
+ pattern => $timestamp_format,
+ on_error => 'croak',
+ );
+ return $timestamp_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $timestamp_parser ||= DateTime::Format::Strptime->new(
+ pattern => $timestamp_format,
+ on_error => 'croak',
+ );
+ return $timestamp_parser->format_datetime(shift);
+}
+
+1;
+
+=head1 CAVEATS
+
+=over 4
+
+=item *
+
+This driver (unlike L<DBD::InterBase>) does not currently support reading or
+writing C<TIMESTAMP> values with sub-second precision.
+
+=back
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
$self->throw_exception($exception) if $exception;
- wantarray ? @res : $res[0]
+ $wantarray ? @res : $res[0]
}
=head2 get_autoinc_seq
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use Scope::Guard ();
+
+# tests stolen from 749sybase_asa.t
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_FIREBIRD_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
+
+plan skip_all => <<'EOF' unless $dsn || $dsn2;
+Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},
+_USER and _PASS to run these tests
+EOF
+
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+my $schema;
+
+foreach my $conn_idx (0..$#info) {
+ my ($dsn, $user, $pass) = @{ $info[$conn_idx] || [] };
+
+ next unless $dsn;
+
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ auto_savepoint => 1,
+ quote_char => q["],
+ name_sep => q[.],
+ on_connect_call => 'use_softcommit',
+ });
+ my $dbh = $schema->storage->dbh;
+
+ my $sg = Scope::Guard->new(\&cleanup);
+
+ eval { $dbh->do(q[DROP TABLE "artist"]) };
+ $dbh->do(<<EOF);
+ CREATE TABLE "artist" (
+ "artistid" INT PRIMARY KEY,
+ "name" VARCHAR(255),
+ "charfield" CHAR(10),
+ "rank" INT DEFAULT 13
+ )
+EOF
+ eval { $dbh->do(q[DROP GENERATOR "gen_artist_artistid"]) };
+ $dbh->do('CREATE GENERATOR "gen_artist_artistid"');
+ eval { $dbh->do('DROP TRIGGER "artist_bi"') };
+ $dbh->do(<<EOF);
+ CREATE TRIGGER "artist_bi" FOR "artist"
+ ACTIVE BEFORE INSERT POSITION 0
+ AS
+ BEGIN
+ IF (NEW."artistid" IS NULL) THEN
+ NEW."artistid" = GEN_ID("gen_artist_artistid",1);
+ END
+EOF
+ eval { $dbh->do('DROP TABLE "sequence_test"') };
+ $dbh->do(<<EOF);
+ CREATE TABLE "sequence_test" (
+ "pkid1" INT NOT NULL,
+ "pkid2" INT NOT NULL,
+ "nonpkid" INT,
+ "name" VARCHAR(255)
+ )
+EOF
+ $dbh->do('ALTER TABLE "sequence_test" ADD CONSTRAINT "sequence_test_constraint" PRIMARY KEY ("pkid1", "pkid2")');
+ eval { $dbh->do('DROP GENERATOR "pkid1_seq"') };
+ eval { $dbh->do('DROP GENERATOR "pkid2_seq"') };
+ eval { $dbh->do('DROP GENERATOR "nonpkid_seq"') };
+ $dbh->do('CREATE GENERATOR "pkid1_seq"');
+ $dbh->do('CREATE GENERATOR "pkid2_seq"');
+ $dbh->do('SET GENERATOR "pkid2_seq" TO 9');
+ $dbh->do('CREATE GENERATOR "nonpkid_seq"');
+ $dbh->do('SET GENERATOR "nonpkid_seq" TO 19');
+
+ my $ars = $schema->resultset('Artist');
+ is ( $ars->count, 0, 'No rows at first' );
+
+# test primary key handling
+ my $new = $ars->create({ name => 'foo' });
+ ok($new->artistid, "Auto-PK worked");
+
+# test auto increment using generators WITHOUT triggers
+ for (1..5) {
+ my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
+ is($st->pkid1, $_, "Firebird Auto-PK without trigger: First primary key");
+ is($st->pkid2, $_ + 9, "Firebird Auto-PK without trigger: Second primary key");
+ is($st->nonpkid, $_ + 19, "Firebird Auto-PK without trigger: Non-primary key");
+ }
+ my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
+ is($st->pkid1, 55, "Firebird Auto-PK without trigger: First primary key set manually");
+
+# test savepoints
+ eval {
+ $schema->txn_do(sub {
+ eval {
+ $schema->txn_do(sub {
+ $ars->create({ name => 'in_savepoint' });
+ die "rolling back savepoint";
+ });
+ };
+ ok ((not $ars->search({ name => 'in_savepoint' })->first),
+ 'savepoint rolled back');
+ $ars->create({ name => 'in_outer_txn' });
+ die "rolling back outer txn";
+ });
+ };
+
+ like $@, qr/rolling back outer txn/,
+ 'correct exception for rollback';
+
+ ok ((not $ars->search({ name => 'in_outer_txn' })->first),
+ 'outer txn rolled back');
+
+# test explicit key spec
+ $new = $ars->create ({ name => 'bar', artistid => 66 });
+ is($new->artistid, 66, 'Explicit PK worked');
+ $new->discard_changes;
+ is($new->artistid, 66, 'Explicit PK assigned');
+
+# row update
+ lives_ok {
+ $new->update({ name => 'baz' })
+ } 'update survived';
+ $new->discard_changes;
+ is $new->name, 'baz', 'row updated';
+
+# test populate
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_$_" };
+ }
+ $ars->populate (\@pop);
+ });
+
+# test populate with explicit key
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
+ }
+ $ars->populate (\@pop);
+ });
+
+# count what we did so far
+ is ($ars->count, 6, 'Simple count works');
+
+# test ResultSet UPDATE
+ lives_and {
+ $ars->search({ name => 'foo' })->update({ rank => 4 });
+
+ is eval { $ars->search({ name => 'foo' })->first->rank }, 4;
+ } 'Can update a column';
+
+ my ($updated) = $schema->resultset('Artist')->search({name => 'foo'});
+ is eval { $updated->rank }, 4, 'and the update made it to the database';
+
+
+# test LIMIT support
+ my $lim = $ars->search( {},
+ {
+ rows => 3,
+ offset => 4,
+ order_by => 'artistid'
+ }
+ );
+ is( $lim->count, 2, 'ROWS+OFFSET count ok' );
+ is( $lim->all, 2, 'Number of ->all objects matches count' );
+
+# test iterator
+ $lim->reset;
+ is( eval { $lim->next->artistid }, 101, "iterator->next ok" );
+ is( eval { $lim->next->artistid }, 102, "iterator->next ok" );
+ is( $lim->next, undef, "next past end of resultset ok" );
+
+# test multiple executing cursors
+ {
+ my $rs1 = $ars->search({}, { order_by => { -asc => 'artistid' }});
+ my $rs2 = $ars->search({}, { order_by => { -desc => 'artistid' }});
+
+ is $rs1->next->artistid, 1, 'multiple cursors';
+ is $rs2->next->artistid, 102, 'multiple cursors';
+ }
+
+# test empty insert
+ {
+ local $ars->result_source->column_info('artistid')->{is_auto_increment} = 0;
+
+ lives_ok { $ars->create({}) }
+ 'empty insert works';
+ }
+
+# test blobs (stolen from 73oracle.t)
+ eval { $dbh->do('DROP TABLE "bindtype_test"') };
+ $dbh->do(q[
+ CREATE TABLE "bindtype_test"
+ (
+ "id" INT PRIMARY KEY,
+ "bytea" INT,
+ "blob" BLOB,
+ "clob" BLOB SUB_TYPE TEXT
+ )
+ ]);
+
+ my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
+ $binstr{'large'} = $binstr{'small'} x 1024;
+
+ my $maxloblen = length $binstr{'large'};
+ local $dbh->{'LongReadLen'} = $maxloblen;
+
+ my $rs = $schema->resultset('BindType');
+ my $id = 0;
+
+ foreach my $type (qw( blob clob )) {
+ foreach my $size (qw( small large )) {
+ $id++;
+
+# turn off horrendous binary DBIC_TRACE output
+ local $schema->storage->{debug} = 0;
+
+ lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
+ "inserted $size $type without dying";
+
+ ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
+ }
+ }
+}
+
+done_testing;
+
+# clean up our mess
+
+sub cleanup {
+ my $dbh;
+ eval {
+ $schema->storage->disconnect; # to avoid object FOO is in use errors
+ $dbh = $schema->storage->dbh;
+ };
+ # no live handle means nothing was created -- nothing to drop
+ return unless $dbh;
+
+ # trigger must go before its generator/table; diag (not die) on failure
+ # so one broken drop does not hide the rest
+ eval { $dbh->do('DROP TRIGGER "artist_bi"') };
+ diag $@ if $@;
+
+ foreach my $generator (qw/gen_artist_artistid pkid1_seq pkid2_seq
+ nonpkid_seq/) {
+ eval { $dbh->do(qq{DROP GENERATOR "$generator"}) };
+ diag $@ if $@;
+ }
+
+ foreach my $table (qw/artist bindtype_test sequence_test/) {
+ eval { $dbh->do(qq[DROP TABLE "$table"]) };
+ diag $@ if $@;
+ }
+}
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use Scope::Guard ();
+
+# XXX we're only testing TIMESTAMP here
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_FIREBIRD_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
+
+if (not ($dsn || $dsn2)) {
+  plan skip_all => <<'EOF';
+Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},
+_USER and _PASS to run this test.
+Warning: This test drops and creates a table called 'event'.
+EOF
+} else {
+ eval "use DateTime; use DateTime::Format::Strptime;";
+ if ($@) {
+ plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
+ }
+}
+
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+my $schema;
+
+foreach my $conn_idx (0..$#info) {
+ my ($dsn, $user, $pass) = @{ $info[$conn_idx] || [] };
+
+ next unless $dsn;
+
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ quote_char => '"',
+ name_sep => '.',
+ on_connect_call => [ 'datetime_setup' ],
+ });
+
+ my $sg = Scope::Guard->new(\&cleanup);
+
+ eval { $schema->storage->dbh->do('DROP TABLE "event"') };
+ $schema->storage->dbh->do(<<'SQL');
+ CREATE TABLE "event" (
+ "id" INT PRIMARY KEY,
+ "starts_at" DATE,
+ "created_on" TIMESTAMP
+ )
+SQL
+ my $rs = $schema->resultset('Event');
+
+ my $dt = DateTime->now;
+ $dt->set_nanosecond($dsn =~ /odbc/i ? 0 : 555600000);
+
+ my $date_only = DateTime->new(
+ year => $dt->year, month => $dt->month, day => $dt->day
+ );
+
+ my $row;
+ ok( $row = $rs->create({
+ id => 1,
+ starts_at => $date_only,
+ created_on => $dt,
+ }));
+ ok( $row = $rs->search({ id => 1 }, { select => [qw/starts_at created_on/] })
+ ->first
+ );
+ is $row->created_on, $dt, 'TIMESTAMP as DateTime roundtrip';
+
+ cmp_ok $row->created_on->nanosecond, '==', $dt->nanosecond,
+ 'fractional part of a second survived' if 0+$dt->nanosecond;
+
+ is $row->starts_at, $date_only, 'DATE as DateTime roundtrip';
+}
+
+done_testing;
+
+# clean up our mess
+# Drop the test table; best-effort, silently skipped if no handle survives.
+sub cleanup {
+ my $dbh;
+ eval {
+ $schema->storage->disconnect; # to avoid object FOO is in use errors
+ $dbh = $schema->storage->dbh;
+ };
+ return unless $dbh;
+
+ eval { $dbh->do(qq{DROP TABLE "$_"}) } for qw/event/;
+}
__PACKAGE__->add_columns(
id => { data_type => 'integer', is_auto_increment => 1 },
- starts_at => { data_type => 'datetime' },
+
+# this MUST be 'date' for the Firebird tests
+ starts_at => { data_type => 'date' },
+
created_on => { data_type => 'timestamp' },
varchar_date => { data_type => 'varchar', inflate_date => 1, size => 20, is_nullable => 1 },
varchar_datetime => { data_type => 'varchar', inflate_datetime => 1, size => 20, is_nullable => 1 },
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Sat Mar 6 13:01:48 2010
+-- Created on Sat Mar 6 18:04:27 2010
--
;
--
CREATE TABLE event (
id INTEGER PRIMARY KEY NOT NULL,
- starts_at datetime NOT NULL,
+ starts_at date NOT NULL,
created_on timestamp NOT NULL,
varchar_date varchar(20),
varchar_datetime varchar(20),