coderef, allowing better integration with Catalyst
- Fixed a complex prefetch + regular join regression introduced
in 0.08108
+ - Fixed insert_bulk rebless handling
+ - Fixed Storable roundtrip regression, and general serialization
+ cleanup
- SQLT related fixes:
- sqlt_type is now called on the correct storage object
- hooks can now see the correct producer_type
+ - optional SQLT requirements for e.g. deploy() bumped to 0.11002
+ - Automatically detect MySQL v3 and use INNER JOIN instead of JOIN
- POD improvements
0.08109 2009-08-18 08:35:00 (UTC)
all_from 'lib/DBIx/Class.pm';
-test_requires 'Test::Builder' => 0.33;
-test_requires 'Test::Deep' => 0;
-test_requires 'Test::Exception' => 0;
-test_requires 'Test::More' => 0.92;
-test_requires 'Test::Warn' => 0.21;
+test_requires 'Test::Builder' => '0.33';
+test_requires 'Test::Deep' => '0';
+test_requires 'Test::Exception' => '0';
+test_requires 'Test::More' => '0.92';
+test_requires 'Test::Warn' => '0.21';
-test_requires 'File::Temp' => 0.22;
+test_requires 'File::Temp' => '0.22';
# Core
-requires 'List::Util' => 0;
-requires 'Scalar::Util' => 0;
-requires 'Storable' => 0;
+requires 'List::Util' => '0';
+requires 'Scalar::Util' => '0';
+requires 'Storable' => '0';
# Perl 5.8.0 doesn't have utf8::is_utf8()
-requires 'Encode' => 0 if ($] <= 5.008000);
+requires 'Encode' => '0' if ($] <= 5.008000);
# Dependencies (keep in alphabetical order)
-requires 'Carp::Clan' => 6.0;
-requires 'Class::Accessor::Grouped' => 0.09000;
-requires 'Class::C3::Componentised' => 1.0005;
-requires 'Class::Inspector' => 1.24;
-requires 'Data::Page' => 2.00;
-requires 'DBD::SQLite' => 1.25;
-requires 'DBI' => 1.605;
-requires 'JSON::Any' => 1.18;
-requires 'MRO::Compat' => 0.09;
-requires 'Module::Find' => 0.06;
-requires 'Path::Class' => 0.16;
-requires 'Scope::Guard' => 0.03;
-requires 'SQL::Abstract' => 1.56;
-requires 'SQL::Abstract::Limit' => 0.13;
-requires 'Sub::Name' => 0.04;
-
-recommends 'SQL::Translator' => 0.09004;
+requires 'Carp::Clan' => '6.0';
+requires 'Class::Accessor::Grouped' => '0.09000';
+requires 'Class::C3::Componentised' => '1.0005';
+requires 'Class::Inspector' => '1.24';
+requires 'Data::Page' => '2.00';
+requires 'DBD::SQLite' => '1.25';
+requires 'DBI' => '1.605';
+requires 'JSON::Any' => '1.18';
+requires 'MRO::Compat' => '0.09';
+requires 'Module::Find' => '0.06';
+requires 'Path::Class' => '0.16';
+requires 'Scope::Guard' => '0.03';
+requires 'SQL::Abstract' => '1.56';
+requires 'SQL::Abstract::Limit' => '0.13';
+requires 'Sub::Name' => '0.04';
my %replication_requires = (
- 'Moose', => 0.87,
- 'MooseX::AttributeHelpers' => 0.21,
- 'MooseX::Types', => 0.16,
- 'namespace::clean' => 0.11,
- 'Hash::Merge', => 0.11,
+ 'Moose', => '0.87',
+ 'MooseX::AttributeHelpers' => '0.21',
+ 'MooseX::Types', => '0.16',
+ 'namespace::clean' => '0.11',
+ 'Hash::Merge', => '0.11',
);
+# when changing also adjust $DBIx::Class::minimum_sqlt_version
+my $sqlt_recommends = '0.11002';
+
+recommends 'SQL::Translator' => $sqlt_recommends;
+
my %force_requires_if_author = (
%replication_requires,
-# 'Module::Install::Pod::Inherit' => 0.01,
- 'Test::Pod::Coverage' => 1.04,
- 'SQL::Translator' => 0.09007,
+# 'Module::Install::Pod::Inherit' => '0.01',
+ 'Test::Pod::Coverage' => '1.04',
+ 'SQL::Translator' => $sqlt_recommends,
# CDBI-compat related
- 'DBIx::ContextualFetch' => 0,
- 'Class::DBI::Plugin::DeepAbstractSearch' => 0,
- 'Class::Trigger' => 0,
- 'Time::Piece::MySQL' => 0,
- 'Clone' => 0,
- 'Date::Simple' => 3.03,
+ 'DBIx::ContextualFetch' => '0',
+ 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
+ 'Class::Trigger' => '0',
+ 'Time::Piece::MySQL' => '0',
+ 'Clone' => '0',
+ 'Date::Simple' => '3.03',
# t/52cycle.t
- 'Test::Memory::Cycle' => 0,
- 'Devel::Cycle' => 1.10,
+ 'Test::Memory::Cycle' => '0',
+ 'Devel::Cycle' => '1.10',
# t/36datetime.t
# t/60core.t
- 'DateTime::Format::SQLite' => 0,
+ 'DateTime::Format::SQLite' => '0',
# t/96_is_deteministic_value.t
- 'DateTime::Format::Strptime'=> 0,
+ 'DateTime::Format::Strptime'=> '0',
# database-dependent reqs
#
$ENV{DBICTEST_PG_DSN}
? (
- 'Sys::SigAction' => 0,
- 'DBD::Pg' => 2.009002,
- 'DateTime::Format::Pg' => 0,
+ 'Sys::SigAction' => '0',
+ 'DBD::Pg' => '2.009002',
+ 'DateTime::Format::Pg' => '0',
) : ()
,
$ENV{DBICTEST_MYSQL_DSN}
? (
- 'DateTime::Format::MySQL' => 0,
+ 'DateTime::Format::MySQL' => '0',
) : ()
,
$ENV{DBICTEST_ORACLE_DSN}
? (
- 'DateTime::Format::Oracle' => 0,
+ 'DateTime::Format::Oracle' => '0',
) : ()
,
$VERSION = eval $VERSION; # numify for warning-free dev releases
+# what version of sqlt do we require if deploy() without a ddl_dir is invoked
+# when changing also adjust $sqlt_recommends in Makefile.PL
+my $minimum_sqlt_version = '0.11002';
+
sub MODIFY_CODE_ATTRIBUTES {
my ($class,$code,@attrs) = @_;
$class->mk_classdata('__attr_cache' => {})
return $@ ? $cache : { %$cache, %$rest };
}
+# SQLT version handling
+{
+  my $_sqlt_version_ok; # private
+  my $_sqlt_version_error; # private
+
+  # Lazily check (once per process) that SQL::Translator is installed at
+  # $minimum_sqlt_version or newer; caches both the boolean outcome and,
+  # on failure, the load error for later reporting.
+  sub _sqlt_version_ok {
+    if (!defined $_sqlt_version_ok) {
+      # string-eval so the version requirement is checked at runtime,
+      # not at compile time of this module
+      eval "use SQL::Translator $minimum_sqlt_version";
+      if ($@) {
+        $_sqlt_version_ok = 0;
+        $_sqlt_version_error = $@;
+      }
+      else {
+        $_sqlt_version_ok = 1;
+      }
+    }
+    return $_sqlt_version_ok;
+  }
+
+  # Returns the cached load/version error (undef if the check passed);
+  # triggers the check first if it has not yet run.
+  sub _sqlt_version_error {
+    shift->_sqlt_version_ok unless defined $_sqlt_version_ok;
+    return $_sqlt_version_error;
+  }
+
+  # Accessor for the minimum required SQL::Translator version string.
+  sub _sqlt_minimum_version { $minimum_sqlt_version };
+}
+
+
+
+
1;
=head1 NAME
L<DBIx::Class::Manual::DocMap> lists each task you might want help on, and
the modules where you will find documentation.
+=head1 COPYRIGHT
+
+Copyright (c) 2005 - 2009 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
+as listed below.
+
=head1 AUTHOR
mst: Matt S. Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
-You may distribute this code under the same terms as Perl itself.
+This library is free software and may be distributed under the same terms
+as perl itself.
=cut
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/
+ Serialize::Storable
Relationship
InflateColumn
PK::Auto
change, they may not work, etc. So, use them if you want, but
be warned.
-L<DBIx::Class::Serialize> - Hooks for Storable freeze/thaw.
-
-L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
-
L<DBIx::Class::Validation> - Validate all data before submitting to your database.
=head2 Core
L<DBIx::Class::ResultSourceProxy::Table> - Provides a classdata table object and method proxies.
+L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
+
L<DBIx::Class::Row> - Basic row methods.
=head1 SEE ALSO
=head1 AUTHOR
Aran Clary Deltac <bluefeet@cpan.org>
-
Alternatively, you can send the conversion sql scripts to your
customers as above.
-=head2 Setting quoting for the generated SQL.
+=head2 Setting quoting for the generated SQL
If the database contains column names with spaces and/or reserved words, they
need to be quoted in the SQL queries. This is done using:
=item L<DBIx::Class::Core> - Set of standard components to load.
-=item L<DBIx::Class::Serialize::Storable> - ?
-
=item L<DBIx::Class::InflateColumn> - Making objects out of your columns.
=item L<DBIx::Class::InflateColumn::DateTime> - Magically turn your datetime or timestamp columns into DateTime objects.
Install DBIx::Class via CPAN should be sufficient.
-=head3 Create the database/tables.
+=head3 Create the database/tables
First make and change the directory:
1;
-=head3 Write a script to insert some records.
+=head3 Write a script to insert some records
insertdb.pl
->search({'created_time' => { '>=', '2006-06-01 00:00:00' } })
-Note that to use a function here you need to make the whole value into
-a scalar reference:
+Note that to use a function here you need to make it a scalar
+reference:
- ->search({'created_time' => \'>= yesterday()' })
+ ->search({'created_time' => { '>=', \'yesterday()' } })
=item .. search in several tables simultaneously?
query, which can be accessed similarly to a table, see your database
documentation for details.
-=item .. search using greater-than or less-than and database functions?
-
-To use functions or literal SQL with conditions other than equality
-you need to supply the entire condition, for example:
-
- my $interval = "< now() - interval '12 hours'";
- ->search({last_attempt => \$interval})
-
-and not:
-
- my $interval = "now() - interval '12 hours'";
- ->search({last_attempt => { '<' => \$interval } })
-
=item .. search with an SQL function on the left hand side?
To use an SQL function on the left hand side of a comparison:
if ( $attrs->{join} || $attrs->{prefetch} ) {
- $self->throw_exception ('join/prefetch can not be used with a literal scalarref {from}')
+ $self->throw_exception ('join/prefetch can not be used with a custom {from}')
if ref $attrs->{from} ne 'ARRAY';
my $join = delete $attrs->{join} || {};
sub _calculate_score {
my ($self, $a, $b) = @_;
+ if (defined $a xor defined $b) {
+ return 0;
+ }
+ elsif (not defined $a) {
+ return 1;
+ }
+
if (ref $b eq 'HASH') {
my ($b_key) = keys %{$b};
if (ref $a eq 'HASH') {
use DBIx::Class::ResultSet;
use DBIx::Class::ResultSourceHandle;
use Carp::Clan qw/^DBIx::Class/;
-use Storable;
use base qw/DBIx::Class/;
my $type;
if ($force_left) {
$type = 'left';
- } else {
- $type = $rel_info->{attrs}{join_type} || '';
- $force_left = 1 if lc($type) eq 'left';
+ }
+ else {
+ $type = $rel_info->{attrs}{join_type};
+ $force_left = 1 if lc($type||'') eq 'left';
}
my $rel_src = $self->related_source($join);
my $to_serialize = { %$self };
- my $class = $self->schema->class($self->source_moniker);
- $to_serialize->{schema} = $class;
+ delete $to_serialize->{schema};
+ $to_serialize->{_frozen_from_class} = $self->schema->class($self->source_moniker);
+
return (Storable::freeze($to_serialize));
}
sub STORABLE_thaw {
- my ($self, $cloning,$ice) = @_;
+ my ($self, $cloning, $ice) = @_;
%$self = %{ Storable::thaw($ice) };
- my $class = delete $self->{schema};
+ my $class = delete $self->{_frozen_from_class};
if( $thaw_schema ) {
$self->{schema} = $thaw_schema;
}
foreach my $j (@join) {
my ($to, $on) = @$j;
+
# check whether a join type exists
- my $join_clause = '';
my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to;
- if (ref($to_jt) eq 'HASH' and exists($to_jt->{-join_type})) {
- $join_clause = ' '.uc($to_jt->{-join_type}).' JOIN ';
- } else {
- $join_clause = ' JOIN ';
+ my $join_type;
+ if (ref($to_jt) eq 'HASH' and defined($to_jt->{-join_type})) {
+ $join_type = $to_jt->{-join_type};
+ $join_type =~ s/^\s+ | \s+$//xg;
}
- push(@sqlf, $join_clause);
+
+ $join_type = $self->{_default_jointype} if not defined $join_type;
+
+ my $join_clause = sprintf ('%s JOIN ',
+ $join_type ? ' ' . uc($join_type) : ''
+ );
+ push @sqlf, $join_clause;
if (ref $to eq 'ARRAY') {
push(@sqlf, '(', $self->_recurse_from(@$to), ')');
return;
}
- eval 'require SQL::Translator "0.09003"';
- if ($@) {
- $self->throw_exception("SQL::Translator 0.09003 required");
- }
+ $self->throw_exception($self->_sqlt_version_error)
+ if (not $self->_sqlt_version_ok);
my $db_tr = SQL::Translator->new({
add_drop_table => 1,
my ($self, $cloning) = @_;
my $to_serialize = { %$self };
+ # The source is either derived from _source_handle or is
+ # reattached in the thaw handler below
delete $to_serialize->{result_source};
- delete $to_serialize->{related_resultsets};
- delete $to_serialize->{_inflated_column};
+
+  # If the parser is cached there is a chance that the interpreter
+ # which receives the ice will not have the parser loaded
+ # A re-determination will force an implicit load
+ delete $to_serialize->{__datetime_parser};
+
+ # Dynamic values, easy to recalculate
+ delete $to_serialize->{$_} for qw/related_resultsets _inflated_column/;
return (Storable::freeze($to_serialize));
}
my ($self, $cloning, $serialized) = @_;
%$self = %{ Storable::thaw($serialized) };
+
+ # if the handle went missing somehow, reattach
$self->result_source($self->result_source_instance)
- if $self->can('result_source_instance');
+ if !$self->_source_handle && $self->can('result_source_instance');
}
1;
my $schema = MySchema->connect('dbi:SQLite:my.db');
$schema->storage->debug(1);
- $schema->dbh_do("DROP TABLE authors");
+
+ my @stuff = $schema->storage->dbh_do(
+ sub {
+ my ($storage, $dbh, @args) = @_;
+ $dbh->do("DROP TABLE authors");
+ },
+ @column_list
+ );
$schema->resultset('Book')->search({
written_on => $schema->storage->datetime_parser(DateTime->now)
my $self = shift;
my $code = shift;
- my $dbh = $self->_dbh;
+ my $dbh = $self->_get_dbh;
return $self->$code($dbh, @_) if $self->{_in_dbh_do}
|| $self->{transaction_depth};
my $want_array = wantarray;
eval {
- $self->_verify_pid if $dbh;
- if(!$self->_dbh) {
- $self->_populate_dbh;
- $dbh = $self->_dbh;
- }
if($want_array) {
@result = $self->$code($dbh, @_);
my $tried = 0;
while(1) {
eval {
- $self->_verify_pid if $self->_dbh;
- $self->_populate_dbh if !$self->_dbh;
+ $self->_get_dbh;
$self->txn_begin;
if($want_array) {
# this is the internal "get dbh or connect (don't check)" method
sub _get_dbh {
my $self = shift;
+ $self->_verify_pid if $self->_dbh;
$self->_populate_dbh unless $self->_dbh;
return $self->_dbh;
}
if ($self->_dbh) { # we are connected
$driver = $self->_dbh->{Driver}{Name};
} else {
- # try to use dsn to not require being connected, the driver may still
- # force a connection in _rebless to determine version
- ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i;
- $started_unconnected = 1;
+ # if connect_info is a CODEREF, we have no choice but to connect
+ if (ref $self->_dbi_connect_info->[0] &&
+ Scalar::Util::reftype($self->_dbi_connect_info->[0]) eq 'CODE') {
+ $self->_populate_dbh;
+ $driver = $self->_dbh->{Driver}{Name};
+ }
+ else {
+ # try to use dsn to not require being connected, the driver may still
+ # force a connection in _rebless to determine version
+ ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i;
+ $started_unconnected = 1;
+ }
}
my $storage_class = "DBIx::Class::Storage::DBI::${driver}";
my @bind = map { [ undef, $_ ] } @do_args;
$self->_query_start($sql, @bind);
- $self->_dbh->do($sql, $attrs, @do_args);
+ $self->_get_dbh->do($sql, $attrs, @do_args);
$self->_query_end($sql, @bind);
}
## only prepped once.
sub insert_bulk {
my ($self, $source, $cols, $data) = @_;
+
+# redispatch to insert_bulk method of storage we reblessed into, if necessary
+ if (not $self->_driver_determined) {
+ $self->_determine_driver;
+ goto $self->can('insert_bulk');
+ }
+
my %colvalues;
my $table = $source->from;
@colvalues{@$cols} = (0..$#$cols);
my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues);
- $self->_determine_driver;
-
$self->_query_start( $sql, @bind );
my $sth = $self->sth($sql);
%{$sqltargs || {}}
};
- $self->throw_exception(q{Can't create a ddl file without SQL::Translator 0.09003: '}
- . $self->_check_sqlt_message . q{'})
- if !$self->_check_sqlt_version;
+ $self->throw_exception("Can't create a ddl file without SQL::Translator: " . $self->_sqlt_version_error)
+ if !$self->_sqlt_version_ok;
my $sqlt = SQL::Translator->new( $sqltargs );
return join('', @rows);
}
- $self->throw_exception(q{Can't deploy without SQL::Translator 0.09003: '}
- . $self->_check_sqlt_message . q{'})
- if !$self->_check_sqlt_version;
+ $self->throw_exception("Can't deploy without either SQL::Translator or a ddl_dir: " . $self->_sqlt_version_error )
+ if !$self->_sqlt_version_ok;
# sources needs to be a parser arg, but for simplicity allow at top level
# coming in
sub build_datetime_parser {
my $self = shift;
my $type = $self->datetime_parser_type(@_);
- eval "use ${type}";
- $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ $self->ensure_class_loaded ($type);
return $type;
}
-{
- my $_check_sqlt_version; # private
- my $_check_sqlt_message; # private
- sub _check_sqlt_version {
- return $_check_sqlt_version if defined $_check_sqlt_version;
- eval 'use SQL::Translator "0.09003"';
- $_check_sqlt_message = $@ || '';
- $_check_sqlt_version = !$@;
- }
-
- sub _check_sqlt_message {
- _check_sqlt_version if !defined $_check_sqlt_message;
- $_check_sqlt_message;
- }
-}
=head2 is_replicating
=head1 IMPLEMENTATION NOTES
+=head2 IDENTITY information
+
Microsoft SQL Server supports three methods of retrieving the IDENTITY
value for inserted row: IDENT_CURRENT, @@IDENTITY, and SCOPE_IDENTITY().
SCOPE_IDENTITY is used here because it is the safest. However, it must
inserts into another table with an identity will give erroneous results on
recent versions of SQL Server.
+=head2 bulk_insert
+
+Be aware that we have tried to make things as simple as possible for our users.
+For MSSQL that means that when a user tries to do a populate/bulk_insert which
+includes an autoincrementing column, we will try to tell the database to allow
+the insertion of the autoinc column. But the user must have the db_ddladmin
+role membership, otherwise they will get a fairly opaque error message.
+
=head1 AUTHOR
See L<DBIx::Class/CONTRIBUTORS>.
# delete them
splice @$r, $i+1, ($#{$r} - $i), ();
+# make sure master/replicants opts don't clash
+ my %master_opts = %{ $self->_master_connect_info_opts };
+ if (exists $opts{dbh_maker}) {
+ delete @master_opts{qw/dsn user password/};
+ }
+ delete $master_opts{dbh_maker};
+
# merge with master
- %opts = %{ merge(\%opts, $self->_master_connect_info_opts) };
+ %opts = %{ merge(\%opts, \%master_opts) };
# update
$r->[$i] = \%opts;
use DBIx::Class::Storage::DBI::Replicated::Replicant;
use List::Util 'sum';
use Scalar::Util 'reftype';
+use DBI ();
use Carp::Clan qw/^DBIx::Class/;
use MooseX::Types::Moose qw/Num Int ClassName HashRef/;
},
);
+has next_unknown_replicant_id => (
+ is => 'rw',
+ metaclass => 'Counter',
+ isa => Int,
+ default => 1,
+ provides => {
+ inc => 'inc_unknown_replicant_id'
+ },
+);
+
=head1 METHODS
This class defines the following methods.
$connect_info = [ $connect_info ]
if reftype $connect_info ne 'ARRAY';
- croak "coderef replicant connect_info not supported"
- if ref $connect_info->[0] && reftype $connect_info->[0] eq 'CODE';
-
- my $replicant = $self->connect_replicant($schema, $connect_info);
+ my $connect_coderef =
+ (reftype($connect_info->[0])||'') eq 'CODE' ? $connect_info->[0]
+ : (reftype($connect_info->[0])||'') eq 'HASH' &&
+ $connect_info->[0]->{dbh_maker};
+
+ my $dsn;
+ my $replicant = do {
+# yes this is evil, but it only usually happens once (for coderefs)
+# this will fail if the coderef does not actually DBI::connect
+ no warnings 'redefine';
+ my $connect = \&DBI::connect;
+ local *DBI::connect = sub {
+ $dsn = $_[1];
+ goto $connect;
+ };
+ $self->connect_replicant($schema, $connect_info);
+ };
+
+ my $key;
+
+ if (!$dsn) {
+ if (!$connect_coderef) {
+ $dsn = $connect_info->[0];
+ $dsn = $dsn->{dsn} if (reftype($dsn)||'') eq 'HASH';
+ }
+ else {
+ # all attempts to get the DSN failed
+ $key = "UNKNOWN_" . $self->next_unknown_replicant_id;
+ $self->inc_unknown_replicant_id;
+ }
+ }
+ if ($dsn) {
+ $replicant->dsn($dsn);
+ ($key) = ($dsn =~ m/^dbi\:.+\:(.+)$/i);
+ }
- my $key = $connect_info->[0];
- $key = $key->{dsn} if ref $key && reftype $key eq 'HASH';
- ($key) = ($key =~ m/^dbi\:.+\:(.+)$/);
+ $replicant->id($key);
+ $self->set_replicant($key => $replicant);
- $self->set_replicant( $key => $replicant);
push @newly_created, $replicant;
}
use Moose::Role;
requires qw/_query_start/;
with 'DBIx::Class::Storage::DBI::Replicated::WithDSN';
-use MooseX::Types::Moose 'Bool';
+use MooseX::Types::Moose qw/Bool Str/;
use namespace::clean -except => 'meta';
default=>1,
);
+has dsn => (is => 'rw', isa => Str);
+has id => (is => 'rw', isa => Str);
+
=head1 METHODS
This class defines the following methods.
package DBIx::Class::Storage::DBI::Replicated::WithDSN;
use Moose::Role;
+use Scalar::Util 'reftype';
requires qw/_query_start/;
use namespace::clean -except => 'meta';
around '_query_start' => sub {
my ($method, $self, $sql, @bind) = @_;
- my $dsn = $self->_dbi_connect_info->[0];
+
+ my $dsn = eval { $self->dsn } || $self->_dbi_connect_info->[0];
+
my($op, $rest) = (($sql=~m/^(\w+)(.+)$/),'NOP', 'NO SQL');
my $storage_type = $self->can('active') ? 'REPLICANT' : 'MASTER';
- $self->$method("$op [DSN_$storage_type=$dsn]$rest", @bind);
+ my $query = do {
+ if ((reftype($dsn)||'') ne 'CODE') {
+ "$op [DSN_$storage_type=$dsn]$rest";
+ }
+ elsif (my $id = eval { $self->id }) {
+ "$op [$storage_type=$id]$rest";
+ }
+ else {
+ "$op [$storage_type]$rest";
+ }
+ };
+
+ $self->$method($query, @bind);
};
=head1 ALSO SEE
$dbh->{mysql_insertid};
}
+# we need to figure out what mysql version we're running
+# Override sql_maker so the first construction of the maker can inspect
+# the server version and adjust join generation accordingly.
+sub sql_maker {
+  my $self = shift;
+
+  unless ($self->_sql_maker) {
+    # delegate actual construction to the parent class; it stores the
+    # result in _sql_maker, which we return below
+    my $maker = $self->next::method (@_);
+
+    # mysql 3 does not understand a bare JOIN
+    # get_info(18) is SQL_DBMS_VER; note this forces a connection
+    my $mysql_ver = $self->_get_dbh->get_info(18);
+    $maker->{_default_jointype} = 'INNER' if $mysql_ver =~ /^3/;
+  }
+
+  return $self->_sql_maker;
+}
+
+
sub sqlt_type {
return 'MySQL';
}
'DBIx::Class::ResultSetProxy' => { skip => 1 },
'DBIx::Class::ResultSourceProxy' => { skip => 1 },
'DBIx::Class::Storage::Statistics' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::Replicated::Types' => { skip => 1 },
# test some specific components whose parents are exempt below
'DBIx::Class::Storage::DBI::Replicated*' => {},
use lib qw(t/lib);
use DBICTest;
use DBI::Const::GetInfoType;
+use DBIC::SqlMakerTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
-plan tests => 19;
-
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
my $dbh = $schema->storage->dbh;
#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', '');
+# make sure sqlt_type overrides work (::Storage::DBI::mysql does this)
+{
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+ ok (!$schema->storage->_dbh, 'definitely not connected');
+ is ($schema->storage->sqlt_type, 'MySQL', 'sqlt_type correct pre-connection');
+}
+
# This is in Core now, but it's here just to test that it doesn't break
$schema->class('Artist')->load_components('PK::Auto');
my $type_info = $schema->storage->columns_info_for('artist');
is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
+
+
}
my $cd = $schema->resultset ('CD')->create ({});
my $producer = $schema->resultset ('Producer')->create ({});
lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die';
+{
+ my $artist = $schema->resultset('Artist')->next;
+ my $cd = $schema->resultset('CD')->next;
+ $cd->set_from_related ('artist', $artist);
+ $cd->update;
+
+ my $rs = $schema->resultset('CD')->search ({}, { prefetch => 'artist' });
+
+ lives_ok sub {
+ my $cd = $rs->next;
+ is ($cd->artist->name, $artist->name, 'Prefetched artist');
+ }, 'join does not throw (mysql 3 test)';
+
+ # induce a jointype override, make sure it works even if we don't have mysql3
+ local $schema->storage->sql_maker->{_default_jointype} = 'inner';
+ is_same_sql_bind (
+ $rs->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield
+ FROM cd me
+ INNER JOIN artist artist ON artist.artistid = me.artist
+ )',
+ [],
+ 'overriden default join type works',
+ );
+}
## Can we properly deal with the null search problem?
##
is $artist => undef
=> 'Nothing Found!';
}
+
+done_testing;
$schema->source("SequenceTest")->name("testschema.sequence_test");
{
local $SIG{__WARN__} = sub {};
- _cleanup ($dbh);
+ _cleanup ($schema);
my $artist_table_def = <<EOS;
(
is($st->pkid1, 55, "Oracle Auto-PK without trigger: First primary key set manually");
sub _cleanup {
- my $dbh = shift or return;
+ my $schema = shift or return;
+ local $SIG{__WARN__} = sub {};
for my $stat (
- 'DROP TABLE testschema.artist',
- 'DROP TABLE testschema.casecheck',
- 'DROP TABLE testschema.sequence_test',
- 'DROP TABLE testschema.array_test',
+ 'DROP SCHEMA testschema CASCADE',
+ 'DROP SCHEMA anothertestschema CASCADE',
+ 'DROP SCHEMA yetanothertestschema CASCADE',
'DROP SEQUENCE pkid1_seq',
'DROP SEQUENCE pkid2_seq',
'DROP SEQUENCE nonpkid_seq',
- 'DROP SCHEMA testschema',
- 'DROP TABLE anothertestschema.artist',
- 'DROP SCHEMA anothertestschema',
- 'DROP TABLE yetanothertestschema.artist',
- 'DROP SCHEMA yetanothertestschema',
) {
- eval { $dbh->do ($stat) };
+ eval { $schema->storage->_do_query ($stat) };
}
}
done_testing;
-END { _cleanup($dbh) }
+END { _cleanup($schema) }
});
lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
$schema->populate ('Owners', [
[qw/id name /],
[qw/1 wiggle/],
}, 'populate with PKs supplied ok' );
lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
$schema->populate ('BooksInLibrary', [
[qw/source owner title /],
[qw/Library 1 secrets0/],
use lib qw(t/lib);
use DBICTest;
-eval "use SQL::Translator";
-plan skip_all => 'SQL::Translator required' if $@;
+BEGIN {
+ require DBIx::Class;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version
+ if not DBIx::Class->_sqlt_version_ok;
+}
my $schema = DBICTest->init_schema (no_deploy => 1);
#!/usr/bin/perl
+
use strict;
use warnings;
use Test::More;
plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn);
-
- eval "use DBD::mysql; use SQL::Translator 0.09003;";
- plan $@
- ? ( skip_all => 'needs DBD::mysql and SQL::Translator 0.09003 for testing' )
- : ( tests => 22 );
+ require DBIx::Class;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version
+ if not DBIx::Class->_sqlt_version_ok;
}
my $version_table_name = 'dbix_class_schema_versions';
unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) {
unlink $_ for (values %$fn);
}
+
+done_testing;
use lib qw(t/lib);
use DBIC::SqlMakerTest;
-plan tests => 4;
-
use_ok('DBICTest');
-my $schema = DBICTest->init_schema();
+my $schema = DBICTest->init_schema(no_deploy => 1);
my $sql_maker = $schema->storage->sql_maker;
);
}
+# make sure the cookbook caveat of { $op, \'...' } no longer applies
+{
+ my ($sql, @bind) = $sql_maker->where({
+ last_attempt => \ '< now() - interval "12 hours"',
+ next_attempt => { '<', \ 'now() - interval "12 hours"' },
+ created => [
+ { '<=', \ '1969' },
+ \ '> 1984',
+ ],
+ });
+ is_same_sql_bind(
+ $sql,
+ \@bind,
+ 'WHERE
+ (created <= 1969 OR created > 1984 )
+ AND last_attempt < now() - interval "12 hours"
+ AND next_attempt < now() - interval "12 hours"
+ ',
+ [],
+ );
+}
+
# Make sure the carp/croak override in SQLA works (via SQLAHacks)
my $file = __FILE__;
$file = "\Q$file\E";
throws_ok (sub {
$schema->resultset ('Artist')->search ({}, { order_by => { -asc => 'stuff', -desc => 'staff' } } )->as_query;
}, qr/$file/, 'Exception correctly croak()ed');
+
+done_testing;
use lib qw(t/lib);
use DBICTest;
-
BEGIN {
- eval "use SQL::Translator 0.09003;";
- if ($@) {
- plan skip_all => 'needs SQL::Translator 0.09003 for testing';
- }
+ require DBIx::Class;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version
+ if not DBIx::Class->_sqlt_version_ok;
}
my $schema = DBICTest->init_schema();
$schema->sources
;
-plan tests => ( @sources * 3);
-
{
my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { } } });
}
}
+done_testing;
+
sub create_schema {
my $args = shift;