Revision history for DBIx::Class
+0.08112 2009-09-21 10:57:00 (UTC)
+ - Remove the recommends from Makefile.PL, DBIx::Class is not
+ supposed to have optional dependencies. ever.
+ - Mangle the DBIx/Class.pm POD to be more clear about
+ copyright and license
+ - Put back PG's multiple autoinc per table support, accidentally
+ dropped during the serial-autodetection rewrite
+ - Make sure ResultSetColumn does not depend on the (undefined)
+ return value of ->cursor->reset()
+ - Add single() to ResultSetColumn (same semantics as ResultSet)
+ - Make sure to turn off IDENTITY_INSERT after insert() on MSSQL
+ tables that needed it
+ - More informative exception on failing _resolve_relationship
+ - Allow undef/NULL as the sole grouping value in Ordered
+ - Fix unreported rollback exceptions in TxnScopeGuard
+ - Fix overly-eager left-join chain enforcing code
+ - Warn about using distinct with an existing group_by
+ - Warn about attempting to $rs->get_column a non-unique column
+ when has_many joins are added to resultset
+ - Refactor of the exception handling system (now everything is a
+ DBIx::Class::Exception object)
+
+0.08111 2009-09-06 21:58:00 (UTC)
+ - The hashref to connection_info now accepts a 'dbh_maker'
+ coderef, allowing better integration with Catalyst
+ - Fixed a complex prefetch + regular join regression introduced
+ in 0.08108
+ - Fixed insert_bulk rebless handling
+ - Fixed Storable roundtrip regression, and general serialization
+ cleanup
+ - SQLT related fixes:
+ - sqlt_type is now called on the correct storage object
+ - hooks can now see the correct producer_type (RT#47891)
+ - optional SQLT requirements for e.g. deploy() bumped to 0.11002
+ - Really fixed (and greatly cleaned up) postgresql autoinc sequence
+ autodetection
+ - Automatically detect MySQL v3 and use INNER JOIN instead of JOIN
+ - POD improvements (including RT#48769)
+ - Test suite tweaks (including fixes for recent CPANTS fails)
+ - Better support for MSSQL IDENTITY_INSERT ON
+
0.08109 2009-08-18 08:35:00 (UTC)
- Replication updates:
- Improved the replication tests so that they are more reliable
- Support for MSSQL 'money' type
- Support for 'smalldatetime' type used in MSSQL and Sybase for
InflateColumn::DateTime
- - support for Postgres 'timestamp without timezone' type in
+ - Support for Postgres 'timestamp without timezone' type in
InflateColumn::DateTime (RT#48389)
- Added new MySQL specific on_connect_call macro 'set_strict_mode'
(also known as make_mysql_not_suck_as_much)
\bCVS\b
,v$
\B\.svn\b
+\B\.git\b
+\B\.gitignore\b
+\b_darcs\b
# Avoid Makemaker generated and utility files.
\bMakefile$
use 5.006001; # delete this line if you want to send patches for earlier.
+# ****** DO NOT ADD OPTIONAL DEPENDENCIES. EVER. --mst ******
+
name 'DBIx-Class';
perl_version '5.006001';
all_from 'lib/DBIx/Class.pm';
-test_requires 'Test::Builder' => 0.33;
-test_requires 'Test::Deep' => 0;
-test_requires 'Test::Exception' => 0;
-test_requires 'Test::More' => 0.92;
-test_requires 'Test::Warn' => 0.11;
+test_requires 'Test::Builder' => '0.33';
+test_requires 'Test::Deep' => '0';
+test_requires 'Test::Exception' => '0';
+test_requires 'Test::More' => '0.92';
+test_requires 'Test::Warn' => '0.21';
-test_requires 'File::Temp' => 0.22;
+test_requires 'File::Temp' => '0.22';
# Core
-requires 'List::Util' => 0;
-requires 'Scalar::Util' => 0;
-requires 'Storable' => 0;
+requires 'List::Util' => '0';
+requires 'Scalar::Util' => '0';
+requires 'Storable' => '0';
# Perl 5.8.0 doesn't have utf8::is_utf8()
-requires 'Encode' => 0 if ($] <= 5.008000);
+requires 'Encode' => '0' if ($] <= 5.008000);
# Dependencies (keep in alphabetical order)
-requires 'Carp::Clan' => 6.0;
-requires 'Class::Accessor::Grouped' => 0.08003;
-requires 'Class::C3::Componentised' => 1.0005;
-requires 'Class::Inspector' => 1.24;
-requires 'Data::Page' => 2.00;
-requires 'DBD::SQLite' => 1.25;
-requires 'DBI' => 1.605;
-requires 'JSON::Any' => 1.18;
-requires 'MRO::Compat' => 0.09;
-requires 'Module::Find' => 0.06;
-requires 'Path::Class' => 0.16;
-requires 'Scope::Guard' => 0.03;
-requires 'SQL::Abstract' => 1.56;
-requires 'SQL::Abstract::Limit' => 0.13;
-requires 'Sub::Name' => 0.04;
-
-recommends 'SQL::Translator' => 0.09004;
+requires 'Carp::Clan' => '6.0';
+requires 'Class::Accessor::Grouped' => '0.09000';
+requires 'Class::C3::Componentised' => '1.0005';
+requires 'Class::Inspector' => '1.24';
+requires 'Data::Page' => '2.00';
+requires 'DBD::SQLite' => '1.25';
+requires 'DBI' => '1.605';
+requires 'JSON::Any' => '1.18';
+requires 'MRO::Compat' => '0.09';
+requires 'Module::Find' => '0.06';
+requires 'Path::Class' => '0.16';
+requires 'Scope::Guard' => '0.03';
+requires 'SQL::Abstract' => '1.58';
+requires 'SQL::Abstract::Limit' => '0.13';
+requires 'Sub::Name' => '0.04';
my %replication_requires = (
- 'Moose', => 0.87,
- 'MooseX::AttributeHelpers' => 0.21,
- 'MooseX::Types', => 0.16,
- 'namespace::clean' => 0.11,
- 'Hash::Merge', => 0.11,
+ 'Moose' => '0.87',
+ 'MooseX::AttributeHelpers' => '0.21',
+ 'MooseX::Types' => '0.16',
+ 'namespace::clean' => '0.11',
+ 'Hash::Merge' => '0.11',
);
+#************************************************************************#
+# Make *ABSOLUTELY SURE* that nothing on this list is a real require, #
+# since every module listed in %force_requires_if_author is deleted #
+# from the final META.yml (thus will never make it as a CPAN dependency) #
+#************************************************************************#
my %force_requires_if_author = (
%replication_requires,
-# 'Module::Install::Pod::Inherit' => 0.01,
- 'Test::Pod::Coverage' => 1.04,
- 'SQL::Translator' => 0.09007,
+ # when changing also adjust $DBIx::Class::Storage::DBI::minimum_sqlt_version
+ 'SQL::Translator' => '0.11002',
+
+# 'Module::Install::Pod::Inherit' => '0.01',
+
+ # when changing also adjust version in t/02pod.t
+ 'Test::Pod' => '1.26',
+
+ # when changing also adjust version in t/03podcoverage.t
+ 'Test::Pod::Coverage' => '1.08',
+ 'Pod::Coverage' => '0.20',
# CDBI-compat related
- 'DBIx::ContextualFetch' => 0,
- 'Class::DBI::Plugin::DeepAbstractSearch' => 0,
- 'Class::Trigger' => 0,
- 'Time::Piece::MySQL' => 0,
- 'Clone' => 0,
- 'Date::Simple' => 3.03,
+ 'DBIx::ContextualFetch' => '0',
+ 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
+ 'Class::Trigger' => '0',
+ 'Time::Piece::MySQL' => '0',
+ 'Clone' => '0',
+ 'Date::Simple' => '3.03',
# t/52cycle.t
- 'Test::Memory::Cycle' => 0,
- 'Devel::Cycle' => 1.10,
+ 'Test::Memory::Cycle' => '0',
+ 'Devel::Cycle' => '1.10',
# t/36datetime.t
# t/60core.t
- 'DateTime::Format::SQLite' => 0,
+ 'DateTime::Format::SQLite' => '0',
# t/96_is_deteministic_value.t
- 'DateTime::Format::Strptime'=> 0,
+ 'DateTime::Format::Strptime'=> '0',
# database-dependent reqs
#
$ENV{DBICTEST_PG_DSN}
? (
- 'Sys::SigAction' => 0,
- 'DBD::Pg' => 2.009002,
- 'DateTime::Format::Pg' => 0,
+ 'Sys::SigAction' => '0',
+ 'DBD::Pg' => '2.009002',
+ 'DateTime::Format::Pg' => '0',
) : ()
,
$ENV{DBICTEST_MYSQL_DSN}
? (
- 'DateTime::Format::MySQL' => 0,
+ 'DateTime::Format::MySQL' => '0',
) : ()
,
$ENV{DBICTEST_ORACLE_DSN}
? (
- 'DateTime::Format::Oracle' => 0,
+ 'DateTime::Format::Oracle' => '0',
) : ()
,
);
+#************************************************************************#
+# Make ABSOLUTELY SURE that nothing on the list above is a real require, #
+# since every module listed in %force_requires_if_author is deleted #
+# from the final META.yml (thus will never make it as a CPAN dependency) #
+#************************************************************************#
install_script (qw|
__PACKAGE__->table(__PACKAGE__->table()); for the result set to
return the correct object type.
-2006-03-27 by mst
- Add the ability for deploy to be given a directory and grab <dbname>.sql
- out of there if available. Try SQL::Translator if not. If none of the above,
- cry (and die()). Then you can have a script that pre-gens for all available
- SQLT modules so an app can do its own deploy without SQLT on the target
- system
-
2006-05-25 by mst (TODOed by bluefeet)
Add the search attributes "limit" and "rows_per_page".
limit: work as expected just like offset does
my @cds;
foreach my $lp (keys %albums) {
- my $artist = $schema->resultset('Artist')->search({
+ my $artist = $schema->resultset('Artist')->find({
name => $albums{$lp}
});
- push @cds, [$lp, $artist->first];
+ push @cds, [$lp, $artist->id];
}
$schema->populate('Cd', [
my @tracks;
foreach my $track (keys %tracks) {
- my $cdname = $schema->resultset('Cd')->search({
+ my $cd = $schema->resultset('Cd')->find({
title => $tracks{$track},
});
- push @tracks, [$cdname->first, $track];
+ push @tracks, [$cd->id, $track];
}
$schema->populate('Track',[
use MRO::Compat;
use vars qw($VERSION);
-use base qw/DBIx::Class::Componentised Class::Accessor::Grouped/;
+use base qw/Class::C3::Componentised Class::Accessor::Grouped/;
use DBIx::Class::StartupCheck;
sub mk_classdata {
# Always remember to do all digits for the version even if they're 0
# i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
# brain damage and presumably various other packaging systems too
-
-$VERSION = '0.08109';
+$VERSION = '0.08112';
$VERSION = eval $VERSION; # numify for warning-free dev releases
bricas: Brian Cassidy <bricas@cpan.org>
+brunov: Bruno Vecchi <vecchi.b@gmail.com>
+
caelum: Rafael Kitover <rkitover@cpan.org>
castaway: Jess Robinson
zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
+=head1 COPYRIGHT
+
+Copyright (c) 2005 - 2009 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
+as listed above.
+
=head1 LICENSE
-You may distribute this code under the same terms as Perl itself.
+This library is free software and may be distributed under the same terms
+as perl itself.
=cut
use strict;
use warnings;
-use base 'Class::C3::Componentised';
-use Carp::Clan qw/^DBIx::Class/;
-
-sub inject_base {
- my ($class, $target, @to_inject) = @_;
- {
- no strict 'refs';
- foreach my $to (reverse @to_inject) {
- my @comps = qw(DigestColumns ResultSetManager Ordered UTF8Columns);
- # Add components here that need to be loaded before Core
- foreach my $first_comp (@comps) {
- if ($to eq 'DBIx::Class::Core' &&
- $target->isa("DBIx::Class::${first_comp}")) {
- carp "Possible incorrect order of components in ".
- "${target}::load_components($first_comp) call: Core loaded ".
- "before $first_comp. See the documentation for ".
- "DBIx::Class::$first_comp for more information";
- }
- }
- unshift( @{"${target}::ISA"}, $to )
- unless ($target eq $to || $target->isa($to));
- }
- }
+###
+# Keep this class for backwards compatibility
+###
- $class->next::method($target, @to_inject);
-}
+use base 'Class::C3::Componentised';
1;
=over 4
-=item L<DBIx::Class::Serialize::Storable>
-
=item L<DBIx::Class::InflateColumn>
=item L<DBIx::Class::Relationship>
use strict;
use warnings;
+use base qw/DBIx::Class/;
+
=head1 NAME
DBIx::Class::Cursor - Abstract object representing a query cursor on a
reports to the list very much welcome).
If the data_type of a field is C<date>, C<datetime> or C<timestamp> (or
-a derivative of these datatypes, e.g. C<timestamp with timezone>, this
+a derivative of these datatypes, e.g. C<timestamp with timezone>), this
module will automatically call the appropriate parse/format method for
deflation/inflation as defined in the storage class. For instance, for
a C<datetime> field the methods C<parse_datetime> and C<format_datetime>
__PACKAGE__->load_components(qw/InflateColumn/);
-__PACKAGE__->mk_group_accessors('simple' => '__datetime_parser');
-
=head2 register_column
Chains with the L<DBIx::Class::Row/register_column> method, and sets
}
sub _datetime_parser {
- my $self = shift;
- if (my $parser = $self->__datetime_parser) {
- return $parser;
- }
- my $parser = $self->result_source->storage->datetime_parser(@_);
- return $self->__datetime_parser($parser);
+ shift->result_source->storage->datetime_parser (@_);
}
1;
-These components provide extra functionality beyond
-basic functionality that you can't live without.
+These components provide extra functionality beyond the
+basic functionality that you can't live without.
+L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
+
L<DBIx::Class::CDBICompat> - Class::DBI Compatibility layer.
L<DBIx::Class::FormTools> - Build forms with multiple interconnected objects.
change, they may not work, etc. So, use them if you want, but
be warned.
-L<DBIx::Class::Serialize> - Hooks for Storable freeze/thaw.
-
-L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
-
L<DBIx::Class::Validation> - Validate all data before submitting to your database.
=head2 Core
=head1 AUTHOR
Aran Clary Deltac <bluefeet@cpan.org>
-
This results in something like the following C<WHERE> clause:
- WHERE artist LIKE '%Lamb%' AND title LIKE '%Fear of Fours%'
+ WHERE artist LIKE ? AND title LIKE ?
+
+And the following bind values for the placeholders: C<'%Lamb%'>, C<'%Fear of
+Fours%'>.
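+
+Such a clause would typically be produced by a search along these lines
+(a minimal sketch; the resultset name and bind values are illustrative):
+
+  my $rs = $schema->resultset('Cd')->search({
+    artist => { like => '%Lamb%' },
+    title  => { like => '%Fear of Fours%' },
+  });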
Other queries might require slightly more complex logic:
# Or use DBIx::Class::AccessorGroup:
__PACKAGE__->mk_group_accessors('column' => 'name_length');
+See also L</Using SQL functions on the left hand side of a comparison>.
+
=head2 SELECT DISTINCT with multiple columns
my $rs = $schema->resultset('Artist')->search(
The following will B<not> work:
my $rs = $schema->resultset('CD')->search({
- artist_id => $inside_rs->get_column('id')->as_query,
+ artist_id => $inside_rs->get_column('id')->as_query, # does NOT work
});
=head3 Support
=head2 Using SQL functions on the left hand side of a comparison
-Using SQL functions on the left hand side of a comparison is generally
-not a good idea since it requires a scan of the entire table. However,
+Using SQL functions on the left hand side of a comparison is generally not a
+good idea since it requires a scan of the entire table (unless your RDBMS
+supports indexes on expressions, including return values of functions, and
+you create an index on the return value of the function in question). However,
it can be accomplished with C<DBIx::Class> when necessary.
If you do not have quoting on, simply include the function in your search
$rs->search({ 'YEAR(date_of_birth)' => 1979 });
-With quoting on, or for a more portable solution, use the C<where>
-attribute:
+With quoting on, or for a more portable solution, use literal SQL values with
+placeholders:
- $rs->search({}, { where => \'YEAR(date_of_birth) = 1979' });
+ $rs->search(\[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ]);
-=begin hidden
+ # Equivalent SQL:
+ # SELECT * FROM employee WHERE YEAR(date_of_birth) = ?
-(When the bind args ordering bug is fixed, this technique will be better
-and can replace the one above.)
+ $rs->search({
+ name => 'Bob',
+ -nest => \[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ],
+ });
-With quoting on, or for a more portable solution, use the C<where> and
-C<bind> attributes:
+ # Equivalent SQL:
+ # SELECT * FROM employee WHERE name = ? AND YEAR(date_of_birth) = ?
- $rs->search({}, {
- where => \'YEAR(date_of_birth) = ?',
- bind => [ 1979 ]
- });
+Note: the C<plain_value> string in the C<< [ plain_value => 1979 ] >> part
+should either be the same as the name of the column (do this if the return
+type of the function matches the type of the column), or any dummy string
+(currently it is otherwise ignored; using C<plain_value> is a good habit).
+It is used by L<DBIx::Class> to handle special column types.
-=end hidden
+See also L<SQL::Abstract/Literal SQL with placeholders and bind values
+(subqueries)>.
=head1 JOINS AND PREFETCHING
### The statement below will print
print "I can do admin stuff\n" if $admin->can('do_admin_stuff');
+Alternatively you can use L<DBIx::Class::DynamicSubclass> that implements
+exactly the above functionality.
+
=head2 Skip row object creation for faster results
DBIx::Class is not built for speed, it's built for convenience and
To order C<< $book->pages >> by descending page_number, create the relation
as follows:
- __PACKAGE__->has_many('pages' => 'Page', 'book', { order_by => \'page_number DESC'} );
+ __PACKAGE__->has_many('pages' => 'Page', 'book', { order_by => { -desc => 'page_number'} } );
=head2 Filtering a relationship result set
$rs = $user->addresses(); # get all addresses for a user
$rs = $address->users(); # get all users for an address
+ my $address = $user->add_to_addresses( # returns a My::Address instance,
+ # NOT a My::UserAddress instance!
+ {
+ country => 'United Kingdom',
+ area_code => 'XYZ',
+ town => 'London',
+ street => 'Sesame',
+ }
+ );
+
=head2 Relationships across DB schemas
Mapping relationships across L<DB schemas|DBIx::Class::Manual::Glossary/DB schema>
Alternatively, you can send the conversion sql scripts to your
customers as above.
-=head2 Setting quoting for the generated SQL.
+=head2 Setting quoting for the generated SQL
If the database contains column names with spaces and/or reserved words, they
-need to be quoted in the SQL queries. This is done using:
+need to be quoted in the SQL queries. This can be done via the C<quote_char>
+and C<name_sep> connection attributes, for example (a minimal sketch; the
+schema class and connection arguments are placeholders):
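+
+  my $schema = My::Schema->connect(
+    $dsn, $user, $password,
+    { quote_char => '"', name_sep => '.' },
+  );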
=item L<DBIx::Class::Core> - Set of standard components to load.
-=item L<DBIx::Class::Serialize::Storable> - ?
-
=item L<DBIx::Class::InflateColumn> - Making objects out of your columns.
=item L<DBIx::Class::InflateColumn::DateTime> - Magically turn your datetime or timestamp columns into DateTime objects.
Install DBIx::Class via CPAN should be sufficient.
-=head3 Create the database/tables.
+=head3 Create the database/tables
First make and change the directory:
1;
-=head3 Write a script to insert some records.
+=head3 Write a script to insert some records
insertdb.pl
my @cds;
foreach my $lp (keys %albums) {
- my $artist = $schema->resultset('Artist')->search({
+ my $artist = $schema->resultset('Artist')->find({
name => $albums{$lp}
});
- push @cds, [$lp, $artist->first];
+ push @cds, [$lp, $artist->id];
}
$schema->populate('Cd', [
my @tracks;
foreach my $track (keys %tracks) {
- my $cdname = $schema->resultset('Cd')->search({
+ my $cd = $schema->resultset('Cd')->find({
title => $tracks{$track},
});
- push @tracks, [$cdname->first, $track];
+ push @tracks, [$cd->id, $track];
}
$schema->populate('Track',[
Next, spend some time defining which data you need to store, and how
it relates to the other data you have. For some help on normalisation,
-go to L<http://b62.tripod.com/doc/dbbase.htm> or
-L<http://209.197.234.36/db/simple.html>.
+go to L<http://b62.tripod.com/doc/dbbase.htm>.
Now, decide whether you want to have the database itself be the
definitive source of information about the data layout, or your
->search({'created_time' => { '>=', '2006-06-01 00:00:00' } })
-Note that to use a function here you need to make the whole value into
-a scalar reference:
+Note that to use a function here you need to make it a scalar
+reference:
- ->search({'created_time' => \'>= yesterday()' })
+ ->search({'created_time' => { '>=', \'yesterday()' } })
=item .. search in several tables simultaneously?
query, which can be accessed similarly to a table, see your database
documentation for details.
-=item .. search using greater-than or less-than and database functions?
-
-To use functions or literal SQL with conditions other than equality
-you need to supply the entire condition, for example:
-
- my $interval = "< now() - interval '12 hours'";
- ->search({last_attempt => \$interval})
-
-and not:
-
- my $interval = "now() - interval '12 hours'";
- ->search({last_attempt => { '<' => \$interval } })
-
=item .. search with an SQL function on the left hand side?
To use an SQL function on the left hand side of a comparison:
- ->search({}, { where => \'YEAR(date_of_birth)=1979' });
-
-=begin hidden
-
-(When the bind arg ordering bug is fixed, the previous example can be
-replaced with the following.)
-
- ->search({}, { where => \'YEAR(date_of_birth)=?', bind => [ 1979 ] });
+ ->search({ -nest => \[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ] });
-=end hidden
+Note: the C<plain_value> string in the C<< [ plain_value => 1979 ] >> part
+should either be the same as the name of the column (do this if the return
+type of the function matches the type of the column), or any dummy string
+(currently it is otherwise ignored; using C<plain_value> is a good habit).
+It is used by L<DBIx::Class> to handle special column types.
Or, if you have quoting off:
sub move_to_group {
my( $self, $to_group, $to_position ) = @_;
- $self->throw_exception ('move_to_group() expects a group specification')
- unless defined $to_group;
-
- # if we're given a string, turn it into a hashref
+ # if we're given a single value, turn it into a hashref
unless (ref $to_group eq 'HASH') {
my @gcols = $self->_grouping_columns;
'bool' => "_bool",
fallback => 1;
use Carp::Clan qw/^DBIx::Class/;
+use DBIx::Class::Exception;
use Data::Page;
use Storable;
use DBIx::Class::ResultSetColumn;
my $where = $self->_collapse_cond($self->{attrs}{where} || {});
my $num_where = scalar keys %$where;
- my @unique_queries;
+ my (@unique_queries, %seen_column_combinations);
foreach my $name (@constraint_names) {
- my @unique_cols = $self->result_source->unique_constraint_columns($name);
- my $unique_query = $self->_build_unique_query($query, \@unique_cols);
+ my @constraint_cols = $self->result_source->unique_constraint_columns($name);
- my $num_cols = scalar @unique_cols;
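+ # constraints sharing an identical column set would produce identical
+ # queries; examine each distinct combination of columns only once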
+ my $constraint_sig = join "\x00", sort @constraint_cols;
+ next if $seen_column_combinations{$constraint_sig}++;
+
+ my $unique_query = $self->_build_unique_query($query, \@constraint_cols);
+
+ my $num_cols = scalar @constraint_cols;
my $num_query = scalar keys %$unique_query;
my $total = $num_query + $num_where;
a unique constraint that is not the primary key, or looking for
related rows.
-If you want objects to be saved immediately, use L</find_or_create> instead.
+If you want objects to be saved immediately, use L</find_or_create>
+instead.
-B<Note>: C<find_or_new> is probably not what you want when creating a
-new row in a table that uses primary keys supplied by the
-database. Passing in a primary key column with a value of I<undef>
-will cause L</find> to attempt to search for a row with a value of
-I<NULL>.
+B<Note>: Take care when using C<find_or_new> with a table having
+columns with default values that you intend to be automatically
+supplied by the database (e.g. an auto_increment primary key column).
+In normal usage, the value of such columns should NOT be included at
+all in the call to C<find_or_new>, even when set to C<undef>.
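+
+For example, given a hypothetical C<artistid> auto_increment primary key:
+
+  # correct: let the database supply the value of artistid
+  my $artist = $artist_rs->find_or_new({ name => 'Shiny Baubles' });
+
+  # incorrect: passing artistid (even as undef) will make the
+  # underlying find() search for a row with artistid IS NULL
+  my $wrong = $artist_rs->find_or_new({
+    artistid => undef,
+    name     => 'Shiny Baubles',
+  });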
=cut
}
});
+=over
+
+=item WARNING
+
+When subclassing ResultSet never attempt to override this method. Since
+it is a simple shortcut for C<< $self->new_result($attrs)->insert >>, a
+lot of the internals simply never call it, so your override will be
+bypassed more often than not. Override either L<new|DBIx::Class::Row/new>
+or L<insert|DBIx::Class::Row/insert> depending on how early in the
+L</create> process you need to intervene.
+
+=back
+
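+For example, a minimal sketch of intervening via an overridden
+L<insert|DBIx::Class::Row/insert> (the result class shown is hypothetical):
+
+  package My::Schema::Result::Artist;
+
+  sub insert {
+    my $self = shift;
+
+    # do any work required before the row hits the database
+
+    my $ret = $self->next::method(@_);
+
+    # do any work required after the insert
+
+    return $ret;
+  }
+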
=cut
sub create {
the find has completed and before the create has started. To avoid
this problem, use find_or_create() inside a transaction.
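+
+For example (a minimal sketch; resultset and data are illustrative):
+
+  my $artist = $schema->txn_do(sub {
+    $schema->resultset('Artist')->find_or_create({ name => 'Fred' });
+  });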
-B<Note>: C<find_or_create> is probably not what you want when creating
-a new row in a table that uses primary keys supplied by the
-database. Passing in a primary key column with a value of I<undef>
-will cause L</find> to attempt to search for a row with a value of
-I<NULL>.
+B<Note>: Take care when using C<find_or_create> with a table having
+columns with default values that you intend to be automatically
+supplied by the database (e.g. an auto_increment primary key column).
+In normal usage, the value of such columns should NOT be included at
+all in the call to C<find_or_create>, even when set to C<undef>.
See also L</find> and L</update_or_create>. For information on how to declare
unique constraints, see L<DBIx::Class::ResultSource/add_unique_constraint>.
See also L</find> and L</find_or_create>. For information on how to declare
unique constraints, see L<DBIx::Class::ResultSource/add_unique_constraint>.
-B<Note>: C<update_or_create> is probably not what you want when
-looking for a row in a table that uses primary keys supplied by the
-database, unless you actually have a key value. Passing in a primary
-key column with a value of I<undef> will cause L</find> to attempt to
-search for a row with a value of I<NULL>.
+B<Note>: Take care when using C<update_or_create> with a table having
+columns with default values that you intend to be automatically
+supplied by the database (e.g. an auto_increment primary key column).
+In normal usage, the value of such columns should NOT be included at
+all in the call to C<update_or_create>, even when set to C<undef>.
=cut
$cd->insert;
}
-See also L</find>, L</find_or_create> and L<find_or_new>.
+B<Note>: Take care when using C<update_or_new> with a table having
+columns with default values that you intend to be automatically
+supplied by the database (e.g. an auto_increment primary key column).
+In normal usage, the value of such columns should NOT be included at
+all in the call to C<update_or_new>, even when set to C<undef>.
+
+See also L</find>, L</find_or_create> and L</find_or_new>.
=cut
# build columns (as long as select isn't set) into a set of as/select hashes
unless ( $attrs->{select} ) {
- @colbits = map {
- ( ref($_) eq 'HASH' )
- ? $_
- : {
- (
- /^\Q${alias}.\E(.+)$/
- ? "$1"
- : "$_"
- )
- =>
- (
- /\./
- ? "$_"
- : "${alias}.$_"
- )
- }
- } ( ref($attrs->{columns}) eq 'ARRAY' ) ? @{ delete $attrs->{columns}} : (delete $attrs->{columns} || $source->columns );
+
+ my @cols = ( ref($attrs->{columns}) eq 'ARRAY' )
+ ? @{ delete $attrs->{columns}}
+ : (
+ ( delete $attrs->{columns} )
+ ||
+ $source->columns
+ )
+ ;
+
+ @colbits = map {
+ ( ref($_) eq 'HASH' )
+ ? $_
+ : {
+ (
+ /^\Q${alias}.\E(.+)$/
+ ? "$1"
+ : "$_"
+ )
+ =>
+ (
+ /\./
+ ? "$_"
+ : "${alias}.$_"
+ )
+ }
+ } @cols;
}
+
# add the additional columns on
foreach ( 'include_columns', '+columns' ) {
push @colbits, map {
if ( $attrs->{join} || $attrs->{prefetch} ) {
- $self->throw_exception ('join/prefetch can not be used with a literal scalarref {from}')
+ $self->throw_exception ('join/prefetch can not be used with a custom {from}')
if ref $attrs->{from} ne 'ARRAY';
my $join = delete $attrs->{join} || {};
# generate the distinct induced group_by early, as prefetch will be carried via a
# subquery (since a group_by is present)
if (delete $attrs->{distinct}) {
- $attrs->{group_by} ||= [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ];
+ if ($attrs->{group_by}) {
+ carp ("Useless use of distinct on a grouped resultset ('distinct' is ignored when a 'group_by' is present)");
+ }
+ else {
+ $attrs->{group_by} = [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ];
+ }
}
$attrs->{collapse} ||= {};
sub _calculate_score {
my ($self, $a, $b) = @_;
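+ # if only one of the two values is defined, this is not a match (0);
+ # if neither is defined, consider it a perfect match (1)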
+ if (defined $a xor defined $b) {
+ return 0;
+ }
+ elsif (not defined $a) {
+ return 1;
+ }
+
if (ref $b eq 'HASH') {
my ($b_key) = keys %{$b};
if (ref $a eq 'HASH') {
sub throw_exception {
my $self=shift;
+
if (ref $self && $self->_source_handle->schema) {
$self->_source_handle->schema->throw_exception(@_)
- } else {
- croak(@_);
}
-
+ else {
+ DBIx::Class::Exception->throw(@_);
+ }
}
# XXX: FIXME: Attributes docs need clearing up
=back
-Set to 1 to group by all columns.
+Set to 1 to group by all columns. If the resultset already has a group_by
+attribute, this setting is ignored and an appropriate warning is issued.
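+
+For example (a minimal sketch):
+
+  my $rs = $schema->resultset('CD')->search({}, { distinct => 1 });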
=head2 where
For more examples of using these attributes, see
L<DBIx::Class::Manual::Cookbook>.
-=head2 from
-
-=over 4
-
-=item Value: \@from_clause
-
-=back
-
-The C<from> attribute gives you manual control over the C<FROM> clause of SQL
-statements generated by L<DBIx::Class>, allowing you to express custom C<JOIN>
-clauses.
-
-NOTE: Use this on your own risk. This allows you to shoot off your foot!
-
-C<join> will usually do what you need and it is strongly recommended that you
-avoid using C<from> unless you cannot achieve the desired result using C<join>.
-And we really do mean "cannot", not just tried and failed. Attempting to use
-this because you're having problems with C<join> is like trying to use x86
-ASM because you've got a syntax error in your C. Trust us on this.
-
-Now, if you're still really, really sure you need to use this (and if you're
-not 100% sure, ask the mailing list first), here's an explanation of how this
-works.
-
-The syntax is as follows -
-
- [
- { <alias1> => <table1> },
- [
- { <alias2> => <table2>, -join_type => 'inner|left|right' },
- [], # nested JOIN (optional)
- { <table1.column1> => <table2.column2>, ... (more conditions) },
- ],
- # More of the above [ ] may follow for additional joins
- ]
-
- <table1> <alias1>
- JOIN
- <table2> <alias2>
- [JOIN ...]
- ON <table1.column1> = <table2.column2>
- <more joins may follow>
-
-An easy way to follow the examples below is to remember the following:
-
- Anything inside "[]" is a JOIN
- Anything inside "{}" is a condition for the enclosing JOIN
-
-The following examples utilize a "person" table in a family tree application.
-In order to express parent->child relationships, this table is self-joined:
-
- # Person->belongs_to('father' => 'Person');
- # Person->belongs_to('mother' => 'Person');
-
-C<from> can be used to nest joins. Here we return all children with a father,
-then search against all mothers of those children:
-
- $rs = $schema->resultset('Person')->search(
- undef,
- {
- alias => 'mother', # alias columns in accordance with "from"
- from => [
- { mother => 'person' },
- [
- [
- { child => 'person' },
- [
- { father => 'person' },
- { 'father.person_id' => 'child.father_id' }
- ]
- ],
- { 'mother.person_id' => 'child.mother_id' }
- ],
- ]
- },
- );
-
- # Equivalent SQL:
- # SELECT mother.* FROM person mother
- # JOIN (
- # person child
- # JOIN person father
- # ON ( father.person_id = child.father_id )
- # )
- # ON ( mother.person_id = child.mother_id )
-
-The type of any join can be controlled manually. To search against only people
-with a father in the person table, we could explicitly use C<INNER JOIN>:
-
- $rs = $schema->resultset('Person')->search(
- undef,
- {
- alias => 'child', # alias columns in accordance with "from"
- from => [
- { child => 'person' },
- [
- { father => 'person', -join_type => 'inner' },
- { 'father.id' => 'child.father_id' }
- ],
- ]
- },
- );
-
- # Equivalent SQL:
- # SELECT child.* FROM person child
- # INNER JOIN person father ON child.father_id = father.id
-
-You can select from a subquery by passing a resultset to from as follows.
-
- $schema->resultset('Artist')->search(
- undef,
- { alias => 'artist2',
- from => [ { artist2 => $artist_rs->as_query } ],
- } );
-
- # and you'll get sql like this..
- # SELECT artist2.artistid, artist2.name, artist2.rank, artist2.charfield FROM
- # ( SELECT me.artistid, me.name, me.rank, me.charfield FROM artists me ) artist2
-
-If you need to express really complex joins, you
-can supply literal SQL to C<from> via a scalar reference. In this case
-the contents of the scalar will replace the table name associated with the
-resultsource.
-
-WARNING: This technique might very well not work as expected on chained
-searches - you have been warned.
-
- # Assuming the Event resultsource is defined as:
-
- MySchema::Event->add_columns (
- sequence => {
- data_type => 'INT',
- is_auto_increment => 1,
- },
- location => {
- data_type => 'INT',
- },
- type => {
- data_type => 'INT',
- },
- );
- MySchema::Event->set_primary_key ('sequence');
-
- # This will get back the latest event for every location. The column
- # selector is still provided by DBIC, all we do is add a JOIN/WHERE
- # combo to limit the resultset
-
- $rs = $schema->resultset('Event');
- $table = $rs->result_source->name;
- $latest = $rs->search (
- undef,
- { from => \ "
- (SELECT e1.* FROM $table e1
- JOIN $table e2
- ON e1.location = e2.location
- AND e1.sequence < e2.sequence
- WHERE e2.sequence is NULL
- ) me",
- },
- );
-
- # Equivalent SQL (with the DBIC chunks added):
-
- SELECT me.sequence, me.location, me.type FROM
- (SELECT e1.* FROM events e1
- JOIN events e2
- ON e1.location = e2.location
- AND e1.sequence < e2.sequence
- WHERE e2.sequence is NULL
- ) me;
-
=head2 for
=over 4
package DBIx::Class::ResultSetColumn;
+
use strict;
use warnings;
+
use base 'DBIx::Class';
+
+use Carp::Clan qw/^DBIx::Class/;
+use DBIx::Class::Exception;
use List::Util;
=head1 NAME
my $select = defined $as_index ? $select_list->[$as_index] : $column;
# {collapse} would mean a has_many join was injected, which in turn means
- # we need to group IF WE CAN (only if the column in question is unique)
+ # we need to group *IF WE CAN* (only if the column in question is unique)
if (!$new_attrs->{group_by} && keys %{$orig_attrs->{collapse}}) {
# scan for a constraint that would contain our column only - that'd be proof
if ($col eq $select or $fqcol eq $select) {
$new_attrs->{group_by} = [ $select ];
+ delete $new_attrs->{distinct}; # it is ignored when group_by is present
last;
}
}
+
+ if (!$new_attrs->{group_by}) {
+ carp (
+ "Attempting to retrieve non-unique column '$column' on a resultset containing "
+ . 'one-to-many joins will return duplicate results.'
+ );
+ }
}
my $new = bless { _select => $select, _as => $column, _parent_resultset => $new_parent_rs }, $class;
sub next {
my $self = shift;
+
+ # using cursor so we don't inflate anything
my ($row) = $self->_resultset->cursor->next;
+
return $row;
}
sub all {
my $self = shift;
+
+ # using cursor so we don't inflate anything
return map { $_->[0] } $self->_resultset->cursor->all;
}
sub first {
my $self = shift;
- my ($row) = $self->_resultset->cursor->reset->next;
+
+ # using cursor so we don't inflate anything
+ $self->_resultset->cursor->reset;
+ my ($row) = $self->_resultset->cursor->next;
+
+ return $row;
+}
+
+=head2 single
+
+=over 4
+
+=item Arguments: none
+
+=item Return Value: $value
+
+=back
+
+Much like L<DBIx::Class::ResultSet/single>, fetches one and only one column
+value using the cursor directly. If additional rows are present a warning
+is issued before discarding the cursor.
+
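+For example (a sketch; C<$cd_rs> is an illustrative resultset expected to
+contain at most one row):
+
+  my $title = $cd_rs->get_column('title')->single;
+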
+=cut
+
+sub single {
+ my $self = shift;
+
+ my $attrs = $self->_resultset->_resolved_attrs;
+ my ($row) = $self->_resultset->result_source->storage->select_single(
+ $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs
+ );
+
return $row;
}
sub throw_exception {
my $self=shift;
+
if (ref $self && $self->{_parent_resultset}) {
- $self->{_parent_resultset}->throw_exception(@_)
- } else {
- croak(@_);
+ $self->{_parent_resultset}->throw_exception(@_);
+ }
+ else {
+ DBIx::Class::Exception->throw(@_);
}
}
#
# Returns the underlying resultset. Creates it from the parent resultset if
# necessary.
-#
+#
sub _resultset {
my $self = shift;
use DBIx::Class::ResultSet;
use DBIx::Class::ResultSourceHandle;
+
+use DBIx::Class::Exception;
use Carp::Clan qw/^DBIx::Class/;
-use Storable;
use base qw/DBIx::Class/;
# Returns the {from} structure used to express JOIN conditions
sub _resolve_join {
- my ($self, $join, $alias, $seen, $jpath, $force_left) = @_;
+ my ($self, $join, $alias, $seen, $jpath, $parent_force_left) = @_;
# we need a supplied one, because we do in-place modifications, no returns
$self->throw_exception ('You must supply a seen hashref as the 3rd argument to _resolve_join')
$jpath = [@$jpath];
- if (ref $join eq 'ARRAY') {
+ if (not defined $join) {
+ return ();
+ }
+ elsif (ref $join eq 'ARRAY') {
return
map {
- $self->_resolve_join($_, $alias, $seen, $jpath, $force_left);
+ $self->_resolve_join($_, $alias, $seen, $jpath, $parent_force_left);
} @$join;
- } elsif (ref $join eq 'HASH') {
- return
- map {
- my $as = ($seen->{$_} ? join ('_', $_, $seen->{$_} + 1) : $_); # the actual seen value will be incremented below
- local $force_left->{force} = $force_left->{force};
- (
- $self->_resolve_join($_, $alias, $seen, [@$jpath], $force_left),
- $self->related_source($_)->_resolve_join(
- $join->{$_}, $as, $seen, [@$jpath, $_], $force_left
- )
- );
- } keys %$join;
- } elsif (ref $join) {
- $self->throw_exception("No idea how to resolve join reftype ".ref $join);
- } else {
+ }
+ elsif (ref $join eq 'HASH') {
- return() unless defined $join;
+ my @ret;
+ for my $rel (keys %$join) {
+ my $rel_info = $self->relationship_info($rel)
+ or $self->throw_exception("No such relationship ${rel}");
+
+ my $force_left = $parent_force_left;
+ $force_left ||= lc($rel_info->{attrs}{join_type}||'') eq 'left';
+
+ # the actual seen value will be incremented by the recursion
+ my $as = ($seen->{$rel} ? join ('_', $rel, $seen->{$rel} + 1) : $rel);
+
+ push @ret, (
+ $self->_resolve_join($rel, $alias, $seen, [@$jpath], $force_left),
+ $self->related_source($rel)->_resolve_join(
+ $join->{$rel}, $as, $seen, [@$jpath, $rel], $force_left
+ )
+ );
+ }
+ return @ret;
+
+ }
+ elsif (ref $join) {
+ $self->throw_exception("No idea how to resolve join reftype ".ref $join);
+ }
+ else {
my $count = ++$seen->{$join};
my $as = ($count > 1 ? "${join}_${count}" : $join);
- my $rel_info = $self->relationship_info($join);
- $self->throw_exception("No such relationship ${join}") unless $rel_info;
- my $type;
- if ($force_left) {
- $type = 'left';
- } else {
- $type = $rel_info->{attrs}{join_type} || '';
- $force_left = 1 if lc($type) eq 'left';
- }
+ my $rel_info = $self->relationship_info($join)
+ or $self->throw_exception("No such relationship ${join}");
my $rel_src = $self->related_source($join);
return [ { $as => $rel_src->from,
-source_handle => $rel_src->handle,
- -join_type => $type,
+ -join_type => $parent_force_left
+ ? 'left'
+ : $rel_info->{attrs}{join_type}
+ ,
-join_path => [@$jpath, $join],
-alias => $as,
-relation_chain_depth => $seen->{-relation_chain_depth} || 0,
#warn "$self $k $for $v";
unless ($for->has_column_loaded($v)) {
if ($for->in_storage) {
- $self->throw_exception(
- "Column ${v} not loaded or not passed to new() prior to insert()"
- ." on ${for} trying to resolve relationship (maybe you forgot "
- ."to call ->discard_changes to get defaults from the db)"
+ $self->throw_exception(sprintf
+ 'Unable to resolve relationship from %s to %s: column %s.%s not '
+ . 'loaded from storage (or not passed to new() prior to insert()). '
+ . 'Maybe you forgot to call ->discard_changes to get defaults from the db.',
+
+ $for->result_source->source_name,
+ $as,
+ $as, $v,
);
}
return $UNRESOLVABLE_CONDITION;
my ($self, $pre, $alias, $alias_map, $order, $collapse, $pref_path) = @_;
$pref_path ||= [];
- if( ref $pre eq 'ARRAY' ) {
+ if (not defined $pre) {
+ return ();
+ }
+ elsif( ref $pre eq 'ARRAY' ) {
return
map { $self->_resolve_prefetch( $_, $alias, $alias_map, $order, $collapse, [ @$pref_path ] ) }
@$pre;
$p = $p->{$_} for (@$pref_path, $pre);
$self->throw_exception (
- "Unable to resolve prefetch $pre - join alias map does not contain an entry for path: "
+ "Unable to resolve prefetch '$pre' - join alias map does not contain an entry for path: "
. join (' -> ', @$pref_path, $pre)
) if (ref $p->{-join_aliases} ne 'ARRAY' or not @{$p->{-join_aliases}} );
sub throw_exception {
my $self = shift;
+
if (defined $self->schema) {
$self->schema->throw_exception(@_);
- } else {
- croak(@_);
+ }
+ else {
+ DBIx::Class::Exception->throw(@_);
}
}
my $to_serialize = { %$self };
- my $class = $self->schema->class($self->source_moniker);
- $to_serialize->{schema} = $class;
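+ # a live schema can not be frozen; drop it and remember the
+ # originating class instead, so STORABLE_thaw can reattach a schema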
+ delete $to_serialize->{schema};
+ $to_serialize->{_frozen_from_class} = $self->schema->class($self->source_moniker);
+
return (Storable::freeze($to_serialize));
}
sub STORABLE_thaw {
- my ($self, $cloning,$ice) = @_;
+ my ($self, $cloning, $ice) = @_;
%$self = %{ Storable::thaw($ice) };
- my $class = delete $self->{schema};
+ my $class = delete $self->{_frozen_from_class};
if( $thaw_schema ) {
$self->{schema} = $thaw_schema;
}
$self->{schema} = $rs->schema if $rs;
}
- carp "Unable to restore schema" unless $self->{schema};
+ carp "Unable to restore schema. Look at 'freeze' and 'thaw' methods in DBIx::Class::Schema."
+ unless $self->{schema};
}
=head1 AUTHOR
use warnings;
use base qw/DBIx::Class/;
-use Carp::Clan qw/^DBIx::Class/;
+
+use DBIx::Class::Exception;
use Scalar::Util ();
-use Scope::Guard;
###
### Internal method
foreach my $key (keys %$attrs) {
if (ref $attrs->{$key}) {
## Can we extract this lot to use with update(_or .. ) ?
- confess "Can't do multi-create without result source" unless $source;
+ $new->throw_exception("Can't do multi-create without result source")
+ unless $source;
my $info = $source->relationship_info($key);
if ($info && $info->{attrs}{accessor}
&& $info->{attrs}{accessor} eq 'single')
sub throw_exception {
my $self=shift;
+
if (ref $self && ref $self->result_source && $self->result_source->schema) {
- $self->result_source->schema->throw_exception(@_);
- } else {
- croak(@_);
+ $self->result_source->schema->throw_exception(@_)
+ }
+ else {
+ DBIx::Class::Exception->throw(@_);
}
}
foreach my $j (@join) {
my ($to, $on) = @$j;
+
# check whether a join type exists
- my $join_clause = '';
my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to;
- if (ref($to_jt) eq 'HASH' and exists($to_jt->{-join_type})) {
- $join_clause = ' '.uc($to_jt->{-join_type}).' JOIN ';
- } else {
- $join_clause = ' JOIN ';
+ my $join_type;
+ if (ref($to_jt) eq 'HASH' and defined($to_jt->{-join_type})) {
+ $join_type = $to_jt->{-join_type};
+ $join_type =~ s/^\s+ | \s+$//xg;
}
- push(@sqlf, $join_clause);
+
+ $join_type = $self->{_default_jointype} if not defined $join_type;
+
+ my $join_clause = sprintf ('%s JOIN ',
+ $join_type ? ' ' . uc($join_type) : ''
+ );
+ push @sqlf, $join_clause;
if (ref $to eq 'ARRAY') {
push(@sqlf, '(', $self->_recurse_from(@$to), ')');
$storage_class = 'DBIx::Class::Storage'.$storage_class
if $storage_class =~ m/^::/;
- eval "require ${storage_class};";
+ eval { $self->ensure_class_loaded ($storage_class) };
$self->throw_exception(
"No arguments to load_classes and couldn't load ${storage_class} ($@)"
) if $@;
return;
}
- eval 'require SQL::Translator "0.09003"';
- if ($@) {
- $self->throw_exception("SQL::Translator 0.09003 required");
- }
+ $self->throw_exception($self->storage->_sqlt_version_error)
+ if (not $self->storage->_sqlt_version_ok);
- my $db_tr = SQL::Translator->new({
- add_drop_table => 1,
+ my $db_tr = SQL::Translator->new({
+ add_drop_table => 1,
parser => 'DBI',
parser_args => { dbh => $self->storage->dbh }
});
my ($self, $cloning) = @_;
my $to_serialize = { %$self };
+ # The source is either derived from _source_handle or is
+ # reattached in the thaw handler below
delete $to_serialize->{result_source};
- delete $to_serialize->{related_resultsets};
- delete $to_serialize->{_inflated_column};
+
+ # Dynamic values, easy to recalculate
+ delete $to_serialize->{$_} for qw/related_resultsets _inflated_column/;
return (Storable::freeze($to_serialize));
}
my ($self, $cloning, $serialized) = @_;
%$self = %{ Storable::thaw($serialized) };
+
+ # if the handle went missing somehow, reattach
$self->result_source($self->result_source_instance)
- if $self->can('result_source_instance');
+ if !$self->_source_handle && $self->can('result_source_instance');
}
1;
use base qw/DBIx::Class/;
use mro 'c3';
-use Scalar::Util qw/weaken/;
-use Carp::Clan qw/^DBIx::Class/;
+use DBIx::Class::Exception;
+use Scalar::Util();
use IO::File;
use DBIx::Class::Storage::TxnScopeGuard;
sub set_schema {
my ($self, $schema) = @_;
$self->schema($schema);
- weaken($self->{schema}) if ref $self->{schema};
+ Scalar::Util::weaken($self->{schema}) if ref $self->{schema};
}
=head2 connected
sub throw_exception {
my $self = shift;
- $self->schema->throw_exception(@_) if $self->schema;
- croak @_;
+ if ($self->schema) {
+ $self->schema->throw_exception(@_);
+ }
+ else {
+ DBIx::Class::Exception->throw(@_);
+ }
}
=head2 txn_do
use Scalar::Util();
use List::Util();
+# what version of sqlt do we require if deploy() without a ddl_dir is invoked
+# when changing also adjust the corresponding author_require in Makefile.PL
+my $minimum_sqlt_version = '0.11002';
+
+
__PACKAGE__->mk_group_accessors('simple' =>
qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid
_conn_tid transaction_depth _dbh_autocommit _driver_determined savepoints/
my $schema = MySchema->connect('dbi:SQLite:my.db');
$schema->storage->debug(1);
- $schema->dbh_do("DROP TABLE authors");
+
+ my @stuff = $schema->storage->dbh_do(
+ sub {
+ my ($storage, $dbh, @args) = @_;
+ $dbh->do("DROP TABLE authors");
+ },
+ @column_list
+ );
$schema->resultset('Book')->search({
written_on => $schema->storage->datetime_parser(DateTime->now)
%extra_attributes,
}];
+ $connect_info_args = [{
+ dbh_maker => sub { DBI->connect (...) },
+ %dbi_attributes,
+ %extra_attributes,
+ }];
+
This is particularly useful for L<Catalyst> based applications, allowing the
following config (L<Config::General> style):
</connect_info>
</Model::DB>
+The C<dsn>/C<user>/C<password> combination can be substituted by the
+C<dbh_maker> key whose value is a coderef that returns a connected
+L<DBI database handle|DBI/connect>.
+
=back
Please note that the L<DBI> docs recommend that you always explicitly
# Connect via subref
->connect_info([ sub { DBI->connect(...) } ]);
+ # Connect via subref in hashref
+ ->connect_info([{
+ dbh_maker => sub { DBI->connect(...) },
+ on_connect_do => 'alter session ...',
+ }]);
+
# A bit more complicated
->connect_info(
[
elsif (ref $args[0] eq 'HASH') { # single hashref (i.e. Catalyst config)
%attrs = %{$args[0]};
@args = ();
- for (qw/password user dsn/) {
- unshift @args, delete $attrs{$_};
+ if (my $code = delete $attrs{dbh_maker}) {
+ @args = $code;
+
+ my @ignored = grep { delete $attrs{$_} } (qw/dsn user password/);
+ if (@ignored) {
+ carp sprintf (
+ 'Attribute(s) %s in connect_info were ignored, as they can not be applied '
+ . "to the result of 'dbh_maker'",
+
+ join (', ', map { "'$_'" } (@ignored) ),
+ );
+ }
+ }
+ else {
+ @args = delete @attrs{qw/dsn user password/};
}
}
else { # otherwise assume dsn/user/password + \%attrs + \%extra_attrs
my $self = shift;
my $code = shift;
- my $dbh = $self->_dbh;
+ my $dbh = $self->_get_dbh;
return $self->$code($dbh, @_) if $self->{_in_dbh_do}
|| $self->{transaction_depth};
my $want_array = wantarray;
eval {
- $self->_verify_pid if $dbh;
- if(!$self->_dbh) {
- $self->_populate_dbh;
- $dbh = $self->_dbh;
- }
if($want_array) {
@result = $self->$code($dbh, @_);
my $tried = 0;
while(1) {
eval {
- $self->_verify_pid if $self->_dbh;
- $self->_populate_dbh if !$self->_dbh;
+ $self->_get_dbh;
$self->txn_begin;
if($want_array) {
$self->_do_connection_actions(disconnect_call_ => $_) for @actions;
- $self->_dbh->rollback unless $self->_dbh_autocommit;
+ $self->_dbh_rollback unless $self->_dbh_autocommit;
+
$self->_dbh->disconnect;
$self->_dbh(undef);
$self->{_dbh_gen}++;
# this is the internal "get dbh or connect (don't check)" method
sub _get_dbh {
my $self = shift;
+ $self->_verify_pid if $self->_dbh;
$self->_populate_dbh unless $self->_dbh;
return $self->_dbh;
}
return $self->_sql_maker;
}
+# nothing to do by default
sub _rebless {}
+sub _init {}
sub _populate_dbh {
my ($self) = @_;
if ($self->_dbh) { # we are connected
$driver = $self->_dbh->{Driver}{Name};
} else {
- # try to use dsn to not require being connected, the driver may still
- # force a connection in _rebless to determine version
- ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i;
- $started_unconnected = 1;
+ # if connect_info is a CODEREF, we have no choice but to connect
+ if (ref $self->_dbi_connect_info->[0] &&
+ Scalar::Util::reftype($self->_dbi_connect_info->[0]) eq 'CODE') {
+ $self->_populate_dbh;
+ $driver = $self->_dbh->{Driver}{Name};
+ }
+ else {
+ # try to use dsn to not require being connected, the driver may still
+ # force a connection in _rebless to determine version
+ ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i;
+ $started_unconnected = 1;
+ }
}
my $storage_class = "DBIx::Class::Storage::DBI::${driver}";
$self->_driver_determined(1);
+ $self->_init; # run driver-specific initializations
+
$self->_run_connection_actions
if $started_unconnected && defined $self->_dbh;
}
my @bind = map { [ undef, $_ ] } @do_args;
$self->_query_start($sql, @bind);
- $self->_dbh->do($sql, $attrs, @do_args);
+ $self->_get_dbh->do($sql, $attrs, @do_args);
$self->_query_end($sql, @bind);
}
$weak_self->throw_exception("DBI Exception: $_[0]");
}
else {
+ # the handler may be invoked by something totally out of
+ # the scope of DBIC
croak ("DBI Exception: $_[0]");
}
};
if($self->{transaction_depth} == 0) {
$self->debugobj->txn_begin()
if $self->debug;
-
- # being here implies we have AutoCommit => 1
- # if the user is utilizing txn_do - good for
- # him, otherwise we need to ensure that the
- # $dbh is healthy on BEGIN
- my $dbh_method = $self->{_in_dbh_do} ? '_dbh' : 'dbh';
- $self->$dbh_method->begin_work;
-
- } elsif ($self->auto_savepoint) {
+ $self->_dbh_begin_work;
+ }
+ elsif ($self->auto_savepoint) {
$self->svp_begin;
}
$self->{transaction_depth}++;
}
+sub _dbh_begin_work {
+ my $self = shift;
+
+ # if the user is utilizing txn_do - good for him, otherwise we need to
+ # ensure that the $dbh is healthy on BEGIN.
+ # We do this via ->dbh_do instead of ->dbh, so that the ->dbh "ping"
+ # will be replaced by a failure of begin_work itself (which will be
+ # then retried on reconnect)
+ if ($self->{_in_dbh_do}) {
+ $self->_dbh->begin_work;
+ } else {
+ $self->dbh_do(sub { $_[1]->begin_work });
+ }
+}
+
sub txn_commit {
my $self = shift;
if ($self->{transaction_depth} == 1) {
my $dbh = $self->_dbh;
$self->debugobj->txn_commit()
if ($self->debug);
- $dbh->commit;
+ $self->_dbh_commit;
$self->{transaction_depth} = 0
if $self->_dbh_autocommit;
}
}
}
+sub _dbh_commit {
+ my $self = shift;
+ $self->_dbh->commit;
+}
+
sub txn_rollback {
my $self = shift;
my $dbh = $self->_dbh;
if ($self->debug);
$self->{transaction_depth} = 0
if $self->_dbh_autocommit;
- $dbh->rollback;
+ $self->_dbh_rollback;
}
elsif($self->{transaction_depth} > 1) {
$self->{transaction_depth}--;
}
}
+sub _dbh_rollback {
+ my $self = shift;
+ $self->_dbh->rollback;
+}
+
# This used to be the top-half of _execute. It was split out to make it
# easier to override in NoBindVars without duping the rest. It takes up
# all of _execute's args, and emits $sql, @bind.
sub _execute {
my $self = shift;
- $self->dbh_do('_dbh_execute', @_)
+ $self->dbh_do('_dbh_execute', @_); # retry over disconnects
}
sub insert {
## only prepped once.
sub insert_bulk {
my ($self, $source, $cols, $data) = @_;
+
+# redispatch to insert_bulk method of storage we reblessed into, if necessary
+ if (not $self->_driver_determined) {
+ $self->_determine_driver;
+ goto $self->can('insert_bulk');
+ }
+
my %colvalues;
my $table = $source->from;
@colvalues{@$cols} = (0..$#$cols);
my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues);
- $self->_determine_driver;
-
$self->_query_start( $sql, @bind );
my $sth = $self->sth($sql);
local $Data::Dumper::Indent = 1;
local $Data::Dumper::Useqq = 1;
local $Data::Dumper::Quotekeys = 0;
+ local $Data::Dumper::Sortkeys = 1;
$self->throw_exception(sprintf "%s for populate slice:\n%s",
$tuple_status->[$i][1],
}
sub update {
- my $self = shift @_;
- my $source = shift @_;
- $self->_determine_driver;
+ my ($self, $source, @args) = @_;
+
+# redispatch to update method of storage we reblessed into, if necessary
+ if (not $self->_driver_determined) {
+ $self->_determine_driver;
+ goto $self->can('update');
+ }
+
my $bind_attributes = $self->source_bind_attributes($source);
- return $self->_execute('update' => [], $source, $bind_attributes, @_);
+ return $self->_execute('update' => [], $source, $bind_attributes, @args);
}
sub _adjust_select_args_for_complex_prefetch {
my ($self, $from, $select, $where, $attrs) = @_;
- $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute')
- if (ref $from ne 'ARRAY');
-
- # copies for mangling
- $from = [ @$from ];
- $select = [ @$select ];
- $attrs = { %$attrs };
+ $self->throw_exception ('Nothing to prefetch... how did we get here?!')
+ if not @{$attrs->{_prefetch_select}};
- # separate attributes
- my $sub_attrs = { %$attrs };
- delete $attrs->{$_} for qw/where bind rows offset group_by having/;
- delete $sub_attrs->{$_} for qw/for collapse _prefetch_select _collapse_order_by select as/;
+ $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute')
+ if (ref $from ne 'ARRAY' || ref $from->[0] ne 'HASH' || ref $from->[1] ne 'ARRAY');
- my $select_root_alias = $attrs->{alias};
- my $sql_maker = $self->sql_maker;
- # create subquery select list - consider only stuff *not* brought in by the prefetch
- my $sub_select = [];
- my $sub_group_by;
- for my $i (0 .. @{$attrs->{select}} - @{$attrs->{_prefetch_select}} - 1) {
- my $sel = $attrs->{select}[$i];
+ # generate inner/outer attribute lists, remove stuff that doesn't apply
+ my $outer_attrs = { %$attrs };
+ delete $outer_attrs->{$_} for qw/where bind rows offset group_by having/;
- # alias any functions to the dbic-side 'as' label
- # adjust the outer select accordingly
- if (ref $sel eq 'HASH' ) {
- $sel->{-as} ||= $attrs->{as}[$i];
- $select->[$i] = join ('.', $attrs->{alias}, ($sel->{-as} || "select_$i") );
- }
+ my $inner_attrs = { %$attrs };
+ delete $inner_attrs->{$_} for qw/for collapse _prefetch_select _collapse_order_by select as/;
- push @$sub_select, $sel;
- }
# bring over all non-collapse-induced order_by into the inner query (if any)
# the outer one will have to keep them all
- delete $sub_attrs->{order_by};
- if (my $ord_cnt = @{$attrs->{order_by}} - @{$attrs->{_collapse_order_by}} ) {
- $sub_attrs->{order_by} = [
- @{$attrs->{order_by}}[ 0 .. $ord_cnt - 1]
+ delete $inner_attrs->{order_by};
+ if (my $ord_cnt = @{$outer_attrs->{order_by}} - @{$outer_attrs->{_collapse_order_by}} ) {
+ $inner_attrs->{order_by} = [
+ @{$outer_attrs->{order_by}}[ 0 .. $ord_cnt - 1]
];
}
- # mangle {from}, keep in mind that $from is "headless" from here on
- my $join_root = shift @$from;
- my %inner_joins;
- my %join_info = map { $_->[0]{-alias} => $_->[0] } (@$from);
+ # generate the inner/outer select lists
+ # for inside we consider only stuff *not* brought in by the prefetch
+ # on the outside we substitute any function for its alias
+ my $outer_select = [ @$select ];
+ my $inner_select = [];
+ for my $i (0 .. ( @$outer_select - @{$outer_attrs->{_prefetch_select}} - 1) ) {
+ my $sel = $outer_select->[$i];
+
+ if (ref $sel eq 'HASH' ) {
+ $sel->{-as} ||= $attrs->{as}[$i];
+ $outer_select->[$i] = join ('.', $attrs->{alias}, ($sel->{-as} || "inner_column_$i") );
+ }
+
+ push @$inner_select, $sel;
+ }
+
+ # normalize a copy of $from, so it will be easier to work with further
+ # down (i.e. promote the initial hashref to an AoH)
+ $from = [ @$from ];
+ $from->[0] = [ $from->[0] ];
+ my %original_join_info = map { $_->[0]{-alias} => $_->[0] } (@$from);
- # in complex search_related chains $select_root_alias may *not* be
- # 'me' so always include it in the inner join
- $inner_joins{$select_root_alias} = 1 if ($join_root->{-alias} ne $select_root_alias);
+ # decide which parts of the join will remain in either part of
+ # the outer/inner query
- # decide which parts of the join will remain on the inside
- #
- # this is not a very viable optimisation, but it was written
- # before I realised this, so might as well remain. We can throw
- # away _any_ branches of the join tree that are:
- # 1) not mentioned in the condition/order
- # 2) left-join leaves (or left-join leaf chains)
- # Most of the join conditions will not satisfy this, but for real
- # complex queries some might, and we might make some RDBMS happy.
- #
- #
- # since we do not have introspectable SQLA, we fall back to ugly
- # scanning of raw SQL for WHERE, and for pieces of ORDER BY
- # in order to determine what goes into %inner_joins
+ # First we compose a list of which aliases are used in restrictions
+ # (i.e. conditions/order/grouping/etc). Since we do not have
+ # introspectable SQLA, we fall back to ugly scanning of raw SQL for
+ # WHERE, and for pieces of ORDER BY in order to determine which aliases
+ # need to appear in the resulting sql.
# It may not be very efficient, but it's a reasonable stop-gap
+ # Also unqualified column names will not be considered, but more often
+ # than not this is actually ok
+ #
+ # In the same loop we enumerate part of the selection aliases, as
+ # it requires the same sqla hack for the time being
+ my ($restrict_aliases, $select_aliases, $prefetch_aliases);
{
# produce stuff unquoted, so it can be scanned
+ my $sql_maker = $self->sql_maker;
local $sql_maker->{quote_char};
my $sep = $self->_sql_maker_opts->{name_sep} || '.';
$sep = "\Q$sep\E";
- my @order_by = (map
+ my $non_prefetch_select_sql = $sql_maker->_recurse_fields ($inner_select);
+ my $prefetch_select_sql = $sql_maker->_recurse_fields ($outer_attrs->{_prefetch_select});
+ my $where_sql = $sql_maker->where ($where);
+ my $group_by_sql = $sql_maker->_order_by({
+ map { $_ => $inner_attrs->{$_} } qw/group_by having/
+ });
+ my @non_prefetch_order_by_chunks = (map
{ ref $_ ? $_->[0] : $_ }
- $sql_maker->_order_by_chunks ($sub_attrs->{order_by})
+ $sql_maker->_order_by_chunks ($inner_attrs->{order_by})
);
- my $where_sql = $sql_maker->where ($where);
- my $select_sql = $sql_maker->_recurse_fields ($sub_select);
- # sort needed joins
- for my $alias (keys %join_info) {
+ for my $alias (keys %original_join_info) {
+ my $seen_re = qr/\b $alias $sep/x;
- # any table alias found on a column name in where or order_by
- # gets included in %inner_joins
- # Also any parent joins that are needed to reach this particular alias
- for my $piece ($select_sql, $where_sql, @order_by ) {
- if ($piece =~ /\b $alias $sep/x) {
- $inner_joins{$alias} = 1;
+ for my $piece ($where_sql, $group_by_sql, @non_prefetch_order_by_chunks ) {
+ if ($piece =~ $seen_re) {
+ $restrict_aliases->{$alias} = 1;
}
}
+
+ if ($non_prefetch_select_sql =~ $seen_re) {
+ $select_aliases->{$alias} = 1;
+ }
+
+ if ($prefetch_select_sql =~ $seen_re) {
+ $prefetch_aliases->{$alias} = 1;
+ }
+
}
}
- # scan for non-leaf/non-left joins and mark as needed
- # also mark all ancestor joins that are needed to reach this particular alias
- # (e.g. join => { cds => 'tracks' } - tracks will bring cds too )
- #
- # traverse by the size of the -join_path i.e. reverse depth first
- for my $alias (sort { @{$join_info{$b}{-join_path}} <=> @{$join_info{$a}{-join_path}} } (keys %join_info) ) {
-
- my $j = $join_info{$alias};
- $inner_joins{$alias} = 1 if (! $j->{-join_type} || ($j->{-join_type} !~ /^left$/i) );
+ # Add any non-left joins to the restriction list (such joins are indeed restrictions)
+ for my $j (values %original_join_info) {
+ my $alias = $j->{-alias} or next;
+ $restrict_aliases->{$alias} = 1 if (
+ (not $j->{-join_type})
+ or
+ ($j->{-join_type} !~ /^left (?: \s+ outer)? $/xi)
+ );
+ }
- if ($inner_joins{$alias}) {
- $inner_joins{$_} = 1 for (@{$j->{-join_path}});
+ # mark all join parents as mentioned
+ # (e.g. join => { cds => 'tracks' } - tracks will need to bring cds too )
+ for my $collection ($restrict_aliases, $select_aliases) {
+ for my $alias (keys %$collection) {
+ $collection->{$_} = 1
+ for (@{ $original_join_info{$alias}{-join_path} || [] });
}
}
# construct the inner $from for the subquery
- my $inner_from = [ $join_root ];
+ my %inner_joins = (map { %{$_ || {}} } ($restrict_aliases, $select_aliases) );
+ my @inner_from;
for my $j (@$from) {
- push @$inner_from, $j if $inner_joins{$j->[0]{-alias}};
+ push @inner_from, $j if $inner_joins{$j->[0]{-alias}};
}
# if a multi-type join was needed in the subquery ("multi" is indicated by
# presence in {collapse}) - add a group_by to simulate the collapse in the subq
- unless ($sub_attrs->{group_by}) {
+ unless ($inner_attrs->{group_by}) {
for my $alias (keys %inner_joins) {
# the dot comes from some weirdness in collapse
# remove after the rewrite
if ($attrs->{collapse}{".$alias"}) {
- $sub_attrs->{group_by} ||= $sub_select;
+ $inner_attrs->{group_by} ||= $inner_select;
last;
}
}
}
+ # demote the inner_from head
+ $inner_from[0] = $inner_from[0][0];
+
# generate the subquery
my $subq = $self->_select_args_to_query (
- $inner_from,
- $sub_select,
+ \@inner_from,
+ $inner_select,
$where,
- $sub_attrs
+ $inner_attrs,
);
+
my $subq_joinspec = {
- -alias => $select_root_alias,
- -source_handle => $join_root->{-source_handle},
- $select_root_alias => $subq,
+ -alias => $attrs->{alias},
+ -source_handle => $inner_from[0]{-source_handle},
+ $attrs->{alias} => $subq,
};
- # Generate a new from (really just replace the join slot with the subquery)
- # Before we would start the outer chain from the subquery itself (i.e.
- # SELECT ... FROM (SELECT ... ) alias JOIN ..., but this turned out to be
- # a bad idea for search_related, as the root of the chain was effectively
- # lost (i.e. $artist_rs->search_related ('cds'... ) would result in alias
- # of 'cds', which would prevent from doing things like order_by artist.*)
- # See t/prefetch/via_search_related.t for a better idea
+  # Generate the outer from - this is relatively easy (really just replace
+  # the join slot with the subquery), with a major caveat - we cannot
+  # join anything that is non-selecting (not part of the prefetch), but at
+  # the same time is a multi-type relationship, as it will explode the result.
+  #
+  # There are two possibilities here
+  # - either the join is non-restricting, in which case we simply throw it away
+  # - it is part of the restrictions, in which case we need to collapse the outer
+  # result by tacking yet another group_by onto the outside of the query
+
+ # so first generate the outer_from, up to the substitution point
my @outer_from;
- if ($join_root->{-alias} eq $select_root_alias) { # just swap the root part and we're done
- @outer_from = (
- $subq_joinspec,
- @$from,
- )
- }
- else { # this is trickier
- @outer_from = ($join_root);
-
- for my $j (@$from) {
- if ($j->[0]{-alias} eq $select_root_alias) {
- push @outer_from, [
- $subq_joinspec,
- @{$j}[1 .. $#$j],
- ];
- }
- else {
- push @outer_from, $j;
- }
+ while (my $j = shift @$from) {
+ if ($j->[0]{-alias} eq $attrs->{alias}) { # time to swap
+ push @outer_from, [
+ $subq_joinspec,
+ @{$j}[1 .. $#$j],
+ ];
+ last; # we'll take care of what's left in $from below
+ }
+ else {
+ push @outer_from, $j;
+ }
+ }
+
+ # see what's left - throw away if not selecting/restricting
+ # also throw in a group_by if restricting to guard against
+ # cross-join explosions
+ #
+ while (my $j = shift @$from) {
+ my $alias = $j->[0]{-alias};
+
+ if ($select_aliases->{$alias} || $prefetch_aliases->{$alias}) {
+ push @outer_from, $j;
+ }
+ elsif ($restrict_aliases->{$alias}) {
+ push @outer_from, $j;
+
+ # FIXME - this should be obviated by SQLA2, as I'll be able to
+ # have restrict_inner and restrict_outer... or something to that
+ # effect... I think...
+
+ # FIXME2 - I can't find a clean way to determine if a particular join
+ # is a multi - instead I am just treating everything as a potential
+ # explosive join (ribasushi)
+ #
+ # if (my $handle = $j->[0]{-source_handle}) {
+ # my $rsrc = $handle->resolve;
+ # ... need to bail out of the following if this is not a multi,
+ # as it will be much easier on the db ...
+
+ $outer_attrs->{group_by} ||= $outer_select;
+ # }
}
}
+ # demote the outer_from head
+ $outer_from[0] = $outer_from[0][0];
+
# This is totally horrific - the $where ends up in both the inner and outer query
# Unfortunately not much can be done until SQLA2 introspection arrives, and even
# then if where conditions apply to the *right* side of the prefetch, you may need
# the outer select to exclude joins you didn't want in the first place
#
# OTOH it can be seen as a plus: <ash> (notes that this query would make a DBA cry ;)
- return (\@outer_from, $select, $where, $attrs);
+ return (\@outer_from, $outer_select, $where, $outer_attrs);
}
sub _resolve_ident_sources {
sub sth {
my ($self, $sql) = @_;
- $self->dbh_do('_dbh_sth', $sql);
+ $self->dbh_do('_dbh_sth', $sql); # retry over disconnects
}
sub _dbh_columns_info_for {
sub columns_info_for {
my ($self, $table) = @_;
- $self->dbh_do('_dbh_columns_info_for', $table);
+ $self->_dbh_columns_info_for ($self->_get_dbh, $table);
}
=head2 last_insert_id
sub last_insert_id {
my $self = shift;
- $self->dbh_do('_dbh_last_insert_id', @_);
+ $self->_dbh_last_insert_id ($self->_dbh, @_);
+}
+
+=head2 _native_data_type
+
+=over 4
+
+=item Arguments: $type_name
+
+=back
+
+This API is B<EXPERIMENTAL>, will almost definitely change in the future, and
+is currently only used by L<::AutoCast|DBIx::Class::Storage::DBI::AutoCast> and
+L<::Sybase|DBIx::Class::Storage::DBI::Sybase>.
+
+The default implementation returns C<undef>; implement it in your Storage
+driver if you need this functionality.
+
+Should map types from other databases to the native RDBMS type, for example
+C<VARCHAR2> to C<VARCHAR>.
+
+Types with modifiers should map to the underlying data type. For example,
+C<INTEGER AUTO_INCREMENT> should become C<INTEGER>.
+
+Composite types should map to the container type, for example
+C<ENUM(foo,bar,baz)> becomes C<ENUM>.
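+
+A minimal sketch of such an override (the type map below is purely
+illustrative, not part of any shipped driver):
+
+  sub _native_data_type {
+    my ($self, $type) = @_;
+
+    # map foreign types to what this RDBMS natively understands,
+    # falling through to undef for anything unmapped (as the default does)
+    my %map = ( VARCHAR2 => 'VARCHAR', NUMBER => 'NUMERIC' );
+    return $map{uc($type || '')};
+  }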
+
+=cut
+
+sub _native_data_type {
+ #my ($self, $data_type) = @_;
+ return undef
+}
+
+# Check if placeholders are supported at all
+sub _placeholders_supported {
+ my $self = shift;
+ my $dbh = $self->_get_dbh;
+
+ # some drivers provide a $dbh attribute (e.g. Sybase and $dbh->{syb_dynamic_supported})
+ # but it is inaccurate more often than not
+ eval {
+ local $dbh->{PrintError} = 0;
+ local $dbh->{RaiseError} = 1;
+ $dbh->do('select ?', {}, 1);
+ };
+ return $@ ? 0 : 1;
+}
+
+# Check if placeholders bound to non-string types throw exceptions
+#
+sub _typeless_placeholders_supported {
+ my $self = shift;
+ my $dbh = $self->_get_dbh;
+
+ eval {
+ local $dbh->{PrintError} = 0;
+ local $dbh->{RaiseError} = 1;
+ # this specifically tests a bind that is NOT a string
+ $dbh->do('select 1 where 1 = ?', {}, 1);
+ };
+ return $@ ? 0 : 1;
}
=head2 sqlt_type
=cut
-sub sqlt_type { shift->_get_dbh->{Driver}->{Name} }
+sub sqlt_type {
+ my ($self) = @_;
+
+ if (not $self->_driver_determined) {
+ $self->_determine_driver;
+ goto $self->can ('sqlt_type');
+ }
+
+ $self->_get_dbh->{Driver}->{Name};
+}
=head2 bind_attribute_by_data_type
%{$sqltargs || {}}
};
- $self->throw_exception(q{Can't create a ddl file without SQL::Translator 0.09003: '}
- . $self->_check_sqlt_message . q{'})
- if !$self->_check_sqlt_version;
+ $self->throw_exception("Can't create a ddl file without SQL::Translator: " . $self->_sqlt_version_error)
+ if !$self->_sqlt_version_ok;
my $sqlt = SQL::Translator->new( $sqltargs );
return join('', @rows);
}
- $self->throw_exception(q{Can't deploy without SQL::Translator 0.09003: '}
- . $self->_check_sqlt_message . q{'})
- if !$self->_check_sqlt_version;
+ $self->throw_exception("Can't deploy without either SQL::Translator or a ddl_dir: " . $self->_sqlt_version_error )
+ if !$self->_sqlt_version_ok;
  # sources needs to be a parser arg, but for simplicity allow at top level
# coming in
sub datetime_parser {
my $self = shift;
return $self->{datetime_parser} ||= do {
- $self->_populate_dbh unless $self->_dbh;
$self->build_datetime_parser(@_);
};
}
=cut
sub build_datetime_parser {
+ if (not $_[0]->_driver_determined) {
+ $_[0]->_determine_driver;
+ goto $_[0]->can('build_datetime_parser');
+ }
+
my $self = shift;
my $type = $self->datetime_parser_type(@_);
- eval "use ${type}";
- $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ $self->ensure_class_loaded ($type);
return $type;
}
-{
- my $_check_sqlt_version; # private
- my $_check_sqlt_message; # private
- sub _check_sqlt_version {
- return $_check_sqlt_version if defined $_check_sqlt_version;
- eval 'use SQL::Translator "0.09003"';
- $_check_sqlt_message = $@ || '';
- $_check_sqlt_version = !$@;
- }
-
- sub _check_sqlt_message {
- _check_sqlt_version if !defined $_check_sqlt_message;
- $_check_sqlt_message;
- }
-}
=head2 is_replicating
return;
}
+# SQLT version handling
+{
+ my $_sqlt_version_ok; # private
+ my $_sqlt_version_error; # private
+
+ sub _sqlt_version_ok {
+ if (!defined $_sqlt_version_ok) {
+ eval "use SQL::Translator $minimum_sqlt_version";
+ if ($@) {
+ $_sqlt_version_ok = 0;
+ $_sqlt_version_error = $@;
+ }
+ else {
+ $_sqlt_version_ok = 1;
+ }
+ }
+ return $_sqlt_version_ok;
+ }
+
+ sub _sqlt_version_error {
+ shift->_sqlt_version_ok unless defined $_sqlt_version_ok;
+ return $_sqlt_version_error;
+ }
+
+ sub _sqlt_minimum_version { $minimum_sqlt_version };
+}
+
sub DESTROY {
my $self = shift;
+
$self->_verify_pid if $self->_dbh;
# some databases need this to stop spewing warnings
if (my $dbh = $self->_dbh) {
+ local $@;
eval { $dbh->disconnect };
}
--- /dev/null
+package DBIx::Class::Storage::DBI::AutoCast;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+
+__PACKAGE__->mk_group_accessors('simple' => 'auto_cast' );
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::AutoCast
+
+=head1 SYNOPSIS
+
+ $schema->storage->auto_cast(1);
+
+=head1 DESCRIPTION
+
+In some combinations of RDBMS and DBD drivers (e.g. FreeTDS and Sybase),
+statements with values bound to columns or conditions that are not strings
+will throw implicit type conversion errors.
+
+As long as a column L<data_type|DBIx::Class::ResultSource/add_columns> is
+defined, and it resolves to a base RDBMS native type via L</_native_data_type> as
+defined in your Storage driver, the placeholder for this column will be
+converted to:
+
+ CAST(? as $mapped_type)
+
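+For example (an illustrative sketch, assuming a column whose declared
+C<data_type> maps to C<INTEGER> via L</_native_data_type>), a simple
+equality condition would be rendered as:
+
+  WHERE me.rank = CAST(? AS INTEGER)
+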
+=cut
+
+sub _prep_for_execute {
+ my $self = shift;
+ my ($op, $extra_bind, $ident, $args) = @_;
+
+ my ($sql, $bind) = $self->next::method (@_);
+
+# If we're using ::NoBindVars, there are no binds by this point so this code
+# gets skipped.
+ if ($self->auto_cast && @$bind) {
+ my $new_sql;
+ my @sql_part = split /\?/, $sql;
+ my $col_info = $self->_resolve_column_info($ident,[ map $_->[0], @$bind ]);
+
+ foreach my $bound (@$bind) {
+ my $col = $bound->[0];
+ my $type = $self->_native_data_type($col_info->{$col}{data_type});
+
+ foreach my $data (@{$bound}[1..$#$bound]) {
+ $new_sql .= shift(@sql_part) .
+ ($type ? "CAST(? AS $type)" : '?');
+ }
+ }
+ $new_sql .= join '', @sql_part;
+ $sql = $new_sql;
+ }
+
+ return ($sql, $bind);
+}
+
+
+=head1 AUTHOR
+
+See L<DBIx::Class/CONTRIBUTORS>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
+
+1;
use base qw/DBIx::Class::Cursor/;
+__PACKAGE__->mk_group_accessors('simple' =>
+ qw/sth/
+);
+
=head1 NAME
DBIx::Class::Storage::DBI::Cursor - Object representing a query cursor on a
&& $self->{attrs}{rows}
&& $self->{pos} >= $self->{attrs}{rows}
) {
- $self->{sth}->finish if $self->{sth}->{Active};
- delete $self->{sth};
+ $self->sth->finish if $self->sth->{Active};
+ $self->sth(undef);
$self->{done} = 1;
}
return if $self->{done};
- unless ($self->{sth}) {
- $self->{sth} = ($storage->_select(@{$self->{args}}))[1];
+ unless ($self->sth) {
+ $self->sth(($storage->_select(@{$self->{args}}))[1]);
if ($self->{attrs}{software_limit}) {
if (my $offset = $self->{attrs}{offset}) {
- $self->{sth}->fetch for 1 .. $offset;
+ $self->sth->fetch for 1 .. $offset;
}
}
}
- my @row = $self->{sth}->fetchrow_array;
+ my @row = $self->sth->fetchrow_array;
if (@row) {
$self->{pos}++;
} else {
- delete $self->{sth};
+ $self->sth(undef);
$self->{done} = 1;
}
return @row;
my ($storage, $dbh, $self) = @_;
$self->_check_dbh_gen;
- $self->{sth}->finish if $self->{sth}->{Active};
- delete $self->{sth};
+ $self->sth->finish if $self->sth && $self->sth->{Active};
+ $self->sth(undef);
my ($rv, $sth) = $storage->_select(@{$self->{args}});
return @{$sth->fetchall_arrayref};
}
my ($self) = @_;
# No need to care about failures here
- eval { $self->{sth}->finish if $self->{sth} && $self->{sth}->{Active} };
+ eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
$self->_soft_reset;
+ return undef;
}
sub _soft_reset {
my ($self) = @_;
- delete $self->{sth};
+ $self->sth(undef);
delete $self->{done};
$self->{pos} = 0;
- return $self;
}
sub _check_dbh_gen {
# None of the reasons this would die matter if we're in DESTROY anyways
local $@;
- eval { $self->{sth}->finish if $self->{sth} && $self->{sth}->{Active} };
+ eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
}
1;
__PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::MSSQL');
+sub _set_identity_insert {
+ my ($self, $table) = @_;
+
+ my $sql = sprintf (
+ 'SET IDENTITY_INSERT %s ON',
+ $self->sql_maker->_quote ($table),
+ );
+
+ my $dbh = $self->_get_dbh;
+ eval { $dbh->do ($sql) };
+ if ($@) {
+ $self->throw_exception (sprintf "Error executing '%s': %s",
+ $sql,
+ $dbh->errstr,
+ );
+ }
+}
+
+sub _unset_identity_insert {
+ my ($self, $table) = @_;
+
+ my $sql = sprintf (
+ 'SET IDENTITY_INSERT %s OFF',
+ $self->sql_maker->_quote ($table),
+ );
+
+ my $dbh = $self->_get_dbh;
+ $dbh->do ($sql);
+}
+
sub insert_bulk {
my $self = shift;
my ($source, $cols, $data) = @_;
- my $identity_insert = 0;
-
- COLUMNS:
- foreach my $col (@{$cols}) {
- if ($source->column_info($col)->{is_auto_increment}) {
- $identity_insert = 1;
- last COLUMNS;
- }
- }
+ my $is_identity_insert = (List::Util::first
+ { $source->column_info ($_)->{is_auto_increment} }
+ (@{$cols})
+ )
+ ? 1
+ : 0;
- if ($identity_insert) {
- my $table = $source->from;
- $self->_get_dbh->do("SET IDENTITY_INSERT $table ON");
+ if ($is_identity_insert) {
+ $self->_set_identity_insert ($source->name);
}
$self->next::method(@_);
- if ($identity_insert) {
- my $table = $source->from;
- $self->_get_dbh->do("SET IDENTITY_INSERT $table OFF");
+ if ($is_identity_insert) {
+ $self->_unset_identity_insert ($source->name);
}
}
my $self = shift;
my ($source, $to_insert) = @_;
- my $updated_cols = {};
+ my $supplied_col_info = $self->_resolve_column_info($source, [keys %$to_insert] );
my %guid_cols;
my @pk_cols = $source->primary_columns;
my @get_guids_for =
grep { not exists $to_insert->{$_} } (@pk_guids, @auto_guids);
+ my $updated_cols = {};
+
for my $guid_col (@get_guids_for) {
my ($new_guid) = $self->_get_dbh->selectrow_array('SELECT NEWID()');
$updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid;
}
+ my $is_identity_insert = (List::Util::first { $_->{is_auto_increment} } (values %$supplied_col_info) )
+ ? 1
+ : 0;
+
+ if ($is_identity_insert) {
+ $self->_set_identity_insert ($source->name);
+ }
+
$updated_cols = { %$updated_cols, %{ $self->next::method(@_) } };
+ if ($is_identity_insert) {
+ $self->_unset_identity_insert ($source->name);
+ }
+
+
return $updated_cols;
}
if ($op eq 'insert') {
$sql .= ';SELECT SCOPE_IDENTITY()';
- my $col_info = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]);
- if (List::Util::first { $_->{is_auto_increment} } (values %$col_info) ) {
-
- my $table = $ident->from;
- my $identity_insert_on = "SET IDENTITY_INSERT $table ON";
- my $identity_insert_off = "SET IDENTITY_INSERT $table OFF";
- $sql = "$identity_insert_on; $sql; $identity_insert_off";
- }
}
return ($sql, $bind);
=head1 IMPLEMENTATION NOTES
+=head2 IDENTITY information
+
Microsoft SQL Server supports three methods of retrieving the IDENTITY
value for inserted row: IDENT_CURRENT, @@IDENTITY, and SCOPE_IDENTITY().
SCOPE_IDENTITY is used here because it is the safest. However, it must
inserts into another table with an identity will give erroneous results on
recent versions of SQL Server.
+=head2 identity insert
+
+Be aware that we have tried to make things as simple as possible for our users.
+For MSSQL that means that when a user tries to create a row while supplying an
+explicit value for an autoincrementing column, we will try to issue the
+appropriate database call to make this possible, namely C<SET IDENTITY_INSERT
+$table_name ON>. Unfortunately this operation in MSSQL requires the
+C<db_ddladmin> privilege, which is normally not included in standard
+write permissions.
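+
+For illustration (the resultset and column names here are only examples),
+this is the kind of create() that triggers the dance described above:
+
+  # explicit value supplied for the IDENTITY column - DBIC will wrap
+  # the insert in SET IDENTITY_INSERT <table> ON / OFF on our behalf
+  $schema->resultset('Artist')->create({ artistid => 42, name => 'Foo' });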
+
=head1 AUTHOR
See L<DBIx::Class/CONTRIBUTORS>.
sub _prep_for_execute {
my $self = shift;
- my ($op, $extra_bind, $ident) = @_;
-
my ($sql, $bind) = $self->next::method(@_);
- # stringify args, quote via $dbh, and manually insert
+ # stringify bind args, quote via $dbh, and manually insert
+ #my ($op, $extra_bind, $ident, $args) = @_;
+ my $ident = $_[2];
my @sql_part = split /\?/, $sql;
my $new_sql;
+ my $col_info = $self->_resolve_column_info($ident, [ map $_->[0], @$bind ]);
+
foreach my $bound (@$bind) {
my $col = shift @$bound;
- my $datatype = 'FIXME!!!';
+
+ my $datatype = $col_info->{$col}{data_type};
+
foreach my $data (@$bound) {
- if(ref $data) {
- $data = ''.$data;
- }
- $data = $self->_dbh->quote($data);
- $new_sql .= shift(@sql_part) . $data;
+ $data = ''.$data if ref $data;
+
+ $data = $self->_prep_interpolated_value($datatype, $data)
+ if $datatype;
+
+ $data = $self->_dbh->quote($data)
+ unless $self->interpolate_unquoted($datatype, $data);
+
+ $new_sql .= shift(@sql_part) . $data;
}
}
$new_sql .= join '', @sql_part;
return ($new_sql, []);
}
-=head1 AUTHORS
+=head2 interpolate_unquoted
+
+This method is called by L</_prep_for_execute> for every column in
+order to determine if its value should be quoted or not. The arguments
+are the current column data type and the actual bind value. The return
+value is interpreted as: true - do not quote, false - do quote. You should
+override this in your Storage::DBI::<database> subclass if your RDBMS
+does not like quotes around certain datatypes (e.g. Sybase and integer
+columns). The default method always returns false (do quote).
+
+ WARNING!!!
+
+ Always validate that the bind-value is valid for the current datatype.
+ Otherwise you may very well open the door to SQL injection attacks.
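+
+As a hedged sketch (not shipped code), a subclass that trusts plain
+integers might implement it as:
+
+  sub interpolate_unquoted {
+    my ($self, $datatype, $value) = @_;
+
+    # interpolate unquoted only when the value is provably an integer
+    return 1 if ($datatype || '') =~ /int/i and $value =~ /\A -? \d+ \z/x;
+
+    return 0; # quote everything else
+  }
+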
-Brandon Black <blblack@gmail.com>
+=cut
+
+sub interpolate_unquoted {
+ #my ($self, $datatype, $value) = @_;
+ return 0;
+}
+
+=head2 _prep_interpolated_value
+
+Given a datatype and the value to be inserted directly into a SQL query, returns
+the necessary string to represent that value (e.g. by adding a '$' sign).
+
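+A sketch of an override for a hypothetical money datatype (illustrative
+only, not shipped code):
+
+  sub _prep_interpolated_value {
+    my ($self, $datatype, $value) = @_;
+
+    # money values are prefixed with a literal '$' when interpolated raw
+    $value = '$' . $value if ($datatype || '') =~ /money/i;
+    return $value;
+  }
+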
+=cut
+
+sub _prep_interpolated_value {
+ #my ($self, $datatype, $value) = @_;
+ return $_[2];
+}
+
+=head1 AUTHORS
-Trym Skaar <trym@tryms.no>
+See L<DBIx::Class/CONTRIBUTORS>
=head1 LICENSE
use base qw/DBIx::Class::Storage::DBI::MSSQL/;
use mro 'c3';
-use Carp::Clan qw/^DBIx::Class/;
use List::Util();
use Scalar::Util ();
my $self = shift;
if (ref($self->_dbi_connect_info->[0]) eq 'CODE') {
- croak 'cannot set DBI attributes on a CODE ref connect_info';
+ $self->throw_exception ('cannot set DBI attributes on a CODE ref connect_info');
}
my $dbi_attrs = $self->_dbi_connect_info->[-1];
$dbh->do('SELECT @@IDENTITY');
};
if ($@) {
- croak <<'EOF';
+ $self->throw_exception (<<'EOF');
Your drivers do not seem to support dynamic cursors (odbc_cursortype => 2).
if you're using FreeTDS, make sure to set tds_version to 8.0 or greater.
$self->_identity_method('@@identity');
}
-sub _rebless {
- no warnings 'uninitialized';
+sub _init {
my $self = shift;
- if (ref($self->_dbi_connect_info->[0]) ne 'CODE' &&
- eval { $self->_dbi_connect_info->[-1]{odbc_cursortype} } == 2) {
+ no warnings qw/uninitialized/;
+
+ if (
+ ref($self->_dbi_connect_info->[0]) ne 'CODE'
+ &&
+ ref ($self->_dbi_connect_info->[-1]) eq 'HASH'
+ &&
+ $self->_dbi_connect_info->[-1]{odbc_cursortype} == 2
+ ) {
$self->_set_dynamic_cursors;
return;
}
my $dsn = $self->_dbi_connect_info->[0];
if (ref($dsn) eq 'CODE') {
- croak 'cannot change the DBI DSN on a CODE ref connect_info';
+ $self->throw_exception('cannot change the DBI DSN on a CODE ref connect_info');
}
if ($dsn !~ /MARS_Connection=/) {
? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins'
: 'DBIx::Class::Storage::DBI::Oracle::Generic';
- # Load and rebless
- eval "require $class";
-
- bless $self, $class unless $@;
+ $self->ensure_class_loaded ($class);
+ bless $self, $class;
}
}
"alter session set nls_timestamp_tz_format='$timestamp_tz_format'");
}
-sub _svp_begin {
- my ($self, $name) = @_;
-
- $self->_get_dbh->do("SAVEPOINT $name");
-}
-
=head2 source_bind_attributes
Handle LOB types in Oracle. Under a certain size (4k?), you can get away
return \%bind_attributes;
}
+sub _svp_begin {
+ my ($self, $name) = @_;
+
+ $self->_get_dbh->do("SAVEPOINT $name");
+}
+
# Oracle automatically releases a savepoint when you start another one with the
# same name.
sub _svp_release { 1 }
$sub->();
}
-sub _dbh_last_insert_id {
- my ($self, $dbh, $seq) = @_;
- $dbh->last_insert_id(undef, undef, undef, undef, {sequence => $seq});
-}
-
sub last_insert_id {
- my ($self,$source,$col) = @_;
- my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
- $self->throw_exception("could not fetch primary key for " . $source->name . ", could not "
- . "get autoinc sequence for $col (check that table and column specifications are correct "
- . "and in the correct case)") unless defined $seq;
- $self->dbh_do('_dbh_last_insert_id', $seq);
-}
+ my ($self,$source,@cols) = @_;
-sub _get_pg_search_path {
- my ($self,$dbh) = @_;
- # cache the search path as ['schema','schema',...] in the storage
- # obj
- $self->{_pg_search_path} ||= do {
- my @search_path;
- my ($sp_string) = $dbh->selectrow_array('SHOW search_path');
- while( $sp_string =~ s/("[^"]+"|[^,]+),?// ) {
- unless( defined $1 and length $1 ) {
- $self->throw_exception("search path sanity check failed: '$1'")
- }
- push @search_path, $1;
- }
- \@search_path
- };
-}
+ my @values;
-sub _dbh_get_autoinc_seq {
- my ($self, $dbh, $schema, $table, @pri) = @_;
-
- # get the list of postgres schemas to search. if we have a schema
- # specified, use that. otherwise, use the search path
- my @search_path;
- if( defined $schema and length $schema ) {
- @search_path = ( $schema );
- } else {
- @search_path = @{ $self->_get_pg_search_path($dbh) };
- }
+ for my $col (@cols) {
+ my $seq = ( $source->column_info($col)->{sequence} ||= $self->dbh_do('_dbh_get_autoinc_seq', $source, $col) )
+ or $self->throw_exception( "could not determine sequence for "
+ . $source->name
+ . ".$col, please consider adding a "
+ . "schema-qualified sequence to its column info"
+ );
- foreach my $search_schema (@search_path) {
- foreach my $col (@pri) {
- my $info = $dbh->column_info(undef,$search_schema,$table,$col)->fetchrow_hashref;
- if($info) {
- # if we get here, we have definitely found the right
- # column.
- if( defined $info->{COLUMN_DEF} and
- $info->{COLUMN_DEF}
- =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i
- ) {
- my $seq = $1;
- return $seq =~ /\./ ? $seq : $info->{TABLE_SCHEM} . "." . $seq;
- } else {
- # we have found the column, but cannot figure out
- # the nextval seq
- return;
- }
- }
- }
+ push @values, $self->_dbh_last_insert_id ($self->_dbh, $seq);
}
- return;
+
+ return @values;
+}
+
+# there seems to be absolutely no reason to have this as a separate method,
+# but leaving it intact in case someone is already overriding it
+sub _dbh_last_insert_id {
+ my ($self, $dbh, $seq) = @_;
+ $dbh->last_insert_id(undef, undef, undef, undef, {sequence => $seq});
}
-sub get_autoinc_seq {
- my ($self,$source,$col) = @_;
- my @pri = $source->primary_columns;
+sub _dbh_get_autoinc_seq {
+ my ($self, $dbh, $source, $col) = @_;
my $schema;
my $table = $source->name;
- if (ref $table eq 'SCALAR') {
- $table = $$table;
+ # deref table name if it needs it
+ $table = $$table
+ if ref $table eq 'SCALAR';
+
+ # parse out schema name if present
+ if( $table =~ /^(.+)\.(.+)$/ ) {
+ ( $schema, $table ) = ( $1, $2 );
}
- elsif ($table =~ /^(.+)\.(.+)$/) {
- ($schema, $table) = ($1, $2);
+
+ # use DBD::Pg to fetch the column info if it is recent enough to
+ # work. otherwise, use custom SQL
+ my $seq_expr = $DBD::Pg::VERSION >= 2.015001
+ ? eval{ $dbh->column_info(undef,$schema,$table,$col)->fetchrow_hashref->{COLUMN_DEF} }
+ : $self->_dbh_get_column_default( $dbh, $schema, $table, $col );
+
+ # if no default value is set on the column, or if we can't parse the
+ # default value as a sequence, throw.
+ unless ( defined $seq_expr and $seq_expr =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i ){
+ $seq_expr = '' unless defined $seq_expr;
+ $schema = "$schema." if defined $schema && length $schema;
+ $self->throw_exception( "no sequence found for $schema$table.$col, check table definition, "
+ . "or explicitly set the 'sequence' for this column in the "
+ . $source->source_name
+ . " class"
+ );
}
- $self->dbh_do('_dbh_get_autoinc_seq', $schema, $table, @pri);
+ return $1;
}
+# custom method for fetching column default, since column_info has a
+# bug with older versions of DBD::Pg
+sub _dbh_get_column_default {
+ my ( $self, $dbh, $schema, $table, $col ) = @_;
+
+ # Build and execute a query into the pg_catalog to find the Pg
+ # expression for the default value for this column in this table.
+ # If the table name is schema-qualified, query using that specific
+ # schema name.
+
+ # Otherwise, find the table in the standard Postgres way, using the
+ # search path. This is done with the pg_catalog.pg_table_is_visible
+ # function, which returns true if a given table is 'visible',
+ # meaning the first table of that name to be found in the search
+ # path.
+
+ # I *think* we can be assured that this query will always find the
+ # correct column according to standard Postgres semantics.
+ #
+ # -- rbuels
+
+ my $sqlmaker = $self->sql_maker;
+ local $sqlmaker->{bindtype} = 'normal';
+
+ my ($where, @bind) = $sqlmaker->where ({
+ 'a.attnum' => {'>', 0},
+ 'c.relname' => $table,
+ 'a.attname' => $col,
+ -not_bool => 'a.attisdropped',
+ (defined $schema && length $schema)
+ ? ( 'n.nspname' => $schema )
+ : ( -bool => \'pg_catalog.pg_table_is_visible(c.oid)' )
+ });
+
+ my ($seq_expr) = $dbh->selectrow_array(<<EOS,undef,@bind);
+
+SELECT
+ (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
+ FROM pg_catalog.pg_attrdef d
+ WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef)
+FROM pg_catalog.pg_class c
+ LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+ JOIN pg_catalog.pg_attribute a ON a.attrelid = c.oid
+$where
+
+EOS
+
+ return $seq_expr;
+}
+
+
sub sqlt_type {
return 'PostgreSQL';
}
1;
+__END__
+
=head1 NAME
DBIx::Class::Storage::DBI::Pg - Automatic primary key class for PostgreSQL
=head1 POSTGRESQL SCHEMA SUPPORT
-This supports multiple PostgreSQL schemas, with one caveat: for
-performance reasons, the schema search path is queried the first time it is
-needed and CACHED for subsequent uses.
+This driver supports multiple PostgreSQL schemas, with one caveat: for
+performance reasons, data about the search path, sequence names, and
+so forth is queried as needed and CACHED for subsequent uses.
+
+For this reason, once your schema is instantiated, you should not
+change the PostgreSQL schema search path for that schema's database
+connection. If you do, Bad Things may happen.
-For this reason, you should do any necessary manipulation of the
-PostgreSQL search path BEFORE instantiating your schema object, or as
-part of the on_connect_do option to connect(), for example:
+You should do any necessary manipulation of the search path BEFORE
+instantiating your schema object, or as part of the on_connect_do
+option to connect(), for example:
my $schema = My::Schema->connect
( $dsn,$user,$pass,
my @didnt_load;
for my $module (keys %replication_required) {
- eval "use $module $replication_required{$module}";
- push @didnt_load, "$module $replication_required{$module}"
- if $@;
+ eval "use $module $replication_required{$module}";
+ push @didnt_load, "$module $replication_required{$module}"
+ if $@;
}
croak("@{[ join ', ', @didnt_load ]} are missing and are required for Replication")
use DBIx::Class::Storage::DBI::Replicated::Types qw/BalancerClassNamePart DBICSchema DBICStorageDBI/;
use MooseX::Types::Moose qw/ClassName HashRef Object/;
use Scalar::Util 'reftype';
-use Carp::Clan qw/^DBIx::Class/;
use Hash::Merge 'merge';
use namespace::clean -except => 'meta';
isa=>'DBIx::Class::Storage::DBI::Replicated::Pool',
lazy_build=>1,
handles=>[qw/
- connect_replicants
+ connect_replicants
replicants
has_replicants
/],
select
select_single
columns_info_for
- /],
+ /],
);
=head2 write_handler
is=>'ro',
isa=>Object,
lazy_build=>1,
- handles=>[qw/
+ handles=>[qw/
on_connect_do
- on_disconnect_do
+ on_disconnect_do
connect_info
throw_exception
sql_maker
create_ddl_dir
deployment_statements
datetime_parser
- datetime_parser_type
- build_datetime_parser
+ datetime_parser_type
+ build_datetime_parser
last_insert_id
insert
insert_bulk
sth
deploy
with_deferred_fk_checks
- dbh_do
+ dbh_do
reload_row
- with_deferred_fk_checks
+ with_deferred_fk_checks
_prep_for_execute
- backup
- is_datatype_numeric
- _count_select
- _subq_count_select
- _subq_update_delete
- svp_rollback
- svp_begin
- svp_release
+ backup
+ is_datatype_numeric
+ _count_select
+ _subq_count_select
+ _subq_update_delete
+ svp_rollback
+ svp_begin
+ svp_release
/],
);
);
$self->pool($self->_build_pool)
- if $self->pool;
+ if $self->pool;
}
if (@opts{qw/balancer_type balancer_args/}) {
);
$self->balancer($self->_build_balancer)
- if $self->balancer;
+ if $self->balancer;
}
$self->_master_connect_info_opts(\%opts);
my ($class, $schema, $storage_type_args, @args) = @_;
return {
- schema=>$schema,
- %$storage_type_args,
- @args
+ schema=>$schema,
+ %$storage_type_args,
+ @args
}
}
sub _build_balancer {
my $self = shift @_;
$self->create_balancer(
- pool=>$self->pool,
+ pool=>$self->pool,
master=>$self->master,
%{$self->balancer_args},
);
for my $r (@args) {
$r = [ $r ] unless reftype $r eq 'ARRAY';
- croak "coderef replicant connect_info not supported"
+ $self->throw_exception('coderef replicant connect_info not supported')
if ref $r->[0] && reftype $r->[0] eq 'CODE';
# any connect_info options?
my $i = 0;
$i++ while $i < @$r && (reftype($r->[$i])||'') ne 'HASH';
-# make one if none
+# make one if none
$r->[$i] = {} unless $r->[$i];
# merge if two hashes
my @hashes = @$r[$i .. $#{$r}];
- croak "invalid connect_info options"
+ $self->throw_exception('invalid connect_info options')
if (grep { reftype($_) eq 'HASH' } @hashes) != @hashes;
- croak "too many hashrefs in connect_info"
+ $self->throw_exception('too many hashrefs in connect_info')
if @hashes > 2;
my %opts = %{ merge(reverse @hashes) };
# delete them
splice @$r, $i+1, ($#{$r} - $i), ();
+# make sure master/replicants opts don't clash
+ my %master_opts = %{ $self->_master_connect_info_opts };
+ if (exists $opts{dbh_maker}) {
+ delete @master_opts{qw/dsn user password/};
+ }
+ delete $master_opts{dbh_maker};
+
# merge with master
- %opts = %{ merge(\%opts, $self->_master_connect_info_opts) };
+ %opts = %{ merge(\%opts, \%master_opts) };
# update
$r->[$i] = \%opts;
($result[0]) = ($coderef->(@args));
} else {
$coderef->(@args);
- }
+ }
};
##Reset to the original state
- $self->read_handler($current);
+ $self->read_handler($current);
##Exception testing has to come last, otherwise you might leave the
##read_handler set to master.
if(@_) {
foreach my $source ($self->all_storages) {
$source->debug(@_);
- }
+ }
}
return $self->master->debug;
}
if(@_) {
foreach my $source ($self->all_storages) {
$source->debugobj(@_);
- }
+ }
}
return $self->master->debugobj;
}
if(@_) {
foreach my $source ($self->all_storages) {
$source->debugfh(@_);
- }
+ }
}
return $self->master->debugfh;
}
if(@_) {
foreach my $source ($self->all_storages) {
$source->debugcb(@_);
- }
+ }
}
return $self->master->debugcb;
}
use DBIx::Class::Storage::DBI::Replicated::Replicant;
use List::Util 'sum';
use Scalar::Util 'reftype';
+use DBI ();
use Carp::Clan qw/^DBIx::Class/;
use MooseX::Types::Moose qw/Num Int ClassName HashRef/;
},
);
+has next_unknown_replicant_id => (
+ is => 'rw',
+ metaclass => 'Counter',
+ isa => Int,
+ default => 1,
+ provides => {
+ inc => 'inc_unknown_replicant_id'
+ },
+);
+
=head1 METHODS
This class defines the following methods.
$connect_info = [ $connect_info ]
if reftype $connect_info ne 'ARRAY';
- croak "coderef replicant connect_info not supported"
- if ref $connect_info->[0] && reftype $connect_info->[0] eq 'CODE';
-
- my $replicant = $self->connect_replicant($schema, $connect_info);
+ my $connect_coderef =
+ (reftype($connect_info->[0])||'') eq 'CODE' ? $connect_info->[0]
+ : (reftype($connect_info->[0])||'') eq 'HASH' &&
+ $connect_info->[0]->{dbh_maker};
+
+ my $dsn;
+ my $replicant = do {
+# yes this is evil, but it usually only happens once (for coderefs)
+# this will fail if the coderef does not actually call DBI::connect
+ no warnings 'redefine';
+ my $connect = \&DBI::connect;
+ local *DBI::connect = sub {
+ $dsn = $_[1];
+ goto $connect;
+ };
+ $self->connect_replicant($schema, $connect_info);
+ };
+
+ my $key;
+
+ if (!$dsn) {
+ if (!$connect_coderef) {
+ $dsn = $connect_info->[0];
+ $dsn = $dsn->{dsn} if (reftype($dsn)||'') eq 'HASH';
+ }
+ else {
+ # all attempts to get the DSN failed
+ $key = "UNKNOWN_" . $self->next_unknown_replicant_id;
+ $self->inc_unknown_replicant_id;
+ }
+ }
+ if ($dsn) {
+ $replicant->dsn($dsn);
+ ($key) = ($dsn =~ m/^dbi\:.+\:(.+)$/i);
+ }
- my $key = $connect_info->[0];
- $key = $key->{dsn} if ref $key && reftype $key eq 'HASH';
- ($key) = ($key =~ m/^dbi\:.+\:(.+)$/);
+ $replicant->id($key);
+ $self->set_replicant($key => $replicant);
- $self->set_replicant( $key => $replicant);
push @newly_created, $replicant;
}
use Moose::Role;
requires qw/_query_start/;
with 'DBIx::Class::Storage::DBI::Replicated::WithDSN';
-use MooseX::Types::Moose 'Bool';
+use MooseX::Types::Moose qw/Bool Str/;
use namespace::clean -except => 'meta';
default=>1,
);
+has dsn => (is => 'rw', isa => Str);
+has id => (is => 'rw', isa => Str);
+
=head1 METHODS
This class defines the following methods.
package DBIx::Class::Storage::DBI::Replicated::WithDSN;
use Moose::Role;
+use Scalar::Util 'reftype';
requires qw/_query_start/;
use namespace::clean -except => 'meta';
around '_query_start' => sub {
my ($method, $self, $sql, @bind) = @_;
- my $dsn = $self->_dbi_connect_info->[0];
+
+ my $dsn = eval { $self->dsn } || $self->_dbi_connect_info->[0];
+
my($op, $rest) = (($sql=~m/^(\w+)(.+)$/),'NOP', 'NO SQL');
my $storage_type = $self->can('active') ? 'REPLICANT' : 'MASTER';
- $self->$method("$op [DSN_$storage_type=$dsn]$rest", @bind);
+ my $query = do {
+ if ((reftype($dsn)||'') ne 'CODE') {
+ "$op [DSN_$storage_type=$dsn]$rest";
+ }
+ elsif (my $id = eval { $self->id }) {
+ "$op [$storage_type=$id]$rest";
+ }
+ else {
+ "$op [$storage_type]$rest";
+ }
+ };
+
+ $self->$method($query, @bind);
};
=head1 ALSO SEE
my $self = shift;
my $dbh = $self->_get_dbh;
- if (not $self->_placeholders_supported) {
+ if (not $self->_typeless_placeholders_supported) {
bless $self,
'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars';
$self->_rebless;
/;
use mro 'c3';
-sub _rebless {
+sub _init {
my $self = shift;
-
$self->disable_sth_caching(1);
}
$dbh->{mysql_insertid};
}
+# we need to figure out what mysql version we're running
+sub sql_maker {
+ my $self = shift;
+
+ unless ($self->_sql_maker) {
+ my $maker = $self->next::method (@_);
+
+ # mysql 3 does not understand a bare JOIN
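+    # 18 == SQL_DBMS_VER (see DBI::Const::GetInfoType)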
+ my $mysql_ver = $self->_get_dbh->get_info(18);
+ $maker->{_default_jointype} = 'INNER' if $mysql_ver =~ /^3/;
+ }
+
+ return $self->_sql_maker;
+}
+
sub sqlt_type {
return 'MySQL';
}
use strict;
use warnings;
-use base qw/Class::Accessor::Grouped/;
+use base qw/DBIx::Class/;
use IO::File;
__PACKAGE__->mk_group_accessors(simple => qw/callback debugfh silence/);
use strict;
use warnings;
-use Carp ();
+use Carp::Clan qw/^DBIx::Class/;
sub new {
my ($class, $storage) = @_;
return if $dismiss;
my $exception = $@;
- Carp::cluck("A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or an error - bad")
- unless $exception;
+
{
local $@;
+
+ carp 'A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back.'
+ unless $exception;
+
eval { $storage->txn_rollback };
my $rollback_exception = $@;
- if($rollback_exception) {
- my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
- $storage->throw_exception(
- "Transaction aborted: ${exception}. "
- . "Rollback failed: ${rollback_exception}"
- ) unless $rollback_exception =~ /$exception_class/;
+ if ($rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) {
+ if ($exception) {
+ $exception = "Transaction aborted: ${exception} "
+ ."Rollback failed: ${rollback_exception}";
+ }
+ else {
+ carp (join ' ',
+ "********************* ROLLBACK FAILED!!! ********************",
+ "\nA rollback operation failed after the guard went out of scope.",
+ 'This is potentially a disastrous situation, check your data for',
+ "consistency: $rollback_exception"
+ );
+ }
}
}
+
+ $@ = $exception;
}
1;
}
pod2usage(1) if ($help);
-$ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} = 1 if ($trace);
+$ENV{DBIC_TRACE} = 1 if ($trace);
die('No op specified') if(!$op);
die('Invalid op') if ($op!~/^insert|update|delete|select$/s);
+use warnings;
+use strict;
+
use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my @MODULES = (
+ 'Test::Pod 1.26',
+);
+
+# Don't run tests for installs
+unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
+ plan( skip_all => "Author tests not required for installation" );
+}
-eval "use Test::Pod 1.14";
-plan skip_all => 'Test::Pod 1.14 required' if $@;
+# Load the testing modules
+foreach my $MODULE ( @MODULES ) {
+ eval "use $MODULE";
+ if ( $@ ) {
+ $ENV{RELEASE_TESTING}
+ ? die( "Failed to load required release-testing module $MODULE" )
+ : plan( skip_all => "$MODULE not available for testing" );
+ }
+}
all_pod_files_ok();
+use warnings;
+use strict;
+
use Test::More;
+use List::Util ();
+use lib qw(t/lib);
+use DBICTest;
-eval "use Pod::Coverage 0.19";
-plan skip_all => 'Pod::Coverage 0.19 required' if $@;
-eval "use Test::Pod::Coverage 1.04";
-plan skip_all => 'Test::Pod::Coverage 1.04 required' if $@;
+my @MODULES = (
+ 'Test::Pod::Coverage 1.08',
+ 'Pod::Coverage 0.20',
+);
-plan skip_all => 'set TEST_POD to enable this test'
- unless ($ENV{TEST_POD} || -e 'MANIFEST.SKIP');
+# Don't run tests for installs
+unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
+ plan( skip_all => "Author tests not required for installation" );
+}
-my @modules = sort { $a cmp $b } (Test::Pod::Coverage::all_modules());
-plan tests => scalar(@modules);
+# Load the testing modules
+foreach my $MODULE ( @MODULES ) {
+ eval "use $MODULE";
+ if ( $@ ) {
+ $ENV{RELEASE_TESTING}
+ ? die( "Failed to load required release-testing module $MODULE" )
+ : plan( skip_all => "$MODULE not available for testing" );
+ }
+}
# Since this is about checking documentation, a little documentation
-# of what this is doing might be in order...
+# of what this is doing might be in order.
# The exceptions structure below is a hash keyed by the module
-# name. The value for each is a hash, which contains one or more
+# name. Any * in a name is treated like a wildcard and will behave
+# as expected. Modules are matched by longest string first, so
+# A::B::C will match even if there is A::B*
+
+# The value for each is a hash, which contains one or more
# (although currently more than one makes no sense) of the following
# things:-
# skip => a true value means this module is not checked
# do not need to be documented.
my $exceptions = {
'DBIx::Class' => {
- ignore => [
- qw/MODIFY_CODE_ATTRIBUTES
- component_base_class
- mk_classdata
- mk_classaccessor/
- ]
+ ignore => [qw/
+ MODIFY_CODE_ATTRIBUTES
+ component_base_class
+ mk_classdata
+ mk_classaccessor
+ /]
},
'DBIx::Class::Row' => {
- ignore => [
- qw( MULTICREATE_DEBUG )
- ],
+ ignore => [qw/
+ MULTICREATE_DEBUG
+ /],
},
'DBIx::Class::ResultSource' => {
ignore => [qw/
- compare_relationship_keys
- pk_depends_on
- resolve_condition
- resolve_join
- resolve_prefetch
+ compare_relationship_keys
+ pk_depends_on
+ resolve_condition
+ resolve_join
+ resolve_prefetch
+ /],
+ },
+ 'DBIx::Class::ResultSourceHandle' => {
+ ignore => [qw/
+ schema
+ source_moniker
/],
},
'DBIx::Class::Storage' => {
- ignore => [
- qw(cursor)
- ]
+ ignore => [qw/
+ schema
+ cursor
+ /]
},
'DBIx::Class::Schema' => {
- ignore => [
- qw(setup_connection_class)
- ]
- },
- 'DBIx::Class::Storage::DBI::Sybase' => {
- ignore => [
- qw/should_quote_data_type/,
- ]
- },
- 'DBIx::Class::CDBICompat::AccessorMapping' => { skip => 1 },
- 'DBIx::Class::CDBICompat::AbstractSearch' => {
- ignore => [qw(search_where)]
- },
- 'DBIx::Class::CDBICompat::AttributeAPI' => { skip => 1 },
- 'DBIx::Class::CDBICompat::AutoUpdate' => { skip => 1 },
- 'DBIx::Class::CDBICompat::ColumnsAsHash' => {
- ignore => [qw(inflate_result new update)]
+ ignore => [qw/
+ setup_connection_class
+ /]
},
- 'DBIx::Class::CDBICompat::ColumnCase' => { skip => 1 },
- 'DBIx::Class::CDBICompat::ColumnGroups' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Constraints' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Constructor' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Copy' => {
- ignore => [qw(copy)]
+
+ 'DBIx::Class::Schema::Versioned' => {
+ ignore => [ qw/
+ connection
+ /]
},
- 'DBIx::Class::CDBICompat::DestroyWarning' => { skip => 1 },
- 'DBIx::Class::CDBICompat::GetSet' => { skip => 1 },
- 'DBIx::Class::CDBICompat::HasA' => { skip => 1 },
- 'DBIx::Class::CDBICompat::HasMany' => { skip => 1 },
- 'DBIx::Class::CDBICompat::ImaDBI' => { skip => 1 },
- 'DBIx::Class::CDBICompat::LazyLoading' => { skip => 1 },
- 'DBIx::Class::CDBICompat::LiveObjectIndex' => { skip => 1 },
- 'DBIx::Class::CDBICompat::MightHave' => { skip => 1 },
- 'DBIx::Class::CDBICompat::NoObjectIndex' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Pager' => { skip => 1 },
- 'DBIx::Class::CDBICompat::ReadOnly' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Relationship' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Relationships' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Retrieve' => { skip => 1 },
- 'DBIx::Class::CDBICompat::SQLTransformer' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Stringify' => { skip => 1 },
- 'DBIx::Class::CDBICompat::TempColumns' => { skip => 1 },
- 'DBIx::Class::CDBICompat::Triggers' => { skip => 1 },
- 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 },
- 'DBIx::Class::Componentised' => { skip => 1 },
- 'DBIx::Class::Relationship::Accessor' => { skip => 1 },
- 'DBIx::Class::Relationship::BelongsTo' => { skip => 1 },
- 'DBIx::Class::Relationship::CascadeActions' => { skip => 1 },
- 'DBIx::Class::Relationship::HasMany' => { skip => 1 },
- 'DBIx::Class::Relationship::HasOne' => { skip => 1 },
- 'DBIx::Class::Relationship::Helpers' => { skip => 1 },
- 'DBIx::Class::Relationship::ManyToMany' => { skip => 1 },
- 'DBIx::Class::Relationship::ProxyMethods' => { skip => 1 },
- 'DBIx::Class::ResultSetProxy' => { skip => 1 },
- 'DBIx::Class::ResultSetManager' => { skip => 1 },
- 'DBIx::Class::ResultSourceProxy' => { skip => 1 },
- 'DBIx::Class::Storage::DBI' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::Replicated::Types' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::DB2' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::MSSQL' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::Sybase::MSSQL' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::ODBC400' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::ODBC::DB2_400_SQL' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::Oracle' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::Pg' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::SQLite' => { skip => 1 },
- 'DBIx::Class::Storage::DBI::mysql' => { skip => 1 },
- 'DBIx::Class::SQLAHacks' => { skip => 1 },
- 'DBIx::Class::SQLAHacks::MySQL' => { skip => 1 },
- 'DBIx::Class::SQLAHacks::MSSQL' => { skip => 1 },
- 'SQL::Translator::Parser::DBIx::Class' => { skip => 1 },
- 'SQL::Translator::Producer::DBIx::Class::File' => { skip => 1 },
-# skipped because the synopsis covers it clearly
+ 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 },
+ 'DBIx::Class::Componentised' => { skip => 1 },
+ 'DBIx::Class::Relationship::*' => { skip => 1 },
+ 'DBIx::Class::ResultSetProxy' => { skip => 1 },
+ 'DBIx::Class::ResultSourceProxy' => { skip => 1 },
+ 'DBIx::Class::Storage::Statistics' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::Replicated::Types' => { skip => 1 },
- 'DBIx::Class::InflateColumn::File' => { skip => 1 },
+# test some specific components whose parents are exempt below
+ 'DBIx::Class::Storage::DBI::Replicated*' => {},
+ 'DBIx::Class::Relationship::Base' => {},
-# skip connection since it's just an override
+# internals
+ 'DBIx::Class::SQLAHacks*' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI*' => { skip => 1 },
+ 'SQL::Translator::*' => { skip => 1 },
- 'DBIx::Class::Schema::Versioned' => { ignore => [ qw(connection) ] },
+# deprecated / backcompat stuff
+ 'DBIx::Class::CDBICompat*' => { skip => 1 },
+ 'DBIx::Class::ResultSetManager' => { skip => 1 },
+ 'DBIx::Class::DB' => { skip => 1 },
-# don't bother since it's heavily deprecated
- 'DBIx::Class::ResultSetManager' => { skip => 1 },
+# skipped because the synopsis covers it clearly
+ 'DBIx::Class::InflateColumn::File' => { skip => 1 },
};
+my $ex_lookup = {};
+for my $string (keys %$exceptions) {
+ my $ex = $exceptions->{$string};
+ $string =~ s/\*/'.*?'/ge;
+ my $re = qr/^$string$/;
+ $ex_lookup->{$re} = $ex;
+}
+
+my @modules = sort { $a cmp $b } (Test::Pod::Coverage::all_modules());
+
foreach my $module (@modules) {
- SKIP:
- {
- skip "$module - No real methods", 1 if ($exceptions->{$module}{skip});
-
- # build parms up from ignore list
- my $parms = {};
- $parms->{trustme} =
- [ map { qr/^$_$/ } @{ $exceptions->{$module}{ignore} } ]
- if exists($exceptions->{$module}{ignore});
-
- # run the test with the potentially modified parm set
- pod_coverage_ok($module, $parms, "$module POD coverage");
- }
+ SKIP: {
+
+ my ($match) = List::Util::first
+ { $module =~ $_ }
+ (sort { length $b <=> length $a || $b cmp $a } (keys %$ex_lookup) )
+ ;
+
+ my $ex = $ex_lookup->{$match} if $match;
+
+ skip ("$module exempt", 1) if ($ex->{skip});
+
+ # build parms up from ignore list
+ my $parms = {};
+ $parms->{trustme} =
+ [ map { qr/^$_$/ } @{ $ex->{ignore} } ]
+ if exists($ex->{ignore});
+
+ # run the test with the potentially modified parm set
+ pod_coverage_ok($module, $parms, "$module POD coverage");
+ }
}
+
+done_testing;
use lib qw(t/lib);
use DBICTest::ForeignComponent;
-plan tests => 6;
-
# Tests if foreign component was loaded by calling foreign's method
ok( DBICTest::ForeignComponent->foreign_test_method, 'foreign component' );
'inject_base filters duplicates'
);
-# Test for a warning with incorrect order in load_components
-my @warnings = ();
-{
- package A::Test;
- our @ISA = 'DBIx::Class';
- {
- local $SIG{__WARN__} = sub { push @warnings, shift};
- __PACKAGE__->load_components(qw(Core UTF8Columns));
- }
-}
-like( $warnings[0], qr/Core loaded before UTF8Columns/,
- 'warning issued for incorrect order in load_components()' );
-is( scalar @warnings, 1,
- 'only one warning issued for incorrect load_components call' );
-
-# Test that no warning is issued for the correct order in load_components
-{
- @warnings = ();
- package B::Test;
- our @ISA = 'DBIx::Class';
- {
- local $SIG{__WARN__} = sub { push @warnings, shift };
- __PACKAGE__->load_components(qw(UTF8Columns Core));
- }
-}
-is( scalar @warnings, 0,
- 'warning not issued for correct order in load_components()' );
-
use_ok('DBIx::Class::AccessorGroup');
+use_ok('DBIx::Class::Componentised');
+
+done_testing;
use Test::More;
use lib qw(t/lib);
-use Data::Dumper;
plan tests => 4;
my $exp_warn = qr/The many-to-many relationship 'bars' is trying to create/;
$Data::Dumper::Sortkeys = 1;
use lib qw(t/lib);
-
-BEGIN {
- eval "use DBD::SQLite";
- plan $ENV{DATA_DUMPER_TEST}
- ? ( tests => 2 )
- : ( skip_all => 'Set $ENV{DATA_DUMPER_TEST} to run this test' );
-}
-
-
use_ok('DBICTest');
my $schema = DBICTest->init_schema();
cmp_ok( $rs->count(), '==', 1, "Single record in after death with dumper");
-1;
+done_testing;
use warnings;
use Test::More;
-use Data::Dumper;
use lib qw(t/lib);
use DBICTest;
my $schema = DBICTest->init_schema();
use Test::More;
use Test::Exception;
+use Test::Warn;
use lib qw(t/lib);
use DBICTest;
use DBIC::SqlMakerTest;
my %not_dirty = $art->get_dirty_columns();
is(scalar(keys(%not_dirty)), 0, 'Nothing is dirty');
-eval {
+throws_ok ( sub {
my $ret = $art->make_column_dirty('name2');
-};
-ok(defined($@), 'Failed to make non-existent column dirty');
+}, qr/No such column 'name2'/, 'Failed to make non-existent column dirty');
+
$art->make_column_dirty('name');
my %fake_dirty = $art->get_dirty_columns();
is(scalar(keys(%fake_dirty)), 1, '1 fake dirty column');
is($new_again->ID, 'DBICTest::Artist|artist|artistid=4', 'unique object id generated correctly');
+# test that store_column is called once for create() for non sequence columns
+{
+ ok(my $artist = $schema->resultset('Artist')->create({name => 'store_column test'}));
+ is($artist->name, 'X store_column test'); # used to be 'X X store...'
+
+ # call store_column even though the column doesn't seem to be dirty
+ ok($artist->update({name => 'X store_column test'}));
+ is($artist->name, 'X X store_column test');
+ $artist->delete;
+}
+
# Test backwards compatibility
{
my $warnings = '';
isa_ok($tdata{'last_updated_on'}, 'DateTime', 'inflated accessored column');
}
-eval { $schema->class("Track")->load_components('DoesNotExist'); };
-
-ok $@, $@;
+throws_ok (sub {
+ $schema->class("Track")->load_components('DoesNotExist');
+}, qr!Can't locate DBIx/Class/DoesNotExist.pm!, 'exception on nonexisting component');
is($schema->class("Artist")->field_name_for->{name}, 'artist name', 'mk_classdata usage ok');
is ($collapsed_or_rs->all, 4, 'Collapsed joined search with OR returned correct number of rows');
is ($collapsed_or_rs->count, 4, 'Collapsed search count with OR ok');
+# make sure distinct on a grouped rs is warned about
+my $cd_rs = $schema->resultset ('CD')
+ ->search ({}, { distinct => 1, group_by => 'title' });
+warnings_exist (sub {
+ $cd_rs->next;
+}, qr/Useless use of distinct/, 'UUoD warning');
+
{
my $tcount = $schema->resultset('Track')->search(
{},
use lib qw(t/lib);
use DBICTest;
use DBI::Const::GetInfoType;
+use DBIC::SqlMakerTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
-plan tests => 19;
-
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
my $dbh = $schema->storage->dbh;
#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', '');
+# make sure sqlt_type overrides work (::Storage::DBI::mysql does this)
+{
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+ ok (!$schema->storage->_dbh, 'definitely not connected');
+ is ($schema->storage->sqlt_type, 'MySQL', 'sqlt_type correct pre-connection');
+}
+
# This is in Core now, but it's here just to test that it doesn't break
$schema->class('Artist')->load_components('PK::Auto');
my $type_info = $schema->storage->columns_info_for('artist');
is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
+
+
}
my $cd = $schema->resultset ('CD')->create ({});
my $producer = $schema->resultset ('Producer')->create ({});
lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die';
+{
+ my $artist = $schema->resultset('Artist')->next;
+ my $cd = $schema->resultset('CD')->next;
+ $cd->set_from_related ('artist', $artist);
+ $cd->update;
+
+ my $rs = $schema->resultset('CD')->search ({}, { prefetch => 'artist' });
+
+ lives_ok sub {
+ my $cd = $rs->next;
+ is ($cd->artist->name, $artist->name, 'Prefetched artist');
+ }, 'join does not throw (mysql 3 test)';
+
+ # induce a jointype override, make sure it works even if we don't have mysql3
+ local $schema->storage->sql_maker->{_default_jointype} = 'inner';
+ is_same_sql_bind (
+ $rs->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield
+ FROM cd me
+ INNER JOIN artist artist ON artist.artistid = me.artist
+ )',
+ [],
+    'overridden default join type works',
+ );
+}
## Can we properly deal with the null search problem?
##
is $artist => undef
=> 'Nothing Found!';
}
+
+done_testing;
use lib qw(t/lib);
use DBICTest;
-{
- package DBICTest::Schema::Casecheck;
- use strict;
- use warnings;
- use base 'DBIx::Class';
-
- __PACKAGE__->load_components(qw/Core/);
- __PACKAGE__->table('testschema.casecheck');
- __PACKAGE__->add_columns(qw/id name NAME uc_name storecolumn/);
- __PACKAGE__->column_info_from_storage(1);
- __PACKAGE__->set_primary_key('id');
-
- sub store_column {
- my ($self, $name, $value) = @_;
- $value = '#'.$value if($name eq "storecolumn");
- $self->maybe::next::method($name, $value);
- }
-}
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-{
- package DBICTest::Schema::ArrayTest;
+plan skip_all => <<EOM unless $dsn && $user;
+Set \$ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test
+( NOTE: This test drops and creates tables called 'artist', 'casecheck',
+  'array_test' and 'sequence_test', as well as the following sequences:
+  'pkid1_seq', 'pkid2_seq' and 'nonpkid_seq', as well as the following
+ schemas: 'dbic_t_schema', 'dbic_t_schema_2', 'dbic_t_schema_3',
+ 'dbic_t_schema_4', and 'dbic_t_schema_5'
+)
+EOM
- use strict;
- use warnings;
- use base 'DBIx::Class';
+### load any test classes that are defined further down in the file via BEGIN blocks
- __PACKAGE__->load_components(qw/Core/);
- __PACKAGE__->table('testschema.array_test');
- __PACKAGE__->add_columns(qw/id arrayfield/);
- __PACKAGE__->column_info_from_storage(1);
- __PACKAGE__->set_primary_key('id');
+our @test_classes; #< array that will be pushed into by test classes defined in this file
+DBICTest::Schema->load_classes( map {s/.+:://;$_} @test_classes ) if @test_classes;
-}
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
+### pre-connect tests (keep each test separate as to make sure rebless() runs)
+{
+ my $s = DBICTest::Schema->connect($dsn, $user, $pass);
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test '.
- '(note: This test drops and creates tables called \'artist\', \'casecheck\', \'array_test\' and \'sequence_test\''.
- ' as well as following sequences: \'pkid1_seq\', \'pkid2_seq\' and \'nonpkid_seq\''.
- ' as well as following schemas: \'testschema\',\'anothertestschema\'!)'
- unless ($dsn && $user);
+ ok (!$s->storage->_dbh, 'definitely not connected');
-DBICTest::Schema->load_classes( 'Casecheck', 'ArrayTest' );
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass,);
+ # Check that datetime_parser returns correctly before we explicitly connect.
+ SKIP: {
+ eval { require DateTime::Format::Pg };
+ skip "DateTime::Format::Pg required", 2 if $@;
-# Check that datetime_parser returns correctly before we explicitly connect.
-SKIP: {
- eval { require DateTime::Format::Pg };
- skip "DateTime::Format::Pg required", 2 if $@;
+ my $store = ref $s->storage;
+ is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
- my $store = ref $schema->storage;
- is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
+ my $parser = $s->storage->datetime_parser;
+ is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+ }
- my $parser = $schema->storage->datetime_parser;
- is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+ ok (!$s->storage->_dbh, 'still not connected');
}
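+# Editorial sketch, not part of the original patch: once datetime_parser
+# resolves to DateTime::Format::Pg, inflation round-trips are plain class
+# method calls needing no $dbh, e.g.:
+#
+#   my $dt = DateTime::Format::Pg->parse_datetime('2009-09-21 10:57:00');
+#   print $dt->ymd; # 2009-09-21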
-
-my $dbh = $schema->storage->dbh;
-$schema->source("Artist")->name("testschema.artist");
-$schema->source("SequenceTest")->name("testschema.sequence_test");
{
- local $SIG{__WARN__} = sub {};
- _cleanup ($dbh);
-
- my $artist_table_def = <<EOS;
-(
- artistid serial PRIMARY KEY
- , name VARCHAR(100)
- , rank INTEGER NOT NULL DEFAULT '13'
- , charfield CHAR(10)
- , arrayfield INTEGER[]
-)
-EOS
- $dbh->do("CREATE SCHEMA testschema;");
- $dbh->do("CREATE TABLE testschema.artist $artist_table_def;");
- $dbh->do("CREATE TABLE testschema.sequence_test (pkid1 integer, pkid2 integer, nonpkid integer, name VARCHAR(100), CONSTRAINT pk PRIMARY KEY(pkid1, pkid2));");
- $dbh->do("CREATE SEQUENCE pkid1_seq START 1 MAXVALUE 999999 MINVALUE 0");
- $dbh->do("CREATE SEQUENCE pkid2_seq START 10 MAXVALUE 999999 MINVALUE 0");
- $dbh->do("CREATE SEQUENCE nonpkid_seq START 20 MAXVALUE 999999 MINVALUE 0");
- ok ( $dbh->do('CREATE TABLE testschema.casecheck (id serial PRIMARY KEY, "name" VARCHAR(1), "NAME" VARCHAR(2), "UC_NAME" VARCHAR(3), "storecolumn" VARCHAR(10));'), 'Creation of casecheck table');
- ok ( $dbh->do('CREATE TABLE testschema.array_test (id serial PRIMARY KEY, arrayfield INTEGER[]);'), 'Creation of array_test table');
- $dbh->do("CREATE SCHEMA anothertestschema;");
- $dbh->do("CREATE TABLE anothertestschema.artist $artist_table_def;");
- $dbh->do("CREATE SCHEMA yetanothertestschema;");
- $dbh->do("CREATE TABLE yetanothertestschema.artist $artist_table_def;");
- $dbh->do('set search_path=testschema,public');
+ my $s = DBICTest::Schema->connect($dsn, $user, $pass);
+ # make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
+ ok (!$s->storage->_dbh, 'definitely not connected');
+ is ($s->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
+ ok (!$s->storage->_dbh, 'still not connected');
}
-# store_column is called once for create() for non sequence columns
-
-ok(my $storecolumn = $schema->resultset('Casecheck')->create({'storecolumn' => 'a'}));
+### connect, create postgres-specific test schema
-is($storecolumn->storecolumn, '#a'); # was '##a'
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+drop_test_schema($schema);
+create_test_schema($schema);
-# This is in Core now, but it's here just to test that it doesn't break
-$schema->class('Artist')->load_components('PK::Auto');
+### begin main tests
-cmp_ok( $schema->resultset('Artist')->count, '==', 0, 'this should start with an empty artist table');
-{ # test that auto-pk also works with the defined search path by
- # un-schema-qualifying the table name
- my $artist_name_save = $schema->source("Artist")->name;
- $schema->source("Artist")->name("artist");
+# run a BIG bunch of tests for last-insert-id / Auto-PK / sequence
+# discovery
+run_apk_tests($schema); #< older set of auto-pk tests
+run_extended_apk_tests($schema); #< new extended set of auto-pk tests
- my $unq_new;
- lives_ok {
- $unq_new = $schema->resultset('Artist')->create({ name => 'baz' });
- } 'insert into unqualified, shadowed table succeeds';
-
- is($unq_new && $unq_new->artistid, 1, "and got correct artistid");
-
- #test with anothertestschema
- $schema->source('Artist')->name('anothertestschema.artist');
- my $another_new = $schema->resultset('Artist')->create({ name => 'ribasushi'});
- is( $another_new->artistid,1, 'got correct artistid for yetanotherschema');
-
- #test with yetanothertestschema
- $schema->source('Artist')->name('yetanothertestschema.artist');
- my $yetanother_new = $schema->resultset('Artist')->create({ name => 'ribasushi'});
- is( $yetanother_new->artistid,1, 'got correct artistid for yetanotherschema');
- is( $yetanother_new->artistid,1, 'got correct artistid for yetanotherschema');
-
- $schema->source("Artist")->name($artist_name_save);
-}
-
-my $new = $schema->resultset('Artist')->create({ name => 'foo' });
-is($new->artistid, 2, "Auto-PK worked");
-$new = $schema->resultset('Artist')->create({ name => 'bar' });
-is($new->artistid, 3, "Auto-PK worked");
+### type_info tests
my $test_type_info = {
'artistid' => {
},
};
-
-my $type_info = $schema->storage->columns_info_for('testschema.artist');
+my $type_info = $schema->storage->columns_info_for('dbic_t_schema.artist');
my $artistid_defval = delete $type_info->{artistid}->{default_value};
like($artistid_defval,
qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
is_deeply($type_info, $test_type_info,
'columns_info_for - column data types');
+
+
+
+####### Array tests
+
+BEGIN {
+ package DBICTest::Schema::ArrayTest;
+ push @main::test_classes, __PACKAGE__;
+
+ use strict;
+ use warnings;
+ use base 'DBIx::Class';
+
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('dbic_t_schema.array_test');
+ __PACKAGE__->add_columns(qw/id arrayfield/);
+ __PACKAGE__->column_info_from_storage(1);
+ __PACKAGE__->set_primary_key('id');
+
+}
SKIP: {
skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002;
}
+
+########## Case check
+
+BEGIN {
+ package DBICTest::Schema::Casecheck;
+ push @main::test_classes, __PACKAGE__;
+
+ use strict;
+ use warnings;
+ use base 'DBIx::Class';
+
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('dbic_t_schema.casecheck');
+ __PACKAGE__->add_columns(qw/id name NAME uc_name/);
+ __PACKAGE__->column_info_from_storage(1);
+ __PACKAGE__->set_primary_key('id');
+}
+
my $name_info = $schema->source('Casecheck')->column_info( 'name' );
is( $name_info->{size}, 1, "Case sensitive matching info for 'name'" );
my $uc_name_info = $schema->source('Casecheck')->column_info( 'uc_name' );
is( $uc_name_info->{size}, 3, "Case insensitive matching info for 'uc_name'" );
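+# Editorial note: Postgres folds unquoted identifiers to lowercase but
+# preserves case inside double quotes, so the "name", "NAME" and "UC_NAME"
+# columns created above are three distinct columns, e.g.:
+#
+#   SELECT name, "NAME", "UC_NAME" FROM dbic_t_schema.casecheck;
+#
+# The differing sizes (1/2/3) are what lets the checks above tell them apart.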
-# Test SELECT ... FOR UPDATE
-my $HaveSysSigAction = eval "require Sys::SigAction" && !$@;
-if ($HaveSysSigAction) {
- Sys::SigAction->import( 'set_sig_handler' );
-}
-SKIP: {
- skip "Sys::SigAction is not available", 3 unless $HaveSysSigAction;
- # create a new schema
- my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
- $schema2->source("Artist")->name("testschema.artist");
- $schema->txn_do( sub {
- my $artist = $schema->resultset('Artist')->search(
- {
- artistid => 1
- },
- {
- for => 'update'
- }
- )->first;
- is($artist->artistid, 1, "select for update returns artistid = 1");
- my $artist_from_schema2;
- my $error_ok = 0;
- eval {
- my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
- alarm(2);
- $artist_from_schema2 = $schema2->resultset('Artist')->find(1);
- $artist_from_schema2->name('fooey');
- $artist_from_schema2->update;
- alarm(0);
- };
- if (my $e = $@) {
- $error_ok = $e =~ /DBICTestTimeout/;
- }
-
- # Make sure that an error was raised, and that the update failed
- ok($error_ok, "update from second schema times out");
- ok($artist_from_schema2->is_column_changed('name'), "'name' column is still dirty from second schema");
- });
-}
+## Test SELECT ... FOR UPDATE
SKIP: {
- skip "Sys::SigAction is not available", 3 unless $HaveSysSigAction;
- # create a new schema
- my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
- $schema2->source("Artist")->name("testschema.artist");
+ if(eval "require Sys::SigAction" && !$@) {
+ Sys::SigAction->import( 'set_sig_handler' );
+ }
+ else {
+ skip "Sys::SigAction is not available", 6;
+ }
+
+ my ($timed_out, $artist2);
- $schema->txn_do( sub {
+ for my $t (
+ {
+ # Make sure that an error was raised, and that the update failed
+ update_lock => 1,
+ test_sub => sub {
+ ok($timed_out, "update from second schema times out");
+ ok($artist2->is_column_changed('name'), "'name' column is still dirty from second schema");
+ },
+ },
+ {
+ # Make sure that an error was NOT raised, and that the update succeeded
+ update_lock => 0,
+ test_sub => sub {
+ ok(! $timed_out, "update from second schema DOES NOT timeout");
+ ok(! $artist2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
+ },
+ },
+ ) {
+ # create a new schema
+ my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
+ $schema2->source("Artist")->name("dbic_t_schema.artist");
+
+ $schema->txn_do( sub {
my $artist = $schema->resultset('Artist')->search(
{
artistid => 1
},
+ $t->{update_lock} ? { for => 'update' } : {}
)->first;
- is($artist->artistid, 1, "select for update returns artistid = 1");
+ is($artist->artistid, 1, "select returns artistid = 1");
- my $artist_from_schema2;
- my $error_ok = 0;
+ $timed_out = 0;
eval {
my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
alarm(2);
- $artist_from_schema2 = $schema2->resultset('Artist')->find(1);
- $artist_from_schema2->name('fooey');
- $artist_from_schema2->update;
+ $artist2 = $schema2->resultset('Artist')->find(1);
+ $artist2->name('fooey');
+ $artist2->update;
alarm(0);
};
- if (my $e = $@) {
- $error_ok = $e =~ /DBICTestTimeout/;
- }
+ $timed_out = $@ =~ /DBICTestTimeout/;
+ });
- # Make sure that an error was NOT raised, and that the update succeeded
- ok(! $error_ok, "update from second schema DOES NOT timeout");
- ok(! $artist_from_schema2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
- });
+ $t->{test_sub}->();
+ }
}
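+# Editorial note: Sys::SigAction::set_sig_handler is used instead of a plain
+# local $SIG{ALRM} because perl 5.8+ "safe signals" defer delivery until the
+# current opcode completes - potentially never while DBD::Pg blocks on the
+# locked row - whereas an unsafe handler lets alarm(2) interrupt the update.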
+
+######## other older Auto-pk tests
+
+$schema->source("SequenceTest")->name("dbic_t_schema.sequence_test");
for (1..5) {
my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
is($st->pkid1, $_, "Oracle Auto-PK without trigger: First primary key");
my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
is($st->pkid1, 55, "Oracle Auto-PK without trigger: First primary key set manually");
-sub _cleanup {
- my $dbh = shift or return;
-
- for my $stat (
- 'DROP TABLE testschema.artist',
- 'DROP TABLE testschema.casecheck',
- 'DROP TABLE testschema.sequence_test',
- 'DROP TABLE testschema.array_test',
- 'DROP SEQUENCE pkid1_seq',
- 'DROP SEQUENCE pkid2_seq',
- 'DROP SEQUENCE nonpkid_seq',
- 'DROP SCHEMA testschema',
- 'DROP TABLE anothertestschema.artist',
- 'DROP SCHEMA anothertestschema',
- 'DROP TABLE yetanothertestschema.artist',
- 'DROP SCHEMA yetanothertestschema',
- ) {
- eval { $dbh->do ($stat) };
- }
+done_testing;
+
+exit;
+
+END {
+ return unless $schema;
+ drop_test_schema($schema);
+ eapk_drop_all( $schema)
+};
+
+
+######### SUBROUTINES
+
+sub create_test_schema {
+ my $schema = shift;
+ $schema->storage->dbh_do(sub {
+ my (undef,$dbh) = @_;
+
+ local $dbh->{Warn} = 0;
+
+ my $std_artist_table = <<EOS;
+(
+ artistid serial PRIMARY KEY
+ , name VARCHAR(100)
+ , rank INTEGER NOT NULL DEFAULT '13'
+ , charfield CHAR(10)
+ , arrayfield INTEGER[]
+)
+EOS
+
+ $dbh->do("CREATE SCHEMA dbic_t_schema");
+ $dbh->do("CREATE TABLE dbic_t_schema.artist $std_artist_table");
+ $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.sequence_test (
+ pkid1 integer
+ , pkid2 integer
+ , nonpkid integer
+ , name VARCHAR(100)
+ , CONSTRAINT pk PRIMARY KEY(pkid1, pkid2)
+)
+EOS
+ $dbh->do("CREATE SEQUENCE pkid1_seq START 1 MAXVALUE 999999 MINVALUE 0");
+ $dbh->do("CREATE SEQUENCE pkid2_seq START 10 MAXVALUE 999999 MINVALUE 0");
+ $dbh->do("CREATE SEQUENCE nonpkid_seq START 20 MAXVALUE 999999 MINVALUE 0");
+ $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.casecheck (
+ id serial PRIMARY KEY
+ , "name" VARCHAR(1)
+ , "NAME" VARCHAR(2)
+ , "UC_NAME" VARCHAR(3)
+)
+EOS
+ $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.array_test (
+ id serial PRIMARY KEY
+ , arrayfield INTEGER[]
+)
+EOS
+ $dbh->do("CREATE SCHEMA dbic_t_schema_2");
+ $dbh->do("CREATE TABLE dbic_t_schema_2.artist $std_artist_table");
+ $dbh->do("CREATE SCHEMA dbic_t_schema_3");
+ $dbh->do("CREATE TABLE dbic_t_schema_3.artist $std_artist_table");
+ $dbh->do('set search_path=dbic_t_schema,public');
+ $dbh->do("CREATE SCHEMA dbic_t_schema_4");
+ $dbh->do("CREATE SCHEMA dbic_t_schema_5");
+ $dbh->do(<<EOS);
+ CREATE TABLE dbic_t_schema_4.artist
+ (
+ artistid integer not null default nextval('artist_artistid_seq'::regclass) PRIMARY KEY
+ , name VARCHAR(100)
+ , rank INTEGER NOT NULL DEFAULT '13'
+ , charfield CHAR(10)
+ , arrayfield INTEGER[]
+ );
+EOS
+ $dbh->do('set search_path=public,dbic_t_schema,dbic_t_schema_3');
+ $dbh->do('create sequence public.artist_artistid_seq'); #< in the public schema
+ $dbh->do(<<EOS);
+ CREATE TABLE dbic_t_schema_5.artist
+ (
+ artistid integer not null default nextval('public.artist_artistid_seq'::regclass) PRIMARY KEY
+ , name VARCHAR(100)
+ , rank INTEGER NOT NULL DEFAULT '13'
+ , charfield CHAR(10)
+ , arrayfield INTEGER[]
+ );
+EOS
+ $dbh->do('set search_path=dbic_t_schema,public');
+ });
}
-done_testing;
-END { _cleanup($dbh) }
+
+sub drop_test_schema {
+ my ( $schema, $warn_exceptions ) = @_;
+
+ $schema->storage->dbh_do(sub {
+ my (undef,$dbh) = @_;
+
+ local $dbh->{Warn} = 0;
+
+ for my $stat (
+ 'DROP SCHEMA dbic_t_schema_5 CASCADE',
+ 'DROP SEQUENCE public.artist_artistid_seq',
+ 'DROP SCHEMA dbic_t_schema_4 CASCADE',
+ 'DROP SCHEMA dbic_t_schema CASCADE',
+ 'DROP SEQUENCE pkid1_seq',
+ 'DROP SEQUENCE pkid2_seq',
+ 'DROP SEQUENCE nonpkid_seq',
+ 'DROP SCHEMA dbic_t_schema_2 CASCADE',
+ 'DROP SCHEMA dbic_t_schema_3 CASCADE',
+ ) {
+ eval { $dbh->do ($stat) };
+ diag $@ if $@ && $warn_exceptions;
+ }
+ });
+}
+
+
+### auto-pk / last_insert_id / sequence discovery
+sub run_apk_tests {
+ my $schema = shift;
+
+ # This is in Core now, but it's here just to test that it doesn't break
+ $schema->class('Artist')->load_components('PK::Auto');
+ cmp_ok( $schema->resultset('Artist')->count, '==', 0, 'this should start with an empty artist table');
+
+ # test that auto-pk also works with the defined search path by
+ # un-schema-qualifying the table name
+ apk_t_set($schema,'artist');
+
+ my $unq_new;
+ lives_ok {
+ $unq_new = $schema->resultset('Artist')->create({ name => 'baz' });
+ } 'insert into unqualified, shadowed table succeeds';
+
+ is($unq_new && $unq_new->artistid, 1, "and got correct artistid");
+
+ my @test_schemas = ( [qw| dbic_t_schema_2 1 |],
+ [qw| dbic_t_schema_3 1 |],
+ [qw| dbic_t_schema_4 2 |],
+ [qw| dbic_t_schema_5 1 |],
+ );
+ foreach my $t ( @test_schemas ) {
+ my ($sch_name, $start_num) = @$t;
+    # test creation in $sch_name
+ apk_t_set($schema,"$sch_name.artist");
+ my $another_new;
+ lives_ok {
+ $another_new = $schema->resultset('Artist')->create({ name => 'Tollbooth Willy'});
+ is( $another_new->artistid,$start_num, "got correct artistid for $sch_name")
+ or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+ } "$sch_name liid 1 did not die"
+ or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+ lives_ok {
+ $another_new = $schema->resultset('Artist')->create({ name => 'Adam Sandler'});
+ is( $another_new->artistid,$start_num+1, "got correct artistid for $sch_name")
+ or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+ } "$sch_name liid 2 did not die"
+ or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+
+ }
+
+ lives_ok {
+ apk_t_set($schema,'dbic_t_schema.artist');
+ my $new = $schema->resultset('Artist')->create({ name => 'foo' });
+ is($new->artistid, 4, "Auto-PK worked");
+ $new = $schema->resultset('Artist')->create({ name => 'bar' });
+ is($new->artistid, 5, "Auto-PK worked");
+ } 'old auto-pk tests did not die either';
+}
+
+# sets the artist table name and clears sequence name cache
+sub apk_t_set {
+ my ( $s, $n ) = @_;
+ $s->source("Artist")->name($n);
+ $s->source('Artist')->column_info('artistid')->{sequence} = undef; #< clear sequence name cache
+}
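+# Editorial sketch, not part of the original patch: the cache clear above
+# matters because discovered sequence names are memoized per column, so e.g.
+#
+#   apk_t_set($schema, 'dbic_t_schema_2.artist');
+#
+# without it would leave last_insert_id reading the previously found sequence.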
+
+
+######## EXTENDED AUTO-PK TESTS
+
+my @eapk_id_columns;
+BEGIN {
+ package DBICTest::Schema::ExtAPK;
+ push @main::test_classes, __PACKAGE__;
+
+ use strict;
+ use warnings;
+ use base 'DBIx::Class';
+
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('apk');
+
+ @eapk_id_columns = qw( id1 id2 id3 id4 );
+ __PACKAGE__->add_columns(
+ map { $_ => { data_type => 'integer', is_auto_increment => 1 } }
+ @eapk_id_columns
+ );
+
+ __PACKAGE__->set_primary_key('id2'); #< note the SECOND column is
+ #the primary key
+}
+
+my @eapk_schemas;
+BEGIN{ @eapk_schemas = map "dbic_apk_$_", 0..5 }
+
+sub run_extended_apk_tests {
+ my $schema = shift;
+
+ #save the search path and reset it at the end
+ my $search_path_save = eapk_get_search_path($schema);
+
+ eapk_drop_all($schema);
+
+ # make the test schemas and sequences
+ $schema->storage->dbh_do(sub {
+ my ( undef, $dbh ) = @_;
+
+ $dbh->do("CREATE SCHEMA $_")
+ for @eapk_schemas;
+
+ $dbh->do("CREATE SEQUENCE $eapk_schemas[5].fooseq");
+ $dbh->do("CREATE SEQUENCE $eapk_schemas[4].fooseq");
+ $dbh->do("CREATE SEQUENCE $eapk_schemas[3].fooseq");
+
+ $dbh->do("SET search_path = ".join ',', @eapk_schemas );
+ });
+
+ # clear our search_path cache
+ $schema->storage->{_pg_search_path} = undef;
+
+ eapk_create( $schema,
+ with_search_path => [0,1],
+ );
+ eapk_create( $schema,
+ with_search_path => [1,0,'public'],
+ nextval => "$eapk_schemas[5].fooseq",
+ );
+ eapk_create( $schema,
+ with_search_path => ['public',0,1],
+ qualify_table => 2,
+ );
+ eapk_create( $schema,
+ with_search_path => [3,1,0,'public'],
+ nextval => "$eapk_schemas[4].fooseq",
+ );
+ eapk_create( $schema,
+ with_search_path => [3,1,0,'public'],
+ nextval => "$eapk_schemas[3].fooseq",
+ qualify_table => 4,
+ );
+
+ eapk_poke( $schema, 0 );
+ eapk_poke( $schema, 2 );
+ eapk_poke( $schema, 4 );
+ eapk_poke( $schema, 1 );
+ eapk_poke( $schema, 0 );
+ eapk_poke( $schema, 1 );
+ eapk_poke( $schema, 4 );
+ eapk_poke( $schema, 3 );
+ eapk_poke( $schema, 1 );
+ eapk_poke( $schema, 2 );
+ eapk_poke( $schema, 0 );
+
+ # set our search path back
+ eapk_set_search_path( $schema, @$search_path_save );
+}
+
+# do a DBIC create on the apk table in the given schema number (which is an
+# index of @eapk_schemas)
+
+my %seqs; #< sanity-check hash of schema.table.col => currval of its sequence
+
+sub eapk_poke {
+ my ($s, $schema_num) = @_;
+
+ my $schema_name = defined $schema_num
+ ? $eapk_schemas[$schema_num]
+ : '';
+
+ my $schema_name_actual = $schema_name || eapk_get_search_path($s)->[0];
+
+ $s->source('ExtAPK')->name($schema_name ? $schema_name.'.apk' : 'apk');
+ #< clear sequence name cache
+ $s->source('ExtAPK')->column_info($_)->{sequence} = undef
+ for @eapk_id_columns;
+
+ no warnings 'uninitialized';
+ lives_ok {
+ my $new;
+ for my $inc (1,2,3) {
+      $new = $s->resultset('ExtAPK')->create({});
+ my $proper_seqval = ++$seqs{"$schema_name_actual.apk.id2"};
+ is( $new->id2, $proper_seqval, "$schema_name_actual.apk.id2 correct inc $inc" )
+ or eapk_seq_diag($s,$schema_name);
+ $new->discard_changes;
+ for my $id (grep $_ ne 'id2', @eapk_id_columns) {
+ my $proper_seqval = ++$seqs{"$schema_name_actual.apk.$id"};
+ is( $new->$id, $proper_seqval, "$schema_name_actual.apk.$id correct inc $inc" )
+ or eapk_seq_diag($s,$schema_name);
+ }
+ }
+ } "create in schema '$schema_name' lives"
+ or eapk_seq_diag($s,$schema_name);
+}
+
+# print diagnostic info on which sequences were found in the ExtAPK
+# class
+sub eapk_seq_diag {
+ my $s = shift;
+ my $schema = shift || eapk_get_search_path($s)->[0];
+
+ diag "$schema.apk sequences: ",
+ join(', ',
+ map "$_:".($s->source('ExtAPK')->column_info($_)->{sequence} || '<none>'),
+ @eapk_id_columns
+ );
+}
+
+# get the postgres search path as an arrayref
+sub eapk_get_search_path {
+ my ( $s ) = @_;
+ # cache the search path as ['schema','schema',...] in the storage
+ # obj
+
+ return $s->storage->dbh_do(sub {
+ my (undef, $dbh) = @_;
+ my @search_path;
+ my ($sp_string) = $dbh->selectrow_array('SHOW search_path');
+ while ( $sp_string =~ s/("[^"]+"|[^,]+),?// ) {
+ unless( defined $1 and length $1 ) {
+ die "search path sanity check failed: '$1'";
+ }
+ push @search_path, $1;
+ }
+ \@search_path
+ });
+}
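+# Editorial note: the substitution above peels off one comma-separated
+# entry at a time, keeping any double quotes intact so that quoted schema
+# names survive the round trip through eapk_set_search_path below.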
+sub eapk_set_search_path {
+ my ($s,@sp) = @_;
+ my $sp = join ',',@sp;
+ $s->storage->dbh_do( sub { $_[1]->do("SET search_path = $sp") } );
+}
+
+# create the apk table in the given schema; options control whether the
+# table name is schema-qualified and which sequence supplies nextval for
+# the second ID column
+sub eapk_create {
+ my ($schema, %a) = @_;
+
+ $schema->storage->dbh_do(sub {
+ my (undef,$dbh) = @_;
+
+ my $searchpath_save;
+ if ( $a{with_search_path} ) {
+ ($searchpath_save) = $dbh->selectrow_array('SHOW search_path');
+
+ my $search_path = join ',',map {/\D/ ? $_ : $eapk_schemas[$_]} @{$a{with_search_path}};
+
+ $dbh->do("SET search_path = $search_path");
+ }
+
+ my $table_name = $a{qualify_table}
+ ? ($eapk_schemas[$a{qualify_table}] || die). ".apk"
+ : 'apk';
+ local $_[1]->{Warn} = 0;
+
+ my $id_def = $a{nextval}
+ ? "integer primary key not null default nextval('$a{nextval}'::regclass)"
+ : 'serial primary key';
+ $dbh->do(<<EOS);
+CREATE TABLE $table_name (
+ id1 serial
+ , id2 $id_def
+ , id3 serial
+ , id4 serial
+)
+EOS
+
+ if( $searchpath_save ) {
+ $dbh->do("SET search_path = $searchpath_save");
+ }
+ });
+}
+
+sub eapk_drop_all {
+ my ( $schema, $warn_exceptions ) = @_;
+
+ $schema->storage->dbh_do(sub {
+ my (undef,$dbh) = @_;
+
+ local $dbh->{Warn} = 0;
+
+ # drop the test schemas
+ for (@eapk_schemas ) {
+ eval{ $dbh->do("DROP SCHEMA $_ CASCADE") };
+ diag $@ if $@ && $warn_exceptions;
+ }
+
+
+ });
+}
plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
-plan tests => 39;
-
DBICTest::Schema->load_classes('ArtistGUID');
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
});
lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
$schema->populate ('Owners', [
[qw/id name /],
[qw/1 wiggle/],
]);
}, 'populate with PKs supplied ok' );
+lives_ok (sub {
+ # start a new connection, make sure rebless works
+ # test an insert with a supplied identity, followed by one without
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+ for (1..2) {
+ my $id = $_ * 20 ;
+ $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
+ $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
+ }
+}, 'create with/without PKs ok' );
+
+is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
+
lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
$schema->populate ('BooksInLibrary', [
[qw/source owner title /],
[qw/Library 1 secrets0/],
],
);
}
-
}
+done_testing;
+
# clean up our mess
END {
if (my $dbh = eval { $schema->storage->_dbh }) {
use strict;
-use warnings;
+use warnings;
use Test::More;
use Test::Exception;
use Test::More;
use lib qw(t/lib);
use DBICTest;
-use Data::Dumper;
use DBIC::SqlMakerTest;
my $schema = DBICTest->init_schema();
use strict;
-use warnings;
+use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest;
+use DBIC::SqlMakerTest;
+use DBIC::DebugObj;
my $schema = DBICTest->init_schema();
-plan tests => 49;
-
# Check the defined unique constraints
is_deeply(
[ sort $schema->source('CD')->unique_constraint_names ],
);
ok($cd2->in_storage, 'Updating year using update_or_new was successful');
is($cd2->id, $cd1->id, 'Got the same CD using update_or_new');
-}
\ No newline at end of file
+}
+
+# make sure the ident condition is assembled sanely
+{
+ my $artist = $schema->resultset('Artist')->next;
+
+ my ($sql, @bind);
+  $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind));
+ $schema->storage->debug(1);
+
+ $artist->discard_changes;
+
+ is_same_sql_bind (
+ $sql,
+ \@bind,
+ 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?',
+ [qw/'1'/],
+ );
+
+ $schema->storage->debug(0);
+ $schema->storage->debugobj(undef);
+}
+
+done_testing;
use strict;
-use warnings;
+use warnings;
use Test::More;
+use Test::Warn;
use Test::Exception;
use lib qw(t/lib);
use DBICTest;
my $schema = DBICTest->init_schema();
-plan tests => 64;
-
my $code = sub {
my ($artist, @cd_titles) = @_;
-
+
$artist->create_related('cds', {
title => $_,
year => 2006,
}) foreach (@cd_titles);
-
+
return $artist->cds->all;
};
name => 'Death Cab for Cutie',
made_up_column => 1,
});
-
+
$guard->commit;
} qr/No such column made_up_column .*? at .*?81transactions.t line \d+/s, "Error propagated okay";
ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
- my $inner_exception;
+ my $inner_exception; # set in inner() below
eval {
outer($schema, 1);
};
ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
lives_ok (sub {
- my $w;
- local $SIG{__WARN__} = sub { $w = shift };
-
- # The 0 arg says don't die, just let the scope guard go out of scope
- # forcing a txn_rollback to happen
- outer($schema, 0);
-
- like ($w, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or an error/, 'Out of scope warning detected');
+ warnings_exist ( sub {
+ # The 0 arg says don't die, just let the scope guard go out of scope
+ # forcing a txn_rollback to happen
+ outer($schema, 0);
+ }, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, 'Out of scope warning detected');
ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
}, 'rollback successful without exception');
$inner_guard->commit;
}
}
+
+# make sure the guard does not eat exceptions
+{
+ my $schema = DBICTest->init_schema();
+ throws_ok (sub {
+ my $guard = $schema->txn_scope_guard;
+ $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+ $schema->storage->disconnect; # this should freak out the guard rollback
+
+ die 'Deliberate exception';
+ }, qr/Deliberate exception.+Rollback failed/s);
+}
+
+# make sure it warns *big* on failed rollbacks
+{
+ my $schema = DBICTest->init_schema();
+
+ # something is really confusing Test::Warn here, no time to debug
+=begin comment
+ warnings_exist (
+ sub {
+ my $guard = $schema->txn_scope_guard;
+ $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+ $schema->storage->disconnect; # this should freak out the guard rollback
+ },
+ [
+ qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
+ qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
+ ],
+ 'proper warnings generated on out-of-scope+rollback failure'
+ );
+=end comment
+
+=cut
+
+ my @want = (
+ qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
+ qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
+ );
+
+ my @w;
+ local $SIG{__WARN__} = sub {
+ if (grep {$_[0] =~ $_} (@want)) {
+ push @w, $_[0];
+ }
+ else {
+ warn $_[0];
+ }
+ };
+ {
+ my $guard = $schema->txn_scope_guard;
+ $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+ $schema->storage->disconnect; # this should freak out the guard rollback
+ }
+
+ is (@w, 2, 'Both expected warnings found');
+}
+
+done_testing;
}
);
-use Data::Dumper; $Data::Dumper::Deparse = 1;
-
# start test for prefetch SELECT count
$queries = 0;
$schema->storage->debug(1);
use lib qw(t/lib);
use DBICTest;
-eval "use SQL::Translator";
-plan skip_all => 'SQL::Translator required' if $@;
+BEGIN {
+ require DBIx::Class::Storage::DBI;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version
+ if not DBIx::Class::Storage::DBI->_sqlt_version_ok;
+}
my $schema = DBICTest->init_schema (no_deploy => 1);
use strict;
-use warnings;
+use warnings;
use Test::More;
+use Test::Warn;
use Test::Exception;
use lib qw(t/lib);
use DBICTest;
my $schema = DBICTest->init_schema();
-plan tests => 20;
-
my $rs = $schema->resultset("CD")->search({}, { order_by => 'cdid' });
my $rs_title = $rs->get_column('title');
is($rs_year->first, 1999, "first okay");
+warnings_exist (sub {
+ is($rs_year->single, 1999, "single okay");
+}, qr/Query returned more than one row/, 'single warned');
+
# test +select/+as for single column
my $psrs = $schema->resultset('CD')->search({},
{
[ $rs->get_column ('cdid')->all ],
'prefetch properly collapses amount of rows from get_column',
);
+
+done_testing;
plan skip_all => 'Install Text::CSV_XS or Text::CSV_PP to run this test' if ($@);
}
-my @json_backends = qw/XS JSON DWIW Syck/;
+my @json_backends = qw/XS JSON DWIW/;
my $tests_per_run = 5;
plan tests => $tests_per_run * @json_backends;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+{ # Fake storage driver for sqlite with autocast
+ package DBICTest::SQLite::AutoCast;
+ use base qw/
+ DBIx::Class::Storage::DBI::AutoCast
+ DBIx::Class::Storage::DBI::SQLite
+ /;
+ use mro 'c3';
+
+ my $type_map = {
+ datetime => 'DateTime',
+ integer => 'INT',
+ int => undef, # no conversion
+ };
+
+ sub _native_data_type {
+ return $type_map->{$_[1]};
+ }
+}
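+# Editorial note: AutoCast consults _native_data_type() with each bound
+# column's declared data_type at SQL generation time; a defined answer is
+# rendered as CAST(? AS <type>), while an undef answer (the 'int' entry
+# above) leaves the placeholder bare. The two is_same_sql_bind checks below
+# show the same resultset with auto_cast off and then on.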
+
+my $schema = DBICTest->init_schema (storage_type => 'DBICTest::SQLite::AutoCast');
+
+# 'me.id' will be cast unlike the unqualified 'id'
+my $rs = $schema->resultset ('CD')->search ({
+ cdid => { '>', 5 },
+ 'tracks.last_updated_at' => { '!=', undef },
+ 'tracks.last_updated_on' => { '<', 2009 },
+ 'tracks.position' => 4,
+ 'tracks.single_track' => \[ '= ?', [ single_track => [1, 2, 3 ] ] ],
+}, { join => 'tracks' });
+
+my $bind = [
+ [ cdid => 5 ],
+ [ 'tracks.last_updated_on' => 2009 ],
+ [ 'tracks.position' => 4 ],
+ [ 'single_track' => [ 1, 2, 3] ],
+];
+
+is_same_sql_bind (
+ $rs->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ FROM cd me
+ LEFT JOIN track tracks ON tracks.cd = me.cdid
+ WHERE
+ cdid > ?
+ AND tracks.last_updated_at IS NOT NULL
+ AND tracks.last_updated_on < ?
+ AND tracks.position = ?
+ AND tracks.single_track = ?
+ )',
+ $bind,
+ 'expected sql with casting off',
+);
+
+$schema->storage->auto_cast (1);
+
+is_same_sql_bind (
+ $rs->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ FROM cd me
+ LEFT JOIN track tracks ON tracks.cd = me.cdid
+ WHERE
+ cdid > CAST(? AS INT)
+ AND tracks.last_updated_at IS NOT NULL
+      AND tracks.last_updated_on < CAST(? AS DateTime)
+ AND tracks.position = ?
+ AND tracks.single_track = CAST(? AS INT)
+ )',
+ $bind,
+ 'expected sql with casting on',
+);
+
+done_testing;
#!/usr/bin/perl
+
use strict;
use warnings;
use Test::More;
plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn);
-
- eval "use DBD::mysql; use SQL::Translator 0.09003;";
- plan $@
- ? ( skip_all => 'needs DBD::mysql and SQL::Translator 0.09003 for testing' )
- : ( tests => 22 );
+ require DBIx::Class::Storage::DBI;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version
+ if not DBIx::Class::Storage::DBI->_sqlt_version_ok;
}
my $version_table_name = 'dbix_class_schema_versions';
unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) {
unlink $_ for (values %$fn);
}
+
+done_testing;
use lib qw(t/lib);
use DBIC::SqlMakerTest;
-plan tests => 4;
-
use_ok('DBICTest');
-my $schema = DBICTest->init_schema();
+my $schema = DBICTest->init_schema(no_deploy => 1);
my $sql_maker = $schema->storage->sql_maker;
);
}
+# make sure the cookbook caveat of { $op, \'...' } no longer applies
+{
+ my ($sql, @bind) = $sql_maker->where({
+ last_attempt => \ '< now() - interval "12 hours"',
+ next_attempt => { '<', \ 'now() - interval "12 hours"' },
+ created => [
+ { '<=', \ '1969' },
+ \ '> 1984',
+ ],
+ });
+ is_same_sql_bind(
+ $sql,
+ \@bind,
+ 'WHERE
+ (created <= 1969 OR created > 1984 )
+ AND last_attempt < now() - interval "12 hours"
+ AND next_attempt < now() - interval "12 hours"
+ ',
+ [],
+ );
+}
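+# Editorial note: a scalar reference injects its contents as literal SQL,
+# and { $op => \'...' } pairs an operator with such a literal; the Cookbook
+# used to warn that the combined form was unreliable. The check above
+# asserts both spellings (bare literal and operator-plus-literal) now render
+# as plain WHERE clauses with no bind values.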
+
# Make sure the carp/croak override in SQLA works (via SQLAHacks)
my $file = __FILE__;
$file = "\Q$file\E";
throws_ok (sub {
$schema->resultset ('Artist')->search ({}, { order_by => { -asc => 'stuff', -desc => 'staff' } } )->as_query;
}, qr/$file/, 'Exception correctly croak()ed');
+
+done_testing;
use lib qw(t/lib);
use DBICTest;
-
BEGIN {
- eval "use SQL::Translator 0.09003;";
- if ($@) {
- plan skip_all => 'needs SQL::Translator 0.09003 for testing';
- }
+ require DBIx::Class::Storage::DBI;
+ plan skip_all =>
+ 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version
+ if not DBIx::Class::Storage::DBI->_sqlt_version_ok;
}
my $schema = DBICTest->init_schema();
$schema->sources
;
-plan tests => ( @sources * 3);
-
{
my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { } } });
}
}
+done_testing;
+
sub create_schema {
my $args = shift;
}
eval { Film->constrain_column(codirector => Untaint => 'date') };
is $@, '', 'Can constrain with untaint';
+
my $freeaa =
eval { Film->create({ title => "The Freaa", codirector => 'today' }) };
- TODO: {
- local $TODO = "no idea what this is supposed to do";
- is $@, '', "Can create codirector";
- is $freeaa && $freeaa->codirector, '2001-03-03', "Set the codirector";
- }
+ is $@, '', "Can create codirector";
+ is $freeaa && $freeaa->codirector, '2001-03-03', "Set the codirector";
}
__DATA__
+++ /dev/null
-package # hide from PAUSE
- Binary;
-
-use strict;
-use base 'PgBase';
-
-__PACKAGE__->table(cdbibintest => 'cdbibintest');
-__PACKAGE__->sequence('binseq');
-__PACKAGE__->columns(All => qw(id bin));
-
-# __PACKAGE__->data_type(bin => DBI::SQL_BINARY);
-
-sub schema { "id INTEGER, bin BYTEA" }
-
-1;
-
+++ /dev/null
-package # hide from PAUSE
- PgBase;
-
-use strict;
-use base 'DBIx::Class::CDBICompat';
-
-my $db = $ENV{DBD_PG_DBNAME} || 'template1';
-my $user = $ENV{DBD_PG_USER} || 'postgres';
-my $pass = $ENV{DBD_PG_PASSWD} || '';
-
-__PACKAGE__->connection("dbi:Pg:dbname=$db", $user, $pass,
- { AutoCommit => 1 });
-
-sub CONSTRUCT {
- my $class = shift;
- my ($table, $sequence) = ($class->table, $class->sequence || "");
- my $schema = $class->schema;
- $class->db_Main->do("CREATE TEMPORARY SEQUENCE $sequence") if $sequence;
- $class->db_Main->do("CREATE TEMPORARY TABLE $table ( $schema )");
-}
-
-1;
-
my $schema = DBICTest->init_schema();
-use Data::Dumper;
-
# add 2 extra artists
$schema->populate ('Artist', [
[qw/name/],
use strict;
use warnings;
-use Data::Dumper;
-
use Test::More;
plan ( tests => 1 );
my $schema = DBICTest->init_schema();
-use Data::Dumper;
-
my @serializers = (
{ module => 'YAML.pm',
inflater => sub { YAML::Load (shift) },
We have a number of reasons to believe that this is a development
checkout and that you, the user, did not run `perl Makefile.PL`
before using this code. You absolutely _must_ perform this step,
-as not doing so often results in a lot of wasted time for other
-contributors trying to assit you with "it broke!" problems.
+and ensure you have all required dependencies present. Not doing
+so often results in a lot of wasted time for other contributors
+trying to assist you with spurious "it's broken!" problems.
If you are seeing this message unexpectedly (i.e. you are in fact
-attempting a regular installation be it through CPAN or manually,
-set the variable DBICTEST_NO_MAKEFILE_VERIFICATION to a true value
-so you can continue. Also _make_absolutely_sure_ to report this to
-either the mailing list or to the irc channel as described in
+attempting a regular installation, be it through CPAN or manually),
+please report the situation to either the mailing list or to the
+irc channel as described in
http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT
-Failure to do this will make us believe that all these checks are
-indeed foolproof and we will remove the ability to override this
-entirely.
-
The DBIC team
}
}
+# Mimic $Module::Install::AUTHOR
+sub is_author {
+
+ my $root = _find_co_root()
+ or return undef;
+
+ return (
+ ( not -d $root->subdir ('inc') )
+ or
+ ( -e $root->subdir ('inc')->file ($^O eq 'VMS' ? '_author' : '.author') )
+ );
+}
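+# Editorial note: Module::Install marks an author-side checkout by creating
+# an inc/.author/ directory (inc/_author on VMS); a missing inc/ altogether
+# likewise implies a fresh checkout rather than a CPAN tarball, hence the
+# two-armed check above.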
+
# Try to determine the root of a checkout/untar if possible
# or return undef
sub _find_co_root {
},
);
__PACKAGE__->set_primary_key('artistid');
+__PACKAGE__->add_unique_constraint(artist => ['artistid']); # do not remove, part of a test
__PACKAGE__->mk_classdata('field_name_for', {
artistid => 'primary key',
}
}
+sub store_column {
+ my ($self, $name, $value) = @_;
+ $value = 'X '.$value if ($name eq 'name' && $value && $value =~ /(X )?store_column test/);
+ $self->next::method($name, $value);
+}
+
+
1;
data_type => 'integer',
},
'position' => {
- data_type => 'integer',
+ data_type => 'int',
accessor => 'pos',
},
'title' => {
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Thu Aug 20 07:47:13 2009
+-- Created on Mon Sep 21 00:11:34 2009
--
CREATE TABLE track (
trackid INTEGER PRIMARY KEY NOT NULL,
cd integer NOT NULL,
- position integer NOT NULL,
+ position int NOT NULL,
title varchar(100) NOT NULL,
last_updated_on datetime,
last_updated_at datetime,
use Test::Exception;
use lib qw(t/lib);
use DBICTest;
+
use Data::Dumper;
+$Data::Dumper::Sortkeys = 1;
my $schema = DBICTest->init_schema();
);
}
+{
+ my $cd_rs = $schema->resultset('CD')->search({}, {
+ distinct => 1,
+ join => [qw/ tracks /],
+ prefetch => [qw/ artist /],
+ });
+ is($cd_rs->count, 5, 'complex prefetch + non-prefetching has_many join count correct');
+ is($cd_rs->all, 5, 'complex prefetch + non-prefetching has_many join number of objects correct');
+
+ # make sure join tracks was thrown out
+ is_same_sql_bind (
+ $cd_rs->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield
+ FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ FROM cd me
+ JOIN artist artist ON artist.artistid = me.artist
+ GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ ) me
+ JOIN artist artist ON artist.artistid = me.artist
+ )',
+ [],
+ );
+
+
+
+  # try the same as above, but add a condition so the tracks join cannot be thrown away
+ my $cd_rs2 = $cd_rs->search ({ 'tracks.title' => { '!=' => 'ugabuganoexist' } });
+ is($cd_rs2->count, 5, 'complex prefetch + non-prefetching restricted has_many join count correct');
+ is($cd_rs2->all, 5, 'complex prefetch + non-prefetching restricted has_many join number of objects correct');
+
+ # the outer group_by seems like a necessary evil, if someone can figure out how to take it away
+ # without breaking compat - be my guest
+ is_same_sql_bind (
+ $cd_rs2->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield
+ FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ FROM cd me
+ LEFT JOIN track tracks ON tracks.cd = me.cdid
+ JOIN artist artist ON artist.artistid = me.artist
+ WHERE ( tracks.title != ? )
+ GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+ ) me
+ LEFT JOIN track tracks ON tracks.cd = me.cdid
+ JOIN artist artist ON artist.artistid = me.artist
+ WHERE ( tracks.title != ? )
+ GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield
+ )',
+ [ map { [ 'tracks.title' => 'ugabuganoexist' ] } (1 .. 2) ],
+ );
+}
+
done_testing;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBIC::SqlMakerTest;
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+
+# a regular belongs_to prefetch
+my $cds = $schema->resultset('CD')->search ({}, { prefetch => 'artist' } );
+
+my $nulls = {
+ hashref => {},
+ arrayref => [],
+ undef => undef,
+};
+
+# make sure null-prefetches do not screw with the final sql (the actual
+# checks below are currently commented out):
+for my $type (keys %$nulls) {
+# is_same_sql_bind (
+# $cds->search({}, { prefetch => { artist => $nulls->{$type} } })->as_query,
+# $cds->as_query,
+# "same sql with null $type prefetch"
+# );
+}
+
+# make sure left join is carried only starting from the first has_many
+is_same_sql_bind (
+ $cds->search({}, { prefetch => { artist => { cds => 'artist' } } })->as_query,
+ '(
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+ artist.artistid, artist.name, artist.rank, artist.charfield,
+ cds.cdid, cds.artist, cds.title, cds.year, cds.genreid, cds.single_track,
+ artist_2.artistid, artist_2.name, artist_2.rank, artist_2.charfield
+ FROM cd me
+ JOIN artist artist ON artist.artistid = me.artist
+ LEFT JOIN cd cds ON cds.artist = artist.artistid
+ LEFT JOIN artist artist_2 ON artist_2.artistid = cds.artist
+ ORDER BY cds.artist, cds.year
+ )',
+ [],
+);
+
+done_testing;
use Test::Exception;
use lib qw(t/lib);
use DBICTest;
-use Data::Dumper;
use IO::File;
my $schema = DBICTest->init_schema();
my $search = { 'artist.name' => 'Caterwauler McCrae' };
my $attr = { prefetch => [ qw/artist liner_notes/ ],
order_by => 'me.cdid' };
-my $search_str = Dumper($search);
-my $attr_str = Dumper($attr);
my $rs = $schema->resultset("CD")->search($search, $attr);
my @cd = $rs->all;
# test where conditions at the root of the related chain
- my $artist_rs = $schema->resultset("Artist")->search({artistid => 11});
-
+ my $artist_rs = $schema->resultset("Artist")->search({artistid => 2});
+ my $artist = $artist_rs->next;
+ $artist->create_related ('cds', $_) for (
+ {
+ year => 1999, title => 'vague cd', genre => { name => 'vague genre' }
+ },
+ {
+ year => 1999, title => 'vague cd2', genre => { name => 'vague genre' }
+ },
+ );
$rs = $artist_rs->search_related('cds')->search_related('genre',
- { 'genre.name' => 'foo' },
+ { 'genre.name' => 'vague genre' },
{ prefetch => 'cds' },
);
- is($rs->all, 0, 'prefetch without distinct (objects)');
- is($rs->count, 0, 'prefetch without distinct (count)');
-
+ is($rs->all, 1, 'base without distinct (objects)');
+ is($rs->count, 1, 'base without distinct (count)');
+ # artist -> 2 cds -> 2 genres -> 2 cds for each genre = 4
+ is($rs->search_related('cds')->all, 4, 'prefetch without distinct (objects)');
+ is($rs->search_related('cds')->count, 4, 'prefetch without distinct (count)');
$rs = $artist_rs->search(undef, {distinct => 1})
->search_related('cds')->search_related('genre',
- { 'genre.name' => 'foo' },
+ { 'genre.name' => 'vague genre' },
);
- is($rs->all, 0, 'distinct without prefetch (objects)');
- is($rs->count, 0, 'distinct without prefetch (count)');
-
+ is($rs->all, 1, 'distinct without prefetch (objects)');
+ is($rs->count, 1, 'distinct without prefetch (count)');
$rs = $artist_rs->search({}, {distinct => 1})
->search_related('cds')->search_related('genre',
- { 'genre.name' => 'foo' },
+ { 'genre.name' => 'vague genre' },
{ prefetch => 'cds' },
);
- is($rs->all, 0, 'distinct with prefetch (objects)');
- is($rs->count, 0, 'distinct with prefetch (count)');
+ is($rs->all, 1, 'distinct with prefetch (objects)');
+ is($rs->count, 1, 'distinct with prefetch (count)');
+ # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2
+ is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)');
+ is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)');
-#!/usr/bin/perl -w
-
use strict;
-use warnings;
+use warnings;
use Test::More;
use lib qw(t/lib);
-#!/usr/bin/perl -w
-
use strict;
-use warnings;
+use warnings;
use Test::More;
use lib qw(t/lib);
# expect a year update on the only related row
-# (non-qunique column only)
+# (non-unique column only)
$genre->update_or_create_related ('model_cd', {
year => 2011,
});
},
'CD year column updated correctly without a disambiguator',
);
-
-
use strict;
use warnings FATAL => 'all';
-use Data::Dumper;
-
use Test::More;
plan ( tests => 5 );
use DBIC::SqlMakerTest;
use DBIC::DebugObj;
use DBICTest;
+
+# use Data::Dumper comparisons to avoid messing with coderefs
use Data::Dumper;
+$Data::Dumper::Sortkeys = 1;
my $schema = DBICTest->init_schema();
use strict;
use warnings;
-use Data::Dumper;
-
use Test::More;
-
use lib qw(t/lib);
use DBICTest;
use DBIC::SqlMakerTest;
my @tests = (
{
rs => $cdrs,
+ search => \[ "title = ? AND year LIKE ?", 'buahaha', '20%' ],
+ attrs => { rows => 5 },
+ sqlbind => \[
+ "( SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE (title = ? AND year LIKE ?) LIMIT 5)",
+ 'buahaha',
+ '20%',
+ ],
+ },
+
+ {
+ rs => $cdrs,
search => {
artist_id => { 'in' => $art_rs->search({}, { rows => 1 })->get_column( 'id' )->as_query },
},
use strict;
-use warnings;
+use warnings;
use Test::More;
+use Test::Warn;
use lib qw(t/lib);
use DBICTest;
use Data::Dumper;
}
}
-plan tests => 17;
-
my $schema = DBICTest->init_schema( sqlite_use_file => 1 );
is( ref($schema->storage), 'DBIx::Class::Storage::DBI::SQLite',
},
],
},
+ 'connect_info ([ \%attr_with_coderef ])' => {
+ args => [ {
+ dbh_maker => $coderef,
+ dsn => 'blah',
+ user => 'bleh',
+ on_connect_do => [qw/a b c/],
+ on_disconnect_do => [qw/d e f/],
+ } ],
+ dbi_connect_info => [
+ $coderef
+ ],
+ warn => qr/Attribute\(s\) 'dsn', 'user' in connect_info were ignored/,
+ },
};
for my $type (keys %$invocations) {
local $Data::Dumper::Sortkeys = 1;
my $arg_dump = Dumper ($invocations->{$type}{args});
- $storage->connect_info ($invocations->{$type}{args});
+ warnings_exist (
+ sub { $storage->connect_info ($invocations->{$type}{args}) },
+ $invocations->{$type}{warn} || (),
+ 'Warned about ignored attributes',
+ );
is ($arg_dump, Dumper ($invocations->{$type}{args}), "$type didn't modify passed arguments");
-
is_deeply ($storage->_dbi_connect_info, $invocations->{$type}{dbi_connect_info}, "$type produced correct _dbi_connect_info");
ok ( (not $storage->auto_savepoint and not $storage->unsafe), "$type correctly ignored extra hashref");
);
}
+done_testing;
+
1;
--- /dev/null
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::Schema;
+
+# make sure nothing eats the exceptions (an unchecked eval in Storage::DESTROY used to be a problem)
+
+{
+ package Dying::Storage;
+
+ use warnings;
+ use strict;
+
+ use base 'DBIx::Class::Storage::DBI';
+
+ sub _populate_dbh {
+ my $self = shift;
+ my $death = $self->_dbi_connect_info->[3]{die};
+
+ die "storage test died: $death" if $death eq 'before_populate';
+ my $ret = $self->next::method (@_);
+ die "storage test died: $death" if $death eq 'after_populate';
+
+ return $ret;
+ }
+}
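+# Editorial note: _populate_dbh is the internal hook where the $dbh actually
+# gets established; the { die => ... } hashref handed to ->connection below
+# travels through as the fourth connect_info element, which is why the fake
+# storage reads _dbi_connect_info->[3]{die}.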
+
+for (qw/before_populate after_populate/) {
+ dies_ok (sub {
+ my $schema = DBICTest::Schema->clone;
+ $schema->storage_type ('Dying::Storage');
+ $schema->connection (DBICTest->_database, { die => $_ });
+ $schema->storage->ensure_connected;
+ }, "$_ exception found");
+}
+
+done_testing;
use Test::More tests => 9;
+use DBIx::Class::Storage::DBI;
my $schema = DBICTest->init_schema(
no_connect => 1,
no_deploy => 1,
use Test::More;
use lib qw(t/lib);
use DBICTest;
-use Data::Dumper;
use DBIC::SqlMakerTest;
my $ping_count = 0;
plan tests => 6;
-my $db_orig = "$FindBin::Bin/var/DBIxClass.db";
+my $db_orig = "$FindBin::Bin/../var/DBIxClass.db";
my $db_tmp = "$db_orig.tmp";
# Set up the "usual" sqlite for DBICTest
use strict;
use warnings;
use Test::More;
+use Benchmark;
use lib qw(t/lib);
use DBICTest; # do not remove even though it is not used
plan skip_all => 'Skipping as AUTOMATED_TESTING is set'
if ( $ENV{AUTOMATED_TESTING} );
-eval "use Benchmark ':all'";
-plan skip_all => 'needs Benchmark for testing' if $@;
-
plan tests => 3;
ok( 1, 'Dummy - prevents next test timing out' );