Revision history for DBIx::Class
+0.06999_02 2006-06-09 23:58:33
+ - Fixed up POD::Coverage tests, filled in some POD holes
+ - Added a warning for incorrect component order in load_components
+ - Fixed resultset bugs to do with related searches
+ - added code and tests for Componentized::ensure_class_found and
+ load_optional_class
+ - NoBindVars + Sybase + MSSQL stuff
+ - only rebless S::DBI if it is still S::DBI and not a subclass
+ - Added `use' statement for DBD::Pg in Storage::DBI::Pg
+ - stopped test relying on order of unordered search
+ - bugfix for join-types in nested joins using the from attribute
+ - obscure prefetch problem fixed
+ - tightened up deep search_related
+ - Fixed 'DBIx/Class/DB.pm did not return a true value' error
+ - Revert change to test for deprecated find usage and swallow warnings
+ - Slight wording change to new_related() POD
+ - new specific test for connect_info coderefs
+ - POD clarification and content bugfixing + a few code formatting fixes
+ - POD::Coverage additions
+ - fixed debugfh
+ - Fix column_info stomping
+
0.06999_01 2006-05-28 17:19:30
- add automatic naming of unique constraints
- marked DB.pm as deprecated and noted it will be removed by 1.0
ColumnCase is loaded
- reorganized and simplified tests
- added Ordered
+ - added the ability to set on_connect_do and the various sql_maker
+ options as part of Storage::DBI's connect_info.
0.06003 2006-05-19 15:37:30
- make find_or_create_related check defined() instead of truth
- columns_info_for made more robust / informative
- ithreads compat added, fork compat improved
- weaken result_source in all resultsets
- - Make pg seq extractor less sensitive.
+ - Make pg seq extractor less sensitive.
0.05999_03 2006-03-14 01:58:10
- has_many prefetch fixes
# i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
# brain damage and presumably various other packaging systems too
-$VERSION = '0.06999_01';
+$VERSION = '0.06999_02';
sub MODIFY_CODE_ATTRIBUTES {
my ($class,$code,@attrs) = @_;
# Create a result set to search for artists.
# This does not query the DB.
my $johns_rs = $schema->resultset('Artist')->search(
- # Build your WHERE using an L<SQL::Abstract> structure:
+ # Build your WHERE using an SQL::Abstract structure:
{ name => { like => 'John%' } }
);
{
no strict 'refs';
foreach my $to (reverse @to_inject) {
- unshift( @{"${target}::ISA"}, $to )
- unless ($target eq $to || $target->isa($to));
+ my @comps = qw(DigestColumns ResultSetManager Ordered UTF8Columns);
+ # Add components here that need to be loaded before Core
+ foreach my $first_comp (@comps) {
+ if ($to eq 'DBIx::Class::Core' &&
+ $target->isa("DBIx::Class::${first_comp}")) {
+ warn "Possible incorrect order of components in ".
+ "${target}::load_components($first_comp) call: Core loaded ".
+ "before $first_comp. See the documentation for ".
+ "DBIx::Class::$first_comp for more information";
+ }
+ }
+ unshift( @{"${target}::ISA"}, $to )
+ unless ($target eq $to || $target->isa($to));
}
}
$class->inject_base($class => @comp);
}
+# Given a class name, tests to see if it is already loaded or otherwise
+# defined. If it is not yet loaded, the package is require'd, and an exception
+# is thrown if the class is still not loaded.
+#
# TODO: handle ->has_many('rel', 'Class'...) instead of
# ->has_many('rel', 'Some::Schema::Class'...)
sub ensure_class_loaded {
eval "require $f_class";
my $err = $@;
Class::Inspector->loaded($f_class)
- or die $err || "require $f_class was successful but the package".
- "is not defined";
+ or $class->throw_exception($err || "`require $f_class' was successful ".
+ "but the package is not defined");
+}
+
+# Returns true if the specified class is installed or already loaded, false
+# otherwise
+sub ensure_class_found {
+ my ($class, $f_class) = @_;
+ return Class::Inspector->loaded($f_class) ||
+ Class::Inspector->installed($f_class);
+}
+
+# Returns a true value if the specified class is installed and loaded
+# successfully, throws an exception if the class is found but not loaded
+# successfully, and false if the class is not installed
+sub load_optional_class {
+ my ($class, $f_class) = @_;
+ if ($class->ensure_class_found($f_class)) {
+ $class->ensure_class_loaded($f_class);
+ return 1;
+ } else {
+ return 0;
+ }
}
1;
=item L<DBIx::Class::Relationship>
+=item L<DBIx::Class::PK::Auto>
+
=item L<DBIx::Class::PK>
=item L<DBIx::Class::Row>
sub storage { shift->schema_instance(@_)->storage; }
-sub resultset_instance {
- my $class = ref $_[0] || $_[0];
- my $source = $class->result_source_instance;
- if ($source->result_class ne $class) {
- $source = $source->new($source);
- $source->result_class($class);
- }
- return $source->resultset;
-}
-
=head1 NAME
DBIx::Class::DB - (DEPRECATED) classdata schema component
}
}
-1;
+=head2 resultset_instance
+
+Returns an instance of a resultset for this class - effectively
+mapping the L<Class::DBI> connection-as-classdata paradigm into the
+native L<DBIx::Class::ResultSet> system.
+
+=cut
+
+sub resultset_instance {
+ my $class = ref $_[0] || $_[0];
+ my $source = $class->result_source_instance;
+ if ($source->result_class ne $class) {
+ $source = $source->new($source);
+ $source->result_class($class);
+ }
+ return $source->resultset;
+}
+
+=head2 resolve_class
+
+****DEPRECATED****
+
+See L<class_resolver>
+
+=head2 dbi_commit
+
+****DEPRECATED****
+
+Alias for L<txn_commit>
+
+=head2 dbi_rollback
+
+****DEPRECATED****
+
+Alias for L<txn_rollback>
=head1 AUTHORS
=cut
+1;
return $deflate->($value, $self);
}
+=head2 get_inflated_column
+
+ my $val = $obj->get_inflated_column($col);
+
+Fetch a column value in its inflated state. This is directly
+analogous to L<DBIx::Class::Row/get_column> in that it only fetches a
+column already retrieved from the database, and then inflates it.
+Throws an exception if the column requested is not an inflated column.
+
+=cut
+
sub get_inflated_column {
my ($self, $col) = @_;
$self->throw_exception("$col is not an inflated column")
$self->_inflated_column($col, $self->get_column($col));
}
+=head2 set_inflated_column
+
+ my $copy = $obj->set_inflated_column($col => $val);
+
+Sets a column value from an inflated value. This is directly
+analogous to L<DBIx::Class::Row/set_column>.
+
+=cut
+
sub set_inflated_column {
my ($self, $col, @rest) = @_;
my $ret = $self->_inflated_column_op('set', $col, @rest);
return $ret;
}
+=head2 store_inflated_column
+
+ my $copy = $obj->store_inflated_column($col => $val);
+
+Sets a column value from an inflated value without marking the column
+as dirty. This is directly analogous to
+L<DBIx::Class::Row/store_column>.
+
+=cut
+
sub store_inflated_column {
my ($self, $col, @rest) = @_;
my $ret = $self->_inflated_column_op('store', $col, @rest);
return $obj;
}
+=head2 update
+
+Updates a row in the same way as L<DBIx::Class::Row/update>, handling
+inflation and deflation of columns appropriately.
+
+=cut
+
sub update {
my ($class, $attrs, @rest) = @_;
$attrs ||= {};
return $class->next::method($attrs, @rest);
}
+=head2 new
+
+Creates a row in the same way as L<DBIx::Class::Row/new>, handling
+inflation and deflation of columns appropriately.
+
+=cut
+
sub new {
my ($class, $attrs, @rest) = @_;
$attrs ||= {};
__PACKAGE__->mk_group_accessors('simple' => '__datetime_parser');
+=head2 register_column
+
+Chains with the L<DBIx::Class::Row/register_column> method, and sets
+up datetime columns appropriately. This would not normally be
+directly called by end users.
+
+=cut
+
sub register_column {
my ($self, $column, $info, @rest) = @_;
$self->next::method($column, $info, @rest);
- if ($info->{data_type} =~ /^datetime$/i) {
+ return unless defined($info->{data_type});
+ my $type = lc($info->{data_type});
+ if ($type eq 'datetime' || $type eq 'date') {
+ my ($parse, $format) = ("parse_${type}", "format_${type}");
$self->inflate_column(
$column =>
{
inflate => sub {
my ($value, $obj) = @_;
- $obj->_datetime_parser->parse_datetime($value);
+ $obj->_datetime_parser->$parse($value);
},
deflate => sub {
my ($value, $obj) = @_;
- $obj->_datetime_parser->format_datetime($value);
+ $obj->_datetime_parser->$format($value);
},
}
);
You might have a class C<Artist> which has many C<CD>s. Further, you
want to create a C<CD> object every time you insert an C<Artist> object.
-You can accomplish this by overriding C<insert>:
+You can accomplish this by overriding C<insert> on your objects:
sub insert {
- my ( $class, $args_ref ) = @_;
- my $self = $class->next::method($args_ref);
+ my ( $self, @args ) = @_;
+ $self->next::method(@args);
$self->cds->new({})->fill_from_artist($self)->insert;
return $self;
}
So, you are bored with SQL, and want a native Perl interface for your
database? Or you've been doing this for a while with L<Class::DBI>,
and think there's a better way? You've come to the right place.
+
+=head1 THE DBIx::Class WAY
+
+Here are a few simple tips that will help you get your bearings
+with DBIx::Class.
+
+=head2 Tables become ResultSources
+
+DBIx::Class needs to know what your Table structure looks like. You do that
+by defining L<DBIx::Class::ResultSource>s. Each table gets a ResultSource,
+which defines the Columns it has, along with any Relationships it has to
+other tables. (And oh, so much more besides) The important thing to
+understand:
+
+ A ResultSource == Table
+
+(most of the time, but just bear with my simplification)
+
+=head2 It's all about the ResultSet
+
+So, we've got some ResultSources defined. Now, we want to actually use
+those definitions to help us translate the queries we need into
+handy perl objects!
+
+Let's say we defined a ResultSource for an "album" table with three
+columns: "albumid", "artist", and "title". Any time we want to query
+this table, we'll be creating a L<DBIx::Class::ResultSet> from its
+ResultSource. For example, the results of:
+
+ SELECT albumid, artist, title FROM album;
+
+Would be retrieved by creating a ResultSet object from the album
+table's ResultSource, likely by using the "search" method.
+
+DBIx::Class doesn't limit you to creating only simple ResultSets --
+if you wanted to do something like:
+
+ SELECT title FROM album GROUP BY title;
+
+You could easily achieve it.
+
+The important thing to understand:
+
+ Any time you would reach for a SQL query in DBI, you are
+ creating a DBIx::Class::ResultSet.
+
+=head2 Search is like "prepare"
+
+DBIx::Class tends to wait until it absolutely must fetch information
+from the database. If you are returning a ResultSet, the query won't
+execute until you use a method that wants to access the data. (Such
+as "next", or "first")
+
+The important thing to understand:
+
+ Setting up a ResultSet does not execute the query; retrieving
+ the data does.
+
+=head1 SETTING UP DBIx::Class
+
Let's look at how you can set and use your first native L<DBIx::Class>
tree.
map { $_ . '=' . $vals{$_} } sort keys %vals;
}
+=head2 ident_condition
+
+ my $cond = $result_source->ident_condition();
+
+ my $cond = $result_source->ident_condition('alias');
+
+Produces a condition hash to locate a row based on the primary key(s).
+
+=cut
+
sub ident_condition {
my ($self, $alias) = @_;
my %cond;
use strict;
use warnings;
+use Scalar::Util ();
use base qw/DBIx::Class/;
=head1 NAME
my $new_obj = $obj->new_related('relname', \%col_data);
Create a new item of the related foreign class. If called on a
-L<DBIx::Class::Manual::Glossary/"Row"> object, it will magically set any
-primary key values into foreign key columns for you. The newly created item
-will not be saved into your storage until you call L<DBIx::Class::Row/insert>
+L<DBIx::Class::Manual::Glossary/"Row"> object, it will magically
+set any foreign key columns of the new object to the related primary
+key columns of the source object for you. The newly created item will
+not be saved into your storage until you call L<DBIx::Class::Row/insert>
on it.
=cut
if (defined $f_obj) {
my $f_class = $self->result_source->schema->class($rel_obj->{class});
$self->throw_exception( "Object $f_obj isn't a ".$f_class )
- unless $f_obj->isa($f_class);
+ unless Scalar::Util::blessed($f_obj) and $f_obj->isa($f_class);
}
$self->set_columns(
$self->result_source->resolve_condition(
-package DBIx::Class::Relationship::BelongsTo;
+package # hide from PAUSE
+ DBIx::Class::Relationship::BelongsTo;
+
+# Documentation for these methods can be found in
+# DBIx::Class::Relationship
use strict;
use warnings;
use Storable;
use Data::Dumper;
use Scalar::Util qw/weaken/;
-
+use Data::Dumper;
use DBIx::Class::ResultSetColumn;
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/AccessorGroup/);
sub search_rs {
my $self = shift;
- my $our_attrs = { %{$self->{attrs}} };
- my $having = delete $our_attrs->{having};
my $attrs = {};
$attrs = pop(@_) if @_ > 1 and ref $_[$#_] eq 'HASH';
-
+ my $our_attrs = ($attrs->{_parent_attrs})
+ ? { %{$attrs->{_parent_attrs}} }
+ : { %{$self->{attrs}} };
+ my $having = delete $our_attrs->{having};
+
+ # XXX this is getting messy
+ if ($attrs->{_live_join_stack}) {
+ my $live_join = $attrs->{_live_join_stack};
+ foreach (reverse @{$live_join}) {
+ $attrs->{_live_join_h} = (defined $attrs->{_live_join_h}) ? { $_ => $attrs->{_live_join_h} } : $_;
+ }
+ }
+
# merge new attrs into old
foreach my $key (qw/join prefetch/) {
next unless (exists $attrs->{$key});
+ if ($attrs->{_live_join_stack} || $our_attrs->{_live_join_stack}) {
+ my $live_join = $attrs->{_live_join_stack} ||
+ $our_attrs->{_live_join_stack};
+ foreach (reverse @{$live_join}) {
+ $attrs->{$key} = { $_ => $attrs->{$key} };
+ }
+ }
+
if (exists $our_attrs->{$key}) {
$our_attrs->{$key} = $self->_merge_attr($our_attrs->{$key}, $attrs->{$key});
} else {
delete $attrs->{$key};
}
- if (exists $our_attrs->{prefetch}) {
- $our_attrs->{join} = $self->_merge_attr($our_attrs->{join}, $our_attrs->{prefetch}, 1);
+ $our_attrs->{join} = $self->_merge_attr(
+ $our_attrs->{join}, $attrs->{_live_join_h}, 1
+ ) if ($attrs->{_live_join_h});
+
+ if (defined $our_attrs->{prefetch}) {
+ $our_attrs->{join} = $self->_merge_attr(
+ $our_attrs->{join}, $our_attrs->{prefetch}, 1
+ );
}
my $new_attrs = { %{$our_attrs}, %{$attrs} };
-
- # merge new where and having into old
my $where = (@_
- ? ((@_ == 1 || ref $_[0] eq "HASH")
- ? shift
- : ((@_ % 2)
- ? $self->throw_exception(
- "Odd number of arguments to search")
- : {@_}))
- : undef());
+ ? (
+ (@_ == 1 || ref $_[0] eq "HASH")
+ ? shift
+ : (
+ (@_ % 2)
+ ? $self->throw_exception("Odd number of arguments to search")
+ : {@_}
+ )
+ )
+ : undef()
+ );
+
if (defined $where) {
- $new_attrs->{where} = (defined $new_attrs->{where}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $where, $new_attrs->{where} ] }
- : $where);
+ $new_attrs->{where} = (
+ defined $new_attrs->{where}
+ ? { '-and' => [
+ map {
+ ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_
+ } $where, $new_attrs->{where}
+ ]
+ }
+ : $where);
}
if (defined $having) {
- $new_attrs->{having} = (defined $new_attrs->{having}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $having, $new_attrs->{having} ] }
- : $having);
+ $new_attrs->{having} = (
+ defined $new_attrs->{having}
+ ? { '-and' => [
+ map {
+ ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_
+ } $having, $new_attrs->{having}
+ ]
+ }
+ : $having);
}
my $rs = (ref $self)->new($self->result_source, $new_attrs);
- $rs->{_parent_rs} = $self->{_parent_rs} if ($self->{_parent_rs}); #XXX - hack to pass through parent of related resultsets
+ $rs->{_parent_rs} = $self->{_parent_rs} if ($self->{_parent_rs});
+ #XXX - hack to pass through parent of related resultsets
- unless (@_) { # no search, effectively just a clone
+ unless (@_) { # no search, effectively just a clone
my $rows = $self->get_cache;
if ($rows) {
$rs->set_cache($rows);
You can also find a row by a specific unique constraint using the C<key>
attribute. For example:
- my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', { key => 'cd_artist_title' });
+ my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', {
+ key => 'cd_artist_title'
+ });
Additionally, you can specify the columns explicitly by name:
}
my @unique_queries = $self->_unique_queries($input_query, $attrs);
-# use Data::Dumper; warn Dumper $self->result_source->name, $input_query, \@unique_queries, $self->{attrs}->{where};
# Handle cases where the ResultSet defines the query, or where the user is
# abusing find
# Add the ResultSet's alias
foreach my $key (grep { ! m/\./ } keys %$unique_query) {
- $unique_query->{"$self->{attrs}->{alias}.$key"} = delete $unique_query->{$key};
+ my $alias = ($self->{attrs}->{_live_join})
+ ? $self->{attrs}->{_live_join}
+ : $self->{attrs}->{alias};
+ $unique_query->{"$alias.$key"} = delete $unique_query->{$key};
}
push @unique_queries, $unique_query;
=over 4
-=item Arguments: $cond, \%attrs?
+=item Arguments: $rel, $cond, \%attrs?
=item Return Value: $new_resultset
}
unless ($self->_is_unique_query($attrs->{where})) {
- carp "Query not guarnteed to return a single row"
+ carp "Query not guaranteed to return a single row"
. "; please declare your unique constraints or use search instead";
}
my @data = $self->result_source->storage->select_single(
- $attrs->{from}, $attrs->{select},
- $attrs->{where},$attrs);
+ $attrs->{from}, $attrs->{select},
+ $attrs->{where},$attrs
+ );
+
return (@data ? $self->_construct_object(@data) : ());
}
my ($self, $query) = @_;
my $collapsed = $self->_collapse_query($query);
-# use Data::Dumper; warn Dumper $query, $collapsed;
+ my $alias = ($self->{attrs}->{_live_join})
+ ? $self->{attrs}->{_live_join}
+ : $self->{attrs}->{alias};
foreach my $name ($self->result_source->unique_constraint_names) {
- my @unique_cols = map { "$self->{attrs}->{alias}.$_" }
- $self->result_source->unique_constraint_columns($name);
+ my @unique_cols = map {
+ "$alias.$_"
+ } $self->result_source->unique_constraint_columns($name);
# Count the values for each unique column
my %seen = map { $_ => 0 } @unique_cols;
foreach my $key (keys %$collapsed) {
my $aliased = $key;
- $aliased = "$self->{attrs}->{alias}.$key" unless $key =~ /\./;
+ $aliased = "$alias.$key" unless $key =~ /\./;
next unless exists $seen{$aliased}; # Additional constraints are okay
$seen{$aliased} = scalar @{ $collapsed->{$key} };
sub get_column {
my ($self, $column) = @_;
-
my $new = DBIx::Class::ResultSetColumn->new($self, $column);
return $new;
}
$self->{all_cache_position} = 1;
return ($self->all)[0];
}
- my @row = (exists $self->{stashed_row} ?
- @{delete $self->{stashed_row}} :
- $self->cursor->next
+ my @row = (
+ exists $self->{stashed_row}
+ ? @{delete $self->{stashed_row}}
+ : $self->cursor->next
);
return unless (@row);
return $self->_construct_object(@row);
return if(exists $self->{_attrs}); #return if _resolve has already been called
- my $attrs = $self->{attrs};
- my $source = ($self->{_parent_rs}) ? $self->{_parent_rs} : $self->{result_source};
+ my $attrs = $self->{attrs};
+ my $source = ($self->{_parent_rs})
+ ? $self->{_parent_rs}
+ : $self->{result_source};
# XXX - lose storable dclone
- my $record_filter = delete $attrs->{record_filter} if (defined $attrs->{record_filter});
+ my $record_filter = delete $attrs->{record_filter}
+ if (defined $attrs->{record_filter});
$attrs = Storable::dclone($attrs || {}); # { %{ $attrs || {} } };
$attrs->{record_filter} = $record_filter if ($record_filter);
$self->{attrs}->{record_filter} = $record_filter if ($record_filter);
$attrs->{columns} ||= delete $attrs->{cols} if $attrs->{cols};
delete $attrs->{as} if $attrs->{columns};
- $attrs->{columns} ||= [ $self->{result_source}->columns ] unless $attrs->{select};
- my $select_alias = ($self->{_parent_rs}) ? $self->{attrs}->{_live_join} : $alias;
+ $attrs->{columns} ||= [ $self->{result_source}->columns ]
+ unless $attrs->{select};
+ my $select_alias = ($self->{_parent_rs})
+ ? $self->{attrs}->{_live_join}
+ : $alias;
$attrs->{select} = [
- map { m/\./ ? $_ : "${select_alias}.$_" } @{delete $attrs->{columns}}
- ] if $attrs->{columns};
+ map { m/\./ ? $_ : "${select_alias}.$_" } @{delete $attrs->{columns}}
+ ] if $attrs->{columns};
$attrs->{as} ||= [
- map { m/^\Q$alias.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
- ];
+ map { m/^\Q$alias.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
+ ];
if (my $include = delete $attrs->{include_columns}) {
- push(@{$attrs->{select}}, @$include);
- push(@{$attrs->{as}}, map { m/([^.]+)$/; $1; } @$include);
+ push(@{$attrs->{select}}, @$include);
+ push(@{$attrs->{as}}, map { m/([^.]+)$/; $1; } @$include);
}
$attrs->{from} ||= [ { $alias => $source->from } ];
$attrs->{seen_join} ||= {};
my %seen;
if (my $join = delete $attrs->{join}) {
- foreach my $j (ref $join eq 'ARRAY' ? @$join : ($join)) {
- if (ref $j eq 'HASH') {
- $seen{$_} = 1 foreach keys %$j;
- } else {
- $seen{$j} = 1;
- }
+ foreach my $j (ref $join eq 'ARRAY' ? @$join : ($join)) {
+ if (ref $j eq 'HASH') {
+ $seen{$_} = 1 foreach keys %$j;
+ } else {
+ $seen{$j} = 1;
}
-
- push(@{$attrs->{from}}, $source->resolve_join($join, $attrs->{alias}, $attrs->{seen_join}));
+ }
+ push(@{$attrs->{from}},
+ $source->resolve_join($join, $attrs->{alias}, $attrs->{seen_join})
+ );
}
$attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct};
$attrs->{order_by} = [ $attrs->{order_by} ] if
$attrs->{order_by} and !ref($attrs->{order_by});
$attrs->{order_by} ||= [];
- if(my $seladds = delete($attrs->{'+select'})) {
- my @seladds = (ref($seladds) eq 'ARRAY' ? @$seladds : ($seladds));
- $attrs->{select} = [
- @{ $attrs->{select} },
- map { (m/\./ || ref($_)) ? $_ : "${alias}.$_" } $seladds
- ];
- }
- if(my $asadds = delete($attrs->{'+as'})) {
- my @asadds = (ref($asadds) eq 'ARRAY' ? @$asadds : ($asadds));
- $attrs->{as} = [ @{ $attrs->{as} }, @asadds ];
- }
-
+ if(my $seladds = delete($attrs->{'+select'})) {
+ my @seladds = (ref($seladds) eq 'ARRAY' ? @$seladds : ($seladds));
+ $attrs->{select} = [
+ @{ $attrs->{select} },
+ map { (m/\./ || ref($_)) ? $_ : "${alias}.$_" } $seladds
+ ];
+ }
+ if(my $asadds = delete($attrs->{'+as'})) {
+ my @asadds = (ref($asadds) eq 'ARRAY' ? @$asadds : ($asadds));
+ $attrs->{as} = [ @{ $attrs->{as} }, @asadds ];
+ }
my $collapse = $attrs->{collapse} || {};
if (my $prefetch = delete $attrs->{prefetch}) {
- my @pre_order;
- foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
- if ( ref $p eq 'HASH' ) {
- foreach my $key (keys %$p) {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$key};
- }
- } else {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$p};
- }
- my @prefetch = $source->resolve_prefetch(
- $p, $attrs->{alias}, {}, \@pre_order, $collapse);
- push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
- push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
+ my @pre_order;
+ foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
+ if ( ref $p eq 'HASH' ) {
+ foreach my $key (keys %$p) {
+ push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
+ unless $seen{$key};
+ }
+ } else {
+ push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
+ unless $seen{$p};
}
- push(@{$attrs->{order_by}}, @pre_order);
+
+ # we're about to resolve_join on the current class, so we need to bring
+ # the joins (which are from the original class) to the right level
+ # XXX the below alg is ridiculous
+ if ($attrs->{_live_join_stack}) {
+ STACK:
+ foreach (@{$attrs->{_live_join_stack}}) {
+ if (ref $p eq 'HASH') {
+ if (exists $p->{$_}) {
+ $p = $p->{$_};
+ } else {
+ $p = undef;
+ last STACK;
+ }
+ } elsif (ref $p eq 'ARRAY') {
+ foreach my $pe (@{$p}) {
+ if ($pe eq $_) {
+ $p = undef;
+ last STACK;
+ }
+ next unless(ref $pe eq 'HASH');
+ next unless(exists $pe->{$_});
+ $p = $pe->{$_};
+ next STACK;
+ }
+ $p = undef;
+ last STACK;
+ } else {
+ $p = undef;
+ last STACK;
+ }
+ }
+ }
+
+ if ($p) {
+ my @prefetch = $self->result_source->resolve_prefetch(
+ $p, $attrs->{alias}, {}, \@pre_order, $collapse
+ );
+
+ push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
+ push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
+ }
+ }
+ push(@{$attrs->{order_by}}, @pre_order);
}
$attrs->{collapse} = $collapse;
$self->{_attrs} = $attrs;
return $b unless $a;
if (ref $b eq 'HASH' && ref $a eq 'HASH') {
- foreach my $key (keys %{$b}) {
- if (exists $a->{$key}) {
- $a->{$key} = $self->_merge_attr($a->{$key}, $b->{$key}, $is_prefetch);
- } else {
- $a->{$key} = delete $b->{$key};
- }
- }
- return $a;
+ foreach my $key (keys %{$b}) {
+ if (exists $a->{$key}) {
+ $a->{$key} = $self->_merge_attr($a->{$key}, $b->{$key}, $is_prefetch);
+ } else {
+ $a->{$key} = $b->{$key};
+ }
+ }
+ return $a;
} else {
- $a = [$a] unless (ref $a eq 'ARRAY');
- $b = [$b] unless (ref $b eq 'ARRAY');
-
- my $hash = {};
- my $array = [];
- foreach ($a, $b) {
- foreach my $element (@{$_}) {
- if (ref $element eq 'HASH') {
- $hash = $self->_merge_attr($hash, $element, $is_prefetch);
- } elsif (ref $element eq 'ARRAY') {
- $array = [@{$array}, @{$element}];
- } else {
- if (($b == $_) && $is_prefetch) {
- $self->_merge_array($array, $element, $is_prefetch);
- } else {
- push(@{$array}, $element);
- }
- }
- }
- }
-
- if ((keys %{$hash}) && (scalar(@{$array} > 0))) {
- return [$hash, @{$array}];
- } else {
- return (keys %{$hash}) ? $hash : $array;
- }
+ $a = [$a] unless (ref $a eq 'ARRAY');
+ $b = [$b] unless (ref $b eq 'ARRAY');
+
+ my $hash = {};
+ my $array = [];
+ foreach ($a, $b) {
+ foreach my $element (@{$_}) {
+ if (ref $element eq 'HASH') {
+ $hash = $self->_merge_attr($hash, $element, $is_prefetch);
+ } elsif (ref $element eq 'ARRAY') {
+ $array = [@{$array}, @{$element}];
+ } else {
+ if (($b == $_) && $is_prefetch) {
+ $self->_merge_array($array, $element, $is_prefetch);
+ } else {
+ push(@{$array}, $element);
+ }
+ }
+ }
+ }
+
+ my $final_array = [];
+ foreach my $element (@{$array}) {
+ push(@{$final_array}, $element) unless (exists $hash->{$element});
+ }
+ $array = $final_array;
+
+ if ((keys %{$hash}) && (scalar(@{$array} > 0))) {
+ return [$hash, @{$array}];
+ } else {
+ return (keys %{$hash}) ? $hash : $array;
+ }
}
}
sub _merge_array {
- my ($self, $a, $b) = @_;
-
- $b = [$b] unless (ref $b eq 'ARRAY');
- # add elements from @{$b} to @{$a} which aren't already in @{$a}
- foreach my $b_element (@{$b}) {
- push(@{$a}, $b_element) unless grep {$b_element eq $_} @{$a};
- }
+ my ($self, $a, $b) = @_;
+
+ $b = [$b] unless (ref $b eq 'ARRAY');
+ # add elements from @{$b} to @{$a} which aren't already in @{$a}
+ foreach my $b_element (@{$b}) {
+ push(@{$a}, $b_element) unless grep {$b_element eq $_} @{$a};
+ }
}
sub _construct_object {
my ($self, @row) = @_;
my @as = @{ $self->{_attrs}{as} };
-
+
my $info = $self->_collapse_result(\@as, \@row);
my $new = $self->result_class->inflate_result($self->result_source, @$info);
$new = $self->{_attrs}{record_filter}->($new)
$info->[0] = $const{$key};
}
}
-
my @collapse;
+
if (defined $prefix) {
@collapse = map {
m/^\Q${prefix}.\E(.+)$/ ? ($1) : ()
}
my $c_prefix = (defined($prefix) ? "${prefix}.${c}" : $c);
my @co_key = @{$self->{_attrs}->{collapse}{$c_prefix}};
- my %co_check = map { ($_, $target->[0]->{$_}); } @co_key;
my $tree = $self->_collapse_result($as, $row, $c_prefix);
+ my %co_check = map { ($_, $tree->[0]->{$_}); } @co_key;
my (@final, @raw);
- while ( !(grep {
- !defined($tree->[0]->{$_}) ||
- $co_check{$_} ne $tree->[0]->{$_}
- } @co_key) ) {
+
+ while (
+ !(
+ grep {
+ !defined($tree->[0]->{$_}) || $co_check{$_} ne $tree->[0]->{$_}
+ } @co_key
+ )
+ ) {
push(@final, $tree);
last unless (@raw = $self->cursor->next);
$row = $self->{stashed_row} = \@raw;
@$target = (@final ? @final : [ {}, {} ]);
# single empty result to indicate an empty prefetched has_many
}
+
+ #print "final info: " . Dumper($info);
return $info;
}
my $self = shift;
return $self->search(@_)->count if @_ and defined $_[0];
return scalar @{ $self->get_cache } if $self->get_cache;
-
my $count = $self->_count;
return 0 unless $count;
# offset, order by and page are not needed to count. record_filter is cdbi
delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/;
- my ($count) = (ref $self)->new($self->result_source, $attrs)->cursor->next;
+ my $tmp_rs = (ref $self)->new($self->result_source, $attrs);
+ $tmp_rs->{_parent_rs} = $self->{_parent_rs} if ($self->{_parent_rs});
+ #XXX - hack to pass through parent of related resultsets
+
+ my ($count) = $tmp_rs->cursor->next;
return $count;
}
sub related_resultset {
my ( $self, $rel ) = @_;
-
+
$self->{related_resultsets} ||= {};
return $self->{related_resultsets}{$rel} ||= do {
- #warn "fetching related resultset for rel '$rel' " . $self->result_source->{name};
- my $rel_obj = $self->result_source->relationship_info($rel);
- $self->throw_exception(
- "search_related: result source '" . $self->result_source->name .
+ #warn "fetching related resultset for rel '$rel' " . $self->result_source->{name};
+ my $rel_obj = $self->result_source->relationship_info($rel);
+ #print Dumper($self->result_source->_relationships);
+ $self->throw_exception(
+ "search_related: result source '" . $self->result_source->name .
"' has no such relationship ${rel}")
- unless $rel_obj; #die Dumper $self->{attrs};
-
- my $rs = $self->result_source->schema->resultset($rel_obj->{class}
- )->search( undef,
- { %{$self->{attrs}},
- select => undef,
- as => undef,
- join => $rel,
- _live_join => $rel }
- );
-
- # keep reference of the original resultset
- $rs->{_parent_rs} = $self->result_source;
- return $rs;
+ unless $rel_obj; #die Dumper $self->{attrs};
+
+ my @live_join_stack = (
+ exists $self->{attrs}->{_live_join_stack})
+ ? @{$self->{attrs}->{_live_join_stack}}
+ : ();
+
+ push(@live_join_stack, $rel);
+
+ my $rs = $self->result_source->schema->resultset($rel_obj->{class})->search(
+ undef, {
+ select => undef,
+ as => undef,
+ _live_join => $rel, #the most recent
+ _live_join_stack => \@live_join_stack, #the trail of rels
+ _parent_attrs => $self->{attrs}}
+ );
+
+ # keep reference of the original resultset
+ $rs->{_parent_rs} = ($self->{_parent_rs})
+ ? $self->{_parent_rs}
+ : $self->result_source;
+
+ return $rs;
};
}
=head1 NAME
- DBIx::Class::ResultSetManager - helpful methods for managing
- resultset classes (EXPERIMENTAL)
+DBIx::Class::ResultSetManager - helpful methods for managing resultset
+classes (EXPERIMENTAL)
=head1 SYNOPSIS
# in a table class
__PACKAGE__->load_components(qw/ResultSetManager Core/); # note order!
__PACKAGE__->load_resultset_components(qw/AlwaysRS/);
-
+
# will be removed from the table class and inserted into a
# table-specific resultset class
sub search_by_year_desc : ResultSet {
__PACKAGE__->base_resultset_class('DBIx::Class::ResultSet');
__PACKAGE__->table_resultset_class_suffix('::_resultset');
+=head2 table
+
+Stacks on top of the normal L<DBIx::Class> C<table> method. Any
+methods tagged with the C<ResultSet> attribute will be moved into a
+table-specific resultset class (by default called
+C<Class::_resultset>, but configurable via
+C<table_resultset_class_suffix>). The magic for this is done within
+this C<< __PACKAGE__->table >> call.
+
+=cut
+
sub table {
my ($self,@rest) = @_;
my $ret = $self->next::method(@rest);
return $ret;
}
+=head2 load_resultset_components
+
+ # in a table class
+ __PACKAGE__->load_components(qw/ResultSetManager Core/); # note order!
+ __PACKAGE__->load_resultset_components(qw/AlwaysRS/);
+
+C<load_resultset_components> loads components in addition to
+C<DBIx::Class::ResultSet> (or whatever you set as
+C<base_resultset_class>).
+
+=cut
+
sub load_resultset_components {
my ($self,@comp) = @_;
my $resultset_class = $self->_setup_resultset_class;
my $self = shift;
my $cache = $self->_attr_cache;
return if keys %$cache == 0;
-
+
foreach my $meth (@{Class::Inspector->methods($self) || []}) {
my $attrs = $cache->{$self->can($meth)};
next unless $attrs;
=head1 METHODS
+=pod
+
+=head2 new
+
+ $class->new();
+
+ $class->new({attribute_name => value});
+
+Creates a new ResultSource object. Not normally called directly by end users.
+
=cut
sub new {
$lc_info->{lc $realcol} = $info->{$realcol};
}
foreach my $col ( keys %{$self->_columns} ) {
- $self->_columns->{$col} = $info->{$col} || $lc_info->{lc $col};
+ $self->_columns->{$col} = { %{ $self->_columns->{$col}}, %{$info->{$col} || $lc_info->{lc $col}} };
}
}
}
return undef;
}
+=head2 has_column_loaded
+
+ if ( $obj->has_column_loaded($col) ) {
+ print "$col has been loaded from db";
+ }
+
+Returns a true value if the column value has been loaded from the
+database (or set locally).
+
+=cut
+
sub has_column_loaded {
my ($self, $column) = @_;
$self->throw_exception( "Can't call has_column data as class method" ) unless ref $self;
Updates the object if it's already in the db, else inserts it.
+=head2 insert_or_update
+
+ $obj->insert_or_update
+
+Alias for L</update_or_insert>
+
=cut
*insert_or_update = \&update_or_insert;
my @changed_col_names = $obj->is_changed();
if ($obj->is_changed()) { ... }
+In array context returns a list of columns with uncommitted changes, or
+in scalar context returns a true value if there are uncommitted
+changes.
+
=cut
sub is_changed {
if ($obj->is_column_changed('col')) { ... }
+Returns a true value if the column has uncommitted changes.
+
=cut
sub is_column_changed {
=head2 result_source
- Accessor to the ResultSource this object was created from
+ my $resultsource = $object->result_source;
-=head2 register_column
+Accessor to the ResultSource this object was created from
-=over 4
+=head2 register_column
-=item Arguments: $column, $column_info
+ $column_info = { .... };
+ $class->register_column($column_name, $column_info);
-=back
+Registers a column on the class. If the column_info has an 'accessor'
+key, creates an accessor named after the value if defined; if there is
+no such key, creates an accessor with the same name as the column.
- Registers a column on the class. If the column_info has an 'accessor' key,
- creates an accessor named after the value if defined; if there is no such
- key, creates an accessor with the same name as the column
+The column_info attributes are described in
+L<DBIx::Class::ResultSource/add_columns>
=cut
$self->storage->create_ddl_dir($self, @_);
}
+=head2 ddl_filename (EXPERIMENTAL)
+
+ my $filename = $table->ddl_filename($type, $dir, $version)
+
+Creates a filename for a SQL file based on the table class name. Not
+intended for direct end user use.
+
+=cut
+
sub ddl_filename
{
my ($self, $type, $dir, $version) = @_;
# in a table class definition
__PACKAGE__->load_components(qw/Serialize::Storable/);
-
+
# meanwhile, in a nearby piece of code
my $cd = $schema->resultset('CD')->find(12);
# if the cache uses Storable, this will work automatically
serialized. It assumes that your row object class (C<result_class>) is
the same as your table class, which is the normal situation.
+=head1 HOOKS
+
+The following hooks are defined for L<Storable> - see the
+documentation for L<Storable/Hooks> for detailed information on these
+hooks.
+
+=head2 STORABLE_freeze
+
+The serializing hook, called on the object during serialization. It
+can be inherited, or defined in the class itself, like any other
+method.
+
+=head2 STORABLE_thaw
+
+The deserializing hook called on the object during deserialization.
+
=head1 AUTHORS
David Kamholz <dkamholz@cpan.org>
# check whether a join type exists
my $join_clause = '';
- if (ref($to) eq 'HASH' and exists($to->{-join_type})) {
- $join_clause = ' '.uc($to->{-join_type}).' JOIN ';
+ my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to;
+ if (ref($to_jt) eq 'HASH' and exists($to_jt->{-join_type})) {
+ $join_clause = ' '.uc($to_jt->{-join_type}).' JOIN ';
} else {
$join_clause = ' JOIN ';
}
$self->SUPER::_RowNum(@_);
}
-# Accessor for setting limit dialect. This is useful
-# for JDBC-bridge among others where the remote SQL-dialect cannot
-# be determined by the name of the driver alone.
-#
sub limit_dialect {
my $self = shift;
$self->{limit_dialect} = shift if @_;
qw/_connect_info _dbh _sql_maker _conn_pid _conn_tid debug debugobj
cursor on_connect_do transaction_depth/);
+=head1 NAME
+
+DBIx::Class::Storage::DBI - DBI storage handler
+
+=head1 SYNOPSIS
+
+=head1 DESCRIPTION
+
+This class represents the connection to the database
+
+=head1 METHODS
+
+=head2 new
+
+=cut
+
sub new {
my $new = bless({}, ref $_[0] || $_[0]);
$new->cursor("DBIx::Class::Storage::DBI::Cursor");
} else {
$fh = IO::File->new('>&STDERR');
}
- $new->debugobj->debugfh($fh);
+ $new->debugfh($fh);
$new->debug(1) if $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG};
return $new;
}
+=head2 throw_exception
+
+Throws an exception - croaks.
+
+=cut
+
sub throw_exception {
my ($self, $msg) = @_;
croak($msg);
}
-=head1 NAME
-
-DBIx::Class::Storage::DBI - DBI storage handler
+=head2 connect_info
-=head1 SYNOPSIS
+The arguments of C<connect_info> are always a single array reference.
-=head1 DESCRIPTION
+This is normally accessed via L<DBIx::Class::Schema/connection>, which
+encapsulates its argument list in an arrayref before calling
+C<connect_info> here.
-This class represents the connection to the database
+The arrayref can either contain the same set of arguments one would
+normally pass to L<DBI/connect>, or a lone code reference which returns
+a connected database handle.
-=head1 METHODS
+In either case, there is an optional final element within the arrayref
+which can hold a hashref of connection-specific Storage::DBI options.
+These include C<on_connect_do>, and the sql_maker options
+C<limit_dialect>, C<quote_char>, and C<name_sep>. Examples:
-=cut
+ ->connect_info([ 'dbi:SQLite:./foo.db' ]);
-=head2 connect_info
+ ->connect_info([ sub { DBI->connect(...) } ]);
-Connection information arrayref. Can either be the same arguments
-one would pass to DBI->connect, or a code-reference which returns
-a connected database handle. In either case, there is an optional
-final element in the arrayref, which can hold a hashref of
-connection-specific Storage::DBI options. These include
-C<on_connect_do>, and the sql_maker options C<limit_dialect>,
-C<quote_char>, and C<name_sep>. Examples:
+ ->connect_info(
+ [
+ 'dbi:Pg:dbname=foo',
+ 'postgres',
+ 'my_pg_password',
+ { AutoCommit => 0 },
+ { quote_char => q{`}, name_sep => q{@} },
+ ]
+ );
- ->connect_info([ 'dbi:SQLite:./foo.db' ]);
- ->connect_info(sub { DBI->connect(...) });
- ->connect_info([ 'dbi:Pg:dbname=foo',
- 'postgres',
- '',
- { AutoCommit => 0 },
- { quote_char => q{`}, name_sep => q{@} },
- ]);
+ ->connect_info(
+ [
+ sub { DBI->connect(...) },
+ { quote_char => q{`}, name_sep => q{@} },
+ ]
+ );
=head2 on_connect_do
Executes the sql statements given as a listref on every db connect.
-=head2 quote_char
-
-Specifies what characters to use to quote table and column names. If
-you use this you will want to specify L<name_sep> as well.
-
-quote_char expectes either a single character, in which case is it is placed
-on either side of the table/column, or an array of length 2 in which case the
-table/column name is placed between the elements.
-
-For example under MySQL you'd use C<quote_char('`')>, and user SQL Server you'd
-use C<quote_char(qw/[ ]/)>.
-
-=head2 name_sep
-
-This only needs to be used in conjunction with L<quote_char>, and is used to
-specify the charecter that seperates elements (schemas, tables, columns) from
-each other. In most cases this is simply a C<.>.
+This option can also be set via L</connect_info>.
=head2 debug
set to be STDERR - although see information on the
L<DBIX_CLASS_STORAGE_DBI_DEBUG> environment variable.
+=cut
+
+sub debugfh {
+ my $self = shift;
+
+ if ($self->debugobj->can('debugfh')) {
+ return $self->debugobj->debugfh(@_);
+ }
+}
+
=head2 debugobj
Sets or retrieves the object used for metric collection. Defaults to an instance
See L<debugobj> for a better way.
=cut
+
sub debugcb {
- my $self = shift();
+ my $self = shift;
- if($self->debugobj()->can('callback')) {
- $self->debugobj()->callback(shift());
+ if ($self->debugobj->can('callback')) {
+ return $self->debugobj->callback(@_);
}
}
+=head2 disconnect
+
+Disconnect the L<DBI> handle, performing a rollback first if the
+database is not in C<AutoCommit> mode.
+
+=cut
+
sub disconnect {
my ($self) = @_;
}
}
-sub connected {
- my ($self) = @_;
+=head2 connected
+
+Check if the L<DBI> handle is connected. Returns true if the handle
+is connected.
+
+=cut
+
+sub connected { my ($self) = @_;
if(my $dbh = $self->_dbh) {
if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
return 0;
}
+=head2 ensure_connected
+
+Check whether the database handle is connected - if not then make a
+connection.
+
+=cut
+
sub ensure_connected {
my ($self) = @_;
return ( limit_dialect => $self->dbh );
}
+=head2 sql_maker
+
+Returns a C<sql_maker> object - normally an object of class
+C<DBIC::SQL::Abstract>.
+
+=cut
+
sub sql_maker {
my ($self) = @_;
unless ($self->_sql_maker) {
}
sub connect_info {
- my ($self, $info_arg) = @_;
-
- if($info_arg) {
- my $info = [ @$info_arg ]; # copy because we can alter it
- my $last_info = $info->[-1];
- if(ref $last_info eq 'HASH') {
- my $used;
- if(my $on_connect_do = $last_info->{on_connect_do}) {
- $used = 1;
- $self->on_connect_do($on_connect_do);
- }
- for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
- if(my $opt_val = $last_info->{$sql_maker_opt}) {
- $used = 1;
- $self->sql_maker->$sql_maker_opt($opt_val);
- }
- }
-
- # remove our options hashref if it was there, to avoid confusing
- # DBI in the case the user didn't use all 4 DBI options, as in:
- # [ 'dbi:SQLite:foo.db', { quote_char => q{`} } ]
- pop(@$info) if $used;
+ my ($self, $info_arg) = @_;
+
+ if($info_arg) {
+ my %sql_maker_opts;
+ my $info = [ @$info_arg ]; # copy because we can alter it
+ my $last_info = $info->[-1];
+ if(ref $last_info eq 'HASH') {
+ my $used;
+ if(my $on_connect_do = $last_info->{on_connect_do}) {
+ $used = 1;
+ $self->on_connect_do($on_connect_do);
+ }
+ for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
+ if(my $opt_val = $last_info->{$sql_maker_opt}) {
+ $used = 1;
+ $sql_maker_opts{$sql_maker_opt} = $opt_val;
}
+ }
- $self->_connect_info($info);
+ # remove our options hashref if it was there, to avoid confusing
+ # DBI in the case the user didn't use all 4 DBI options, as in:
+ # [ 'dbi:SQLite:foo.db', { quote_char => q{`} } ]
+ pop(@$info) if $used;
}
- $self->_connect_info;
+ $self->_connect_info($info);
+ $self->sql_maker->$_($sql_maker_opts{$_}) for(keys %sql_maker_opts);
+ }
+
+ $self->_connect_info;
}
sub _populate_dbh {
my ($self) = @_;
my @info = @{$self->_connect_info || []};
$self->_dbh($self->_connect(@info));
- my $driver = $self->_dbh->{Driver}->{Name};
- eval "require DBIx::Class::Storage::DBI::${driver}";
- unless ($@) {
- bless $self, "DBIx::Class::Storage::DBI::${driver}";
- $self->_rebless() if $self->can('_rebless');
+
+ if(ref $self eq 'DBIx::Class::Storage::DBI') {
+ my $driver = $self->_dbh->{Driver}->{Name};
+ if ($self->load_optional_class("DBIx::Class::Storage::DBI::${driver}")) {
+ bless $self, "DBIx::Class::Storage::DBI::${driver}";
+ $self->_rebless() if $self->can('_rebless');
+ }
}
+
# if on-connect sql statements are given execute them
foreach my $sql_statement (@{$self->on_connect_do || []}) {
$self->debugobj->query_start($sql_statement) if $self->debug();
}
eval {
- if(ref $info[0] eq 'CODE') {
- $dbh = &{$info[0]};
- }
- else {
- $dbh = DBI->connect(@info);
- }
+ $dbh = ref $info[0] eq 'CODE'
+ ? &{$info[0]}
+ : DBI->connect(@info);
};
$DBI::connect_via = $old_connect_via if $old_connect_via;
return $self->_execute(@args);
}
+=head2 select
+
+Handle a SQL select statement.
+
+=cut
+
sub select {
my $self = shift;
my ($ident, $select, $condition, $attrs) = @_;
return $self->cursor->new($self, \@_, $attrs);
}
+=head2 select_single
+
+Performs a select, fetch and return of data - handles a single row
+only.
+
+=cut
+
# Need to call finish() to work round broken DBDs
sub select_single {
return @row;
}
+=head2 sth
+
+Returns a L<DBI> sth (statement handle) for the supplied SQL.
+
+=cut
+
sub sth {
my ($self, $sql) = @_;
# 3 is the if_active parameter which avoids active sth re-use
return \%result;
}
+=head2 last_insert_id
+
+Return the row id of the last insert.
+
+=cut
+
sub last_insert_id {
my ($self, $row) = @_;
}
+=head2 sqlt_type
+
+Returns the database driver name.
+
+=cut
+
sub sqlt_type { shift->dbh->{Driver}->{Name} }
+=head2 create_ddl_dir (EXPERIMENTAL)
+
+=over 4
+
+=item Arguments: $schema \@databases, $version, $directory, $sqlt_args
+
+=back
+
+Creates an SQL file based on the Schema, for each of the specified
+database types, in the given directory.
+
+Note that this feature is currently EXPERIMENTAL and may not work correctly
+across all databases, or fully handle complex relationships.
+
+=cut
+
sub create_ddl_dir
{
my ($self, $schema, $databases, $version, $dir, $sqltargs) = @_;
}
+=head2 deployment_statements
+
+Create the statements for L</deploy> and
+L<DBIx::Class::Schema/deploy>.
+
+=cut
+
sub deployment_statements {
my ($self, $schema, $type, $version, $dir, $sqltargs) = @_;
$type ||= $self->sqlt_type;
}
+=head2 deploy
+
+Sends the appropriate statements to create or modify tables to the
+db. This would normally be called through
+L<DBIx::Class::Schema/deploy>.
+
+=cut
+
sub deploy {
my ($self, $schema, $type, $sqltargs) = @_;
foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, $sqltargs) ) {
# next if($_ =~ /^DROP/m);
next if($_ =~ /^BEGIN TRANSACTION/m);
next if($_ =~ /^COMMIT/m);
- $self->debugobj->query_begin($_) if $self->debug;
+ $self->debugobj->query_start($_) if $self->debug;
$self->dbh->do($_) or warn "SQL was:\n $_";
$self->debugobj->query_end($_) if $self->debug;
}
}
}
+=head2 datetime_parser
+
+Returns the datetime parser class
+
+=cut
+
sub datetime_parser {
my $self = shift;
return $self->{datetime_parser} ||= $self->build_datetime_parser(@_);
}
+=head2 datetime_parser_type
+
+Defines (returns) the datetime parser class - currently hardwired to
+L<DateTime::Format::MySQL>
+
+=cut
+
sub datetime_parser_type { "DateTime::Format::MySQL"; }
+=head2 build_datetime_parser
+
+See L</datetime_parser>
+
+=cut
+
sub build_datetime_parser {
my $self = shift;
my $type = $self->datetime_parser_type(@_);
1;
+=head1 SQL METHODS
+
+The module defines a set of methods within the DBIC::SQL::Abstract
+namespace. These build on L<SQL::Abstract::Limit> to provide the
+SQL query functions.
+
+The following methods are extended:-
+
+=over 4
+
+=item delete
+
+=item insert
+
+=item select
+
+=item update
+
+=item limit_dialect
+
+Accessor for setting limit dialect. This is useful
+for JDBC-bridge among others where the remote SQL-dialect cannot
+be determined by the name of the driver alone.
+
+This option can also be set via L</connect_info>.
+
+=item quote_char
+
+Specifies what characters to use to quote table and column names. If
+you use this you will want to specify L<name_sep> as well.
+
+quote_char expects either a single character, in which case it is placed
+on either side of the table/column, or an arrayref of length 2 in which case the
+table/column name is placed between the elements.
+
+For example under MySQL you'd use C<quote_char('`')>, and under SQL Server you'd
+use C<quote_char(qw/[ ]/)>.
+
+This option can also be set via L</connect_info>.
+
+=item name_sep
+
+This only needs to be used in conjunction with L<quote_char>, and is used to
+specify the character that separates elements (schemas, tables, columns) from
+each other. In most cases this is simply a C<.>.
+
+This option can also be set via L</connect_info>.
+
+=back
+
=head1 ENVIRONMENT VARIABLES
=head2 DBIX_CLASS_STORAGE_DBI_DEBUG
package DBIx::Class::Storage::DBI::MSSQL;
-\r
+
use strict;
use warnings;
-\r
+
use base qw/DBIx::Class::Storage::DBI/;
-\r
-# __PACKAGE__->load_components(qw/PK::Auto/);
-\r
+
sub last_insert_id {
my( $id ) = $_[0]->_dbh->selectrow_array('SELECT @@IDENTITY' );
return $id;
$self->throw_exception("Couldn't load ${type}: $@") if $@;
return $type->new( pattern => '%m/%d/%Y %H:%M:%S' );
}
-\r
+
1;
-\r
+
=head1 NAME
-\r
-DBIx::Class::Storage::DBI::MSSQL - Automatic primary key class for MSSQL
-\r
+
+DBIx::Class::Storage::DBI::MSSQL - Storage::DBI subclass for MSSQL
+
=head1 SYNOPSIS
-\r
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto Core/);
- __PACKAGE__->set_primary_key('id');
-\r
-=head1 DESCRIPTION
-\r
-This class implements autoincrements for MSSQL.
-\r
+
+This subclass supports MSSQL, and can in theory be used directly
+via the C<storage_type> mechanism:
+
+ $schema->storage_type('::DBI::MSSQL');
+ $schema->connect_info('dbi:....', ...);
+
+However, as there is no L<DBD::MSSQL>, you will probably want to use
+one of the other DBD-specific MSSQL classes, such as
+L<DBIx::Class::Storage::DBI::Sybase::MSSQL>. These classes will
+merge this class with a DBD-specific class to obtain fully
+correct behavior for your scenario.
+
=head1 AUTHORS
-\r
+
Brian Cassidy <bricas@cpan.org>
-\r
+
=head1 LICENSE
-\r
+
You may distribute this code under the same terms as Perl itself.
-\r
+
=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::NoBindVars;
+
+use strict;
+use warnings;
+
+use base 'DBIx::Class::Storage::DBI';
+
+sub _execute {
+ my ($self, $op, $extra_bind, $ident, @args) = @_;
+ my ($sql, @bind) = $self->sql_maker->$op($ident, @args);
+ unshift(@bind, @$extra_bind) if $extra_bind;
+ if ($self->debug) {
+ my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind;
+ $self->debugobj->query_start($sql, @debug_bind);
+ }
+
+ while(my $bvar = shift @bind) {
+ $bvar = $self->dbh->quote($bvar);
+ $sql =~ s/\?/$bvar/;
+ }
+
+ my $sth = eval { $self->sth($sql,$op) };
+
+ if (!$sth || $@) {
+ $self->throw_exception(
+ 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql"
+ );
+ }
+
+ my $rv;
+ if ($sth) {
+ my $time = time();
+ $rv = eval { $sth->execute };
+
+ if ($@ || !$rv) {
+ $self->throw_exception("Error executing '$sql': ".($@ || $sth->errstr));
+ }
+ } else {
+ $self->throw_exception("'$sql' did not generate a statement.");
+ }
+ if ($self->debug) {
+ my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind;
+ $self->debugobj->query_end($sql, @debug_bind);
+ }
+ return (wantarray ? ($rv, $sth, @bind) : $rv);
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::NoBindVars - Sometimes DBDs have poor or no support for bind variables
+
+=head1 SYNOPSIS
+
+=head1 DESCRIPTION
+
+This class allows queries to work when the DBD or underlying library does not
+support the usual C<?> placeholders, or at least doesn't support them very
+well, as is the case with L<DBD::Sybase>.
+
+=head1 AUTHORS
+
+Brandon Black <blblack@gmail.com>
+Trym Skaar <trym@tryms.no>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
use strict;
use warnings;
+use DBD::Pg;
+
use base qw/DBIx::Class::Storage::DBI/;
# __PACKAGE__->load_components(qw/PK::Auto/);
--- /dev/null
+package DBIx::Class::Storage::DBI::Sybase;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI::NoBindVars/;
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Sybase - Storage::DBI subclass for Sybase
+
+=head1 SYNOPSIS
+
+This subclass supports L<DBD::Sybase> for real Sybase databases. If
+you are using an MSSQL database via L<DBD::Sybase>, see
+L<DBIx::Class::Storage::DBI::Sybase::MSSQL>.
+
+=head1 AUTHORS
+
+Brandon L Black <blblack@gmail.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::Sybase::MSSQL;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI::MSSQL DBIx::Class::Storage::DBI::Sybase/;
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Sybase::MSSQL - Storage::DBI subclass for MSSQL via
+DBD::Sybase
+
+=head1 SYNOPSIS
+
+This subclass supports MSSQL connected via L<DBD::Sybase>.
+
+ $schema->storage_type('::DBI::Sybase::MSSQL');
+ $schema->connect_info('dbi:Sybase:....', ...);
+
+=head1 AUTHORS
+
+Brandon L Black <blblack@gmail.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
+++ /dev/null
-package DBIx::Class::Validation;
-
-use strict;
-use warnings;
-
-use base qw( DBIx::Class );
-use English qw( -no_match_vars );
-
-#local $^W = 0; # Silence C:D:I redefined sub errors.
-# Switched to C::D::Accessor which doesn't do this. Hate hate hate hate.
-
-our $VERSION = '0.01';
-
-__PACKAGE__->mk_classdata( 'validation_module' => 'FormValidator::Simple' );
-__PACKAGE__->mk_classdata( 'validation_profile' );
-__PACKAGE__->mk_classdata( 'validation_auto' => 1 );
-
-sub validation_module {
- my $class = shift;
- my $module = shift;
-
- eval("use $module");
- $class->throw_exception("Unable to load the validation module '$module' because $EVAL_ERROR") if ($EVAL_ERROR);
- $class->throw_exception("The '$module' module does not support the check method") if (!$module->can('check'));
-
- $class->_validation_module_accessor( $module );
-}
-
-sub validation {
- my $class = shift;
- my %args = @_;
-
- $class->validation_module( $args{module} ) if (exists $args{module});
- $class->validation_profile( $args{profile} ) if (exists $args{profile});
- $class->validation_auto( $args{auto} ) if (exists $args{auto});
-}
-
-sub validate {
- my $self = shift;
- my %data = $self->get_columns();
- my $module = $self->validation_module();
- my $profile = $self->validation_profile();
- my $result = $module->check( \%data => $profile );
- return $result if ($result->success());
- $self->throw_exception( $result );
-}
-
-sub insert {
- my $self = shift;
- $self->validate if ($self->validation_auto());
- $self->next::method(@_);
-}
-
-sub update {
- my $self = shift;
- $self->validate if ($self->validation_auto());
- $self->next::method(@_);
-}
-
-1;
-__END__
-
-=head1 NAME
-
-DBIx::Class::Validation - Validate all data before submitting to your database.
-
-=head1 SYNOPSIS
-
-In your base DBIC package:
-
- __PACKAGE__->load_components(qw/... Validation/);
-
-And in your subclasses:
-
- __PACKAGE__->validation(
- module => 'FormValidator::Simple',
- profile => { ... },
- auto => 1,
- );
-
-And then somewhere else:
-
- eval{ $obj->validate() };
- if( my $results = $EVAL_ERROR ){
- ...
- }
-
-=head1 METHODS
-
-=head2 validation
-
- __PACKAGE__->validation(
- module => 'FormValidator::Simple',
- profile => { ... },
- auto => 1,
- );
-
-Calls validation_module(), validation_profile(), and validation_auto() if the corresponding
-argument is defined.
-
-=head2 validation_module
-
- __PACKAGE__->validation_module('Data::FormValidator');
-
-Sets the validation module to use. Any module that supports a check() method just like
-Data::FormValidator's can be used here, such as FormValidator::Simple.
-
-Defaults to FormValidator::Simple.
-
-=head2 validation_profile
-
- __PACKAGE__->validation_profile(
- { ... }
- );
-
-Sets the profile that will be passed to the validation module.
-
-=head2 validation_auto
-
- __PACKAGE__->validation_auto( 1 );
-
-This flag, when enabled, causes any updates or inserts of the class
-to call validate() before actually executing.
-
-=head2 validate
-
- $obj->validate();
-
-Validates all the data in the object against the pre-defined validation
-module and profile. If there is a problem then a hard error will be
-thrown. If you put the validation in an eval you can capture whatever
-the module's check() method returned.
-
-=head2 auto_validate
-
- __PACKAGE__->auto_validate( 0 );
-
-Turns on and off auto-validation. This feature makes all UPDATEs and
-INSERTs call the validate() method before doing anything. The default
-is for auto-validation to be on.
-
-Defaults to on.
-
-=head1 AUTHOR
-
-Aran C. Deltac <bluefeet@cpan.org>
-
-=head1 LICENSE
-
-You may distribute this code under the same terms as Perl itself.
-
--- /dev/null
+use Test::More;
+
+eval "use Test::Pod::Coverage 1.04";
+plan skip_all => 'Test::Pod::Coverage 1.04 required' if $@;
+plan skip_all => 'set TEST_POD to enable this test' unless $ENV{TEST_POD};
+
+my @modules = sort { $a cmp $b } (all_modules());
+plan tests => scalar(@modules);
+
+# Since this is about checking documentation, a little documentation
+# of what this is doing might be in order...
+# The exceptions structure below is a hash keyed by the module
+# name. The value for each is a hash, which contains one or more
+# (although currently more than one makes no sense) of the following
+# things:-
+# skip => a true value means this module is not checked
+# ignore => array ref containing list of methods which
+# do not need to be documented.
+my $exceptions = {
+ 'DBIx::Class' => {
+ ignore => [
+ qw/MODIFY_CODE_ATTRIBUTES
+ component_base_class
+ mk_classdata/
+ ]
+ },
+ 'DBIx::Class::CDBICompat::AccessorMapping' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::AttributeAPI' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::AutoUpdate' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::ColumnCase' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::ColumnGroups' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Constraints' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Constructor' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::DestroyWarning' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::GetSet' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::HasA' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::HasMany' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::ImaDBI' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::LazyLoading' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::LiveObjectIndex' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::MightHave' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::ObjIndexStubs' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Pager' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::ReadOnly' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Retrieve' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Stringify' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::TempColumns' => { skip => 1 },
+ 'DBIx::Class::CDBICompat::Triggers' => { skip => 1 },
+ 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 },
+ 'DBIx::Class::Componentised' => { skip => 1 },
+ 'DBIx::Class::Relationship::Accessor' => { skip => 1 },
+ 'DBIx::Class::Relationship::BelongsTo' => { skip => 1 },
+ 'DBIx::Class::Relationship::CascadeActions' => { skip => 1 },
+ 'DBIx::Class::Relationship::HasMany' => { skip => 1 },
+ 'DBIx::Class::Relationship::HasOne' => { skip => 1 },
+ 'DBIx::Class::Relationship::Helpers' => { skip => 1 },
+ 'DBIx::Class::Relationship::ManyToMany' => { skip => 1 },
+ 'DBIx::Class::Relationship::ProxyMethods' => { skip => 1 },
+ 'DBIx::Class::ResultSetProxy' => { skip => 1 },
+ 'DBIx::Class::ResultSourceProxy' => { skip => 1 },
+ 'DBIx::Class::Storage' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::DB2' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::MSSQL' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::MultiDistinctEmulation' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::ODBC400' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::ODBC::DB2_400_SQL' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::Oracle' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::Pg' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::SQLite' => { skip => 1 },
+ 'DBIx::Class::Storage::DBI::mysql' => { skip => 1 },
+ 'SQL::Translator::Parser::DBIx::Class' => { skip => 1 },
+ 'SQL::Translator::Producer::DBIx::Class::File' => { skip => 1 },
+};
+
+foreach my $module (@modules) {
+ SKIP:
+ {
+ skip "No real methods", 1 if ($exceptions->{$module}{skip});
+
+ # build parms up from ignore list
+ my $parms = {};
+ $parms->{trustme} =
+ [ map { qr/^$_$/ } @{ $exceptions->{$module}{ignore} } ]
+ if exists($exceptions->{$module}{ignore});
+
+ # run the test with the potentially modified parm set
+ pod_coverage_ok($module, $parms, "$module POD coverage");
+ }
+}
+++ /dev/null
-use Test::More;
-
-eval "use Test::Pod::Coverage 1.04";
-plan skip_all => 'Test::Pod::Coverage 1.04 required' if $@;
-plan skip_all => 'set TEST_POD to enable this test' unless $ENV{TEST_POD};
-
-all_pod_coverage_ok();
use lib qw(t/lib);
use DBICTest::ForeignComponent;
-plan tests => 2;
+plan tests => 5;
# Tests if foreign component was loaded by calling foreign's method
ok( DBICTest::ForeignComponent->foreign_test_method, 'foreign component' );
/],
'inject_base filters duplicates'
);
+
+# Test for a warning with incorrect order in load_components
+my @warnings = ();
+{
+ package A::Test;
+ our @ISA = 'DBIx::Class';
+ {
+ local $SIG{__WARN__} = sub { push @warnings, shift};
+ __PACKAGE__->load_components(qw(Core UTF8Columns));
+ }
+}
+like( $warnings[0], qr/Core loaded before UTF8Columns/,
+ 'warning issued for incorrect order in load_components()' );
+is( scalar @warnings, 1,
+ 'only one warning issued for incorrect load_components call' );
+
+# Test that no warning is issued for the correct order in load_components
+{
+ @warnings = ();
+ package B::Test;
+ our @ISA = 'DBIx::Class';
+ {
+ local $SIG{__WARN__} = sub { push @warnings, shift };
+ __PACKAGE__->load_components(qw(UTF8Columns Core));
+ }
+}
+is( scalar @warnings, 0,
+ 'warning not issued for correct order in load_components()' );
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+plan tests => 1;
+
+# Set up the "usual" sqlite for DBICTest
+my $normal_schema = DBICTest->init_schema;
+
+# Steal the dsn, which should be like 'dbi:SQLite:t/var/DBIxClass.db'
+my $normal_dsn = $normal_schema->storage->connect_info->[0];
+
+# Make sure we have no active connection
+$normal_schema->storage->disconnect;
+
+# Make a new clone with a new connection, using a code reference
+my $code_ref_schema = $normal_schema->connect(sub { DBI->connect($normal_dsn); });
+
+# Stolen from 60core.t - this just verifies things seem to work at all
+my @art = $code_ref_schema->resultset("Artist")->search({ }, { order_by => 'name DESC'});
+cmp_ok(@art, '==', 3, "Three artists returned");
my $schema = DBICTest->init_schema();
-plan tests => 60;
+plan tests => 61;
# figure out if we've got a version of sqlite that is older than 3.2.6, in
# which case COUNT(DISTINCT()) doesn't work
# Test backwards compatibility
{
+ my $warnings = '';
+ local $SIG{__WARN__} = sub { $warnings .= $_[0] };
+
my $artist_by_hash = $schema->resultset('Artist')->find(artistid => 4);
is($artist_by_hash->name, 'Man With A Spoon', 'Retrieved correctly');
is($artist_by_hash->ID, 'DBICTest::Artist|artist|artistid=4', 'unique object id generated correctly');
+ like($warnings, qr/deprecated/, 'warned about deprecated find usage');
}
is($schema->resultset("Artist")->count, 4, 'count ok');
use lib qw(t/lib);
use DBICTest;
+DBICTest::Schema::CD->add_column('year');
my $schema = DBICTest->init_schema();
eval { require DateTime };
plan tests => 4;
+my $storage_type = '::DBI::MSSQL';
+$storage_type = '::DBI::Sybase::MSSQL' if $dsn =~ /^dbi:Sybase:/;
+# Add more for others in the future when they exist (ODBC? ADO? JDBC?)
+
+DBICTest::Schema->storage_type($storage_type);
DBICTest::Schema->compose_connection( 'MSSQLTest' => $dsn, $user, $pass );
my $dbh = MSSQLTest->schema->storage->dbh;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 42 );
+ : ( tests => 43 );
}
# figure out if we've got a version of sqlite that is older than 3.2.6, in
is( $sa->_recurse_from(@j3), $match, 'join 3 (inner join) ok');
+my @j4 = (
+ { mother => 'person' },
+ [ [ { child => 'person', -join_type => 'left' },
+ [ { father => 'person', -join_type => 'right' },
+ { 'father.person_id' => 'child.father_id' }
+ ]
+ ],
+ { 'mother.person_id' => 'child.mother_id' }
+ ],
+);
+$match = 'person mother LEFT JOIN (person child RIGHT JOIN person father ON ('
+ . ' father.person_id = child.father_id )) ON ( mother.person_id = '
+ . 'child.mother_id )'
+ ;
+is( $sa->_recurse_from(@j4), $match, 'join 4 (nested joins + join types) ok');
+
my $rs = $schema->resultset("CD")->search(
{ 'year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ from => [ { 'me' => 'cd' },
+++ /dev/null
-use strict;
-use warnings;
-
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-
-my $schema = DBICTest->init_schema();
-
-eval 'use Data::UUID ; 1'
- or plan skip_all => 'Install Data::UUID run this test';
-
-plan tests => 1;
-DBICTest::Schema::Artist->load_components('UUIDColumns');
-DBICTest::Schema::Artist->uuid_columns('name');
-Class::C3->reinitialize();
-
-my $artist = $schema->resultset("Artist")->create( { artistid => 100 } );
-like $artist->name, qr/[\w-]{36}/, 'got something like uuid';
-
my $schema = DBICTest->init_schema();
-plan tests => 6;
-
-ok(Class::Inspector->loaded('TestPackage::A'),
- 'anon. package exists');
-eval {
- $schema->ensure_class_loaded('TestPackage::A');
-};
-
-ok(!$@, 'ensure_class_loaded detected an anon. class');
-
-eval {
- $schema->ensure_class_loaded('FakePackage::B');
-};
-
-like($@, qr/Can't locate/,
- 'ensure_class_loaded threw exception for nonexistent class');
-
-ok(!Class::Inspector->loaded('DBICTest::FakeComponent'),
- 'DBICTest::FakeComponent not loaded yet');
-
-eval {
- $schema->ensure_class_loaded('DBICTest::FakeComponent');
-};
-
-ok(!$@, 'ensure_class_loaded detected an existing but non-loaded class');
-ok(Class::Inspector->loaded('DBICTest::FakeComponent'),
- 'DBICTest::FakeComponent now loaded');
+plan tests => 17;
+
+# Test ensure_class_found
+ok( $schema->ensure_class_found('DBIx::Class::Schema'),
+ 'loaded package DBIx::Class::Schema was found' );
+ok( !Class::Inspector->loaded('DBICTest::FakeComponent'),
+ 'DBICTest::FakeComponent not loaded yet' );
+ok( $schema->ensure_class_found('DBICTest::FakeComponent'),
+ 'package DBICTest::FakeComponent was found' );
+ok( !Class::Inspector->loaded('DBICTest::FakeComponent'),
+ 'DBICTest::FakeComponent not loaded by ensure_class_found()' );
+ok( $schema->ensure_class_found('TestPackage::A'),
+ 'anonymous package TestPackage::A found' );
+ok( !$schema->ensure_class_found('FAKE::WONT::BE::FOUND'),
+ 'fake package not found' );
+
+# Test load_optional_class
+my $retval = eval { $schema->load_optional_class('ANOTHER::FAKE::PACKAGE') };
+ok( !$@, 'load_optional_class on a nonexistent class did not throw' );
+ok( !$retval, 'nonexistent package not loaded' );
+$retval = eval { $schema->load_optional_class('DBICTest::OptionalComponent') };
+ok( !$@, 'load_optional_class on an existing class did not throw' );
+ok( $retval, 'DBICTest::OptionalComponent loaded' );
+eval { $schema->load_optional_class('DBICTest::ErrorComponent') };
+like( $@, qr/did not return a true value/, 'DBICTest::ErrorComponent threw ok' );
+
+# Test ensure_class_loaded
+ok( Class::Inspector->loaded('TestPackage::A'), 'anonymous package exists' );
+eval { $schema->ensure_class_loaded('TestPackage::A'); };
+ok( !$@, 'ensure_class_loaded detected an anon. class' );
+eval { $schema->ensure_class_loaded('FakePackage::B'); };
+like( $@, qr/Can't locate/,
+ 'ensure_class_loaded threw exception for nonexistent class' );
+ok( !Class::Inspector->loaded('DBICTest::FakeComponent'),
+ 'DBICTest::FakeComponent not loaded yet' );
+eval { $schema->ensure_class_loaded('DBICTest::FakeComponent'); };
+ok( !$@, 'ensure_class_loaded detected an existing but non-loaded class' );
+ok( Class::Inspector->loaded('DBICTest::FakeComponent'),
+ 'DBICTest::FakeComponent now loaded' );
1;
my $schema = DBICTest->init_schema();
-plan tests => 4;
+plan tests => 14;
+my @rs1a_results = $schema->resultset("Artist")->search_related('cds', {title => 'Forkful of bees'}, {order_by => 'title'});
+is($rs1a_results[0]->title, 'Forkful of bees', "bare field conditions okay after search related");
my $rs1 = $schema->resultset("Artist")->search({ 'tags.tag' => 'Blue' }, { join => {'cds' => 'tracks'}, prefetch => {'cds' => 'tags'} });
my @artists = $rs1->all;
cmp_ok(@artists, '==', 1, "Two artists returned");
my $rs2 = $rs1->search({ artistid => '1' }, { join => {'cds' => {'cd_to_producer' => 'producer'} } });
-my $rs3 = $rs2->search_related('cds')->search({'cds.title' => 'Forkful of bees'});
-cmp_ok($rs3->count, '==', 3, "Three artists returned");
+
+my @artists2 = $rs2->search({ 'producer.name' => 'Matt S Trout' });
+my @cds = $artists2[0]->cds;
+cmp_ok(scalar @cds, '==', 1, "condition based on inherited join okay");
+
+# FIXME: this is arguably wrong -- the search_related resultset should really accept a bare 'me.title' condition here
+my $rs3 = $rs2->search_related('cds');
+cmp_ok($rs3->count, '==', 9, "Nine cds returned");
my $rs4 = $schema->resultset("CD")->search({ 'artist.artistid' => '1' }, { join => ['tracks', 'artist'], prefetch => 'artist' });
my @rs4_results = $rs4->all;
-
is($rs4_results[0]->cdid, 1, "correct artist returned");
my $rs5 = $rs4->search({'tracks.title' => 'Sticky Honey'});
is($rs5->count, 1, "search without using previous joins okay");
+my $record_rs = $schema->resultset("Artist")->search(undef, { join => 'cds' })->search(undef, { prefetch => { 'cds' => 'tracks' }});
+my $record_jp = $record_rs->next;
+ok($record_jp, "prefetch on same rel okay");
+
+my $artist = $schema->resultset("Artist")->find(1);
+my $cds = $artist->cds;
+is($cds->find(2)->title, 'Forkful of bees', "find on has many rs okay");
+
+my $cd = $cds->search({'me.title' => 'Forkful of bees'}, { prefetch => 'tracks' })->first;
+my @tracks = $cd->tracks->all;
+is(scalar(@tracks), 3, 'right number of prefetched tracks after has many');
+
+# causes an ambiguous-column error due to the order_by on the relationship
+#my $tracks_rs = $cds->search_related('tracks', { 'tracks.position' => '2', 'disc.title' => 'Forkful of bees' });
+#my $first_tracks_rs = $tracks_rs->first;
+
+my $related_rs = $schema->resultset("Artist")->search({ name => 'Caterwauler McCrae' })->search_related('cds', { year => '2001'})->search_related('tracks', { 'position' => '2' });
+is($related_rs->first->trackid, '5', 'search related on search related okay');
+
+# causes an ambiguous-column error due to the order_by on the relationship
+#$related_rs->search({'cd.year' => '2001'}, {join => ['cd', 'cd']})->all;
+
+my $title = $schema->resultset("Artist")->search_related('twokeys')->search_related('cd')->search({'tracks.position' => '2'}, {join => 'tracks', order_by => 'tracks.trackid'})->next->title;
+is($title, 'Forkful of bees', 'search relateds with order by okay');
+
+my $prod_rs = $schema->resultset("CD")->find(1)->producers_sorted;
+my $prod_rs2 = $prod_rs->search({ name => 'Matt S Trout' });
+my $prod_first = $prod_rs2->first;
+is($prod_first->id, '1', 'somewhat pointless search on rel with order_by on it okay');
+
+my $prod_map_rs = $schema->resultset("Artist")->find(1)->cds->search_related('cd_to_producer', {}, { join => 'producer', prefetch => 'producer' });
+ok($prod_map_rs->next->producer, 'search related with prefetch okay');
+
+my $stupid = $schema->resultset("Artist")->search_related('artist_undirected_maps', {}, { prefetch => 'artist1' })->search_related('mapped_artists')->search_related('cds', {'cds.cdid' => '2'}, { prefetch => 'tracks' });
+#use Data::Dumper; warn Dumper($stupid->{attrs});
+
+my $cd_final = $schema->resultset("Artist")->search_related('artist_undirected_maps', {}, { prefetch => 'artist1' })->search_related('mapped_artists')->search_related('cds', {'cds.cdid' => '2'}, { prefetch => 'tracks' })->first;
+is($cd_final->cdid, '2', 'bonkers search_related-with-join-midway okay');
+
1;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 2;
+
+ok ( $schema->storage->debug(1), 'debug' );
+ok ( defined(
+ $schema->storage->debugfh(
+ IO::File->new('t/var/sql.log', 'w')
+ )
+ ),
+ 'debugfh'
+ );
+
+1;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+plan tests => 1;
+
+my $schema = DBICTest->init_schema();
+
+is( ref($schema->storage), 'DBIx::Class::Storage::DBI::SQLite',
+ 'Storage reblessed correctly into DBIx::Class::Storage::DBI::SQLite' );
+
+1;
--- /dev/null
+# belongs to t/run/90ensure_class_loaded.tl
+package # hide from PAUSE
+ DBICTest::ErrorComponent;
+use warnings;
+use strict;
+
+# this is missing on purpose
+# 1;
-# belongs to t/run/30ensure_class_loaded.tl
+# belongs to t/run/90ensure_class_loaded.tl
package # hide from PAUSE
DBICTest::FakeComponent;
use warnings;
--- /dev/null
+# belongs to t/run/90ensure_class_loaded.tl
+package # hide from PAUSE
+ DBICTest::OptionalComponent;
+use warnings;
+use strict;
+
+1;