From: Matt S Trout
Date: Thu, 23 Feb 2006 19:27:36 +0000 (+0000)
Subject: Merge 'trunk' into 'DBIx-Class-current'
X-Git-Tag: v0.06000~61^2~13
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=9574e7cc118caddd8bcd628881bde8e7763b3322;hp=2437a1e37464dc79c36dca000fd6b4c439f4c7b2;p=dbsrgits%2FDBIx-Class.git

Merge 'trunk' into 'DBIx-Class-current'

r8098@obrien (orig r958):  ningu | 2006-02-23 11:09:00 +0000
attempt to fix weird overload '0+' bug, modify tests to make sure it works
r8099@obrien (orig r959):  matthewt | 2006-02-23 13:55:52 +0000
Updated Oracle test
r8157@obrien (orig r960):  jesper | 2006-02-23 19:14:42 +0000
Double char quoting implemented, now supports stuff like [] (for MSSQL)
---

diff --git a/Changes b/Changes
index 6386364..e7e7f9d 100644
--- a/Changes
+++ b/Changes
@@ -1,5 +1,11 @@
 Revision history for DBIx::Class

+        - minor tweak to tests for join edge case
+        - added cascade_copy relationship attribute
+          (sponsored by Airspace Software, http://www.airspace.co.uk/)
+        - clean up set_from_related
+        - made copy() automatically null out auto-inc columns
+        - tweak to Componentised for Class::C3 0.11
         - fixes for auto-inc under MSSQL
@@ -22,10 +28,7 @@ Revision history for DBIx::Class
 0.05003 2006-02-08 17:50:20
         - add component_class accessors and use them for *_class
         - small fixes to Serialize and ResultSetManager
-        - prevent accidental table-wide update/delete on row-object
-          from PK-less table
         - rollback on disconnect, and disconnect on DESTROY
-        - fixes to deep search and search_relateduser

 0.05002 2006-02-06 12:12:03
         - Added recommends for Class::Inspector
@@ -129,7 +132,6 @@ Revision history for DBIx::Class
 0.03004
         - Added an || '' to the CDBICompat stringify to avoid null warnings
         - Updated name section for manual pods
-
 0.03003 2005-11-03 17:00:00
         - POD fixes.
         - Changed use to require in Relationship/Base to avoid import.
diff --git a/MANIFEST b/MANIFEST index 1d24b6f..6f8325a 100644 --- a/MANIFEST +++ b/MANIFEST @@ -64,17 +64,29 @@ lib/DBIx/Class/ResultSourceProxy.pm lib/DBIx/Class/ResultSourceProxy/Table.pm lib/DBIx/Class/Row.pm lib/DBIx/Class/Schema.pm -lib/DBIx/Class/Serialize.pm +lib/DBIx/Class/Serialize/Storable.pm lib/DBIx/Class/Storage/DBI.pm lib/DBIx/Class/Storage/DBI/Cursor.pm +lib/DBIx/Class/Storage/DBI/DB2.pm +lib/DBIx/Class/Storage/DBI/MSSQL.pm +lib/DBIx/Class/Storage/DBI/mysql.pm +lib/DBIx/Class/Storage/DBI/Oracle.pm +lib/DBIx/Class/Storage/DBI/Pg.pm +lib/DBIx/Class/Storage/DBI/SQLite.pm lib/DBIx/Class/Test/SQLite.pm lib/DBIx/Class/UUIDColumns.pm +lib/DBIx/Class/UUIDMaker.pm +lib/DBIx/Class/UUIDMaker/APR/UUID.pm +lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm +lib/DBIx/Class/UUIDMaker/Data/UUID.pm +lib/DBIx/Class/UUIDMaker/UUID.pm +lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm +lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm lib/DBIx/Class/Validation.pm lib/SQL/Translator/Parser/DBIx/Class.pm lib/SQL/Translator/Producer/DBIx/Class/File.pm Makefile.PL MANIFEST This list of files -META.yml README t/02pod.t t/03podcoverage.t.disabled @@ -98,12 +110,15 @@ t/basicrels/10auto.t t/basicrels/11mysql.t t/basicrels/12pg.t t/basicrels/13oracle.t +t/basicrels/145db2.t t/basicrels/14mssql.t t/basicrels/15limit.t t/basicrels/16joins.t t/basicrels/17join_count.t t/basicrels/18self_referencial.t t/basicrels/19uuid.t +t/basicrels/20unique.t +t/basicrels/21serialize.t t/cdbi-sweet-t/08pager.t t/cdbi-t/01-columns.t t/cdbi-t/02-Film.t @@ -135,6 +150,7 @@ t/helperrels/10auto.t t/helperrels/11mysql.t t/helperrels/12pg.t t/helperrels/13oracle.t +t/helperrels/145db2.t t/helperrels/14mssql.t t/helperrels/15limit.t t/helperrels/16joins.t @@ -142,6 +158,7 @@ t/helperrels/17join_count.t t/helperrels/18self_referencial.t t/helperrels/19uuid.t t/helperrels/20unique.t +t/helperrels/21serialize.t t/lib/DBICTest.pm t/lib/DBICTest/BasicRels.pm t/lib/DBICTest/Extra.pm @@ -180,6 +197,7 @@ t/run/10auto.tl t/run/11mysql.tl t/run/12pg.tl t/run/13oracle.tl +t/run/145db2.tl t/run/14mssql.tl t/run/15limit.tl t/run/16joins.tl @@ -187,6 +205,7 @@ t/run/17join_count.tl t/run/18self_referencial.tl t/run/19uuid.tl t/run/20unique.tl +t/run/21serialize.tl t/testlib/Actor.pm t/testlib/ActorAlias.pm t/testlib/Binary.pm @@ -205,3 +224,4 @@ t/testlib/MyStarLinkMCPK.pm t/testlib/Order.pm t/testlib/OtherFilm.pm t/testlib/PgBase.pm +META.yml diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 56fe277..644fa1d 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -160,6 +160,8 @@ Jesper Krogh Brandon Black +Christopher H. 
Laco + Scotty Allen =head1 LICENSE diff --git a/lib/DBIx/Class/Core.pm b/lib/DBIx/Class/Core.pm index 303d1cc..455e741 100644 --- a/lib/DBIx/Class/Core.pm +++ b/lib/DBIx/Class/Core.pm @@ -7,6 +7,7 @@ no warnings 'qw'; use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/ + Serialize::Storable InflateColumn Relationship PK diff --git a/lib/DBIx/Class/DB.pm b/lib/DBIx/Class/DB.pm index 1a4adff..14b421f 100644 --- a/lib/DBIx/Class/DB.pm +++ b/lib/DBIx/Class/DB.pm @@ -14,7 +14,7 @@ __PACKAGE__->load_components(qw/ResultSetProxy/); sub storage { shift->schema_instance(@_)->storage; } sub resultset_instance { - my $class = shift; + my $class = ref $_[0] || $_[0]; my $source = $class->result_source_instance; if ($source->result_class ne $class) { $source = $source->new($source); diff --git a/lib/DBIx/Class/PK/Auto.pm b/lib/DBIx/Class/PK/Auto.pm index c2bb440..64c8c83 100644 --- a/lib/DBIx/Class/PK/Auto.pm +++ b/lib/DBIx/Class/PK/Auto.pm @@ -53,24 +53,17 @@ sub insert { my ($self, @rest) = @_; my $ret = $self->next::method(@rest); - # if all primaries are already populated, skip auto-inc - my $populated = 0; - map { $populated++ if defined $self->get_column($_) } $self->primary_columns; - return $ret if ( $populated == scalar $self->primary_columns ); - - my ($pri, $too_many) = - (grep { $self->column_info($_)->{'auto_increment'} } - $self->primary_columns) - || $self->primary_columns; + my ($pri, $too_many) = grep { !defined $self->get_column($_) } $self->primary_columns; + return $ret unless defined $pri; # if all primaries are already populated, skip auto-inc $self->throw_exception( "More than one possible key found for auto-inc on ".ref $self ) - if $too_many; - unless (defined $self->get_column($pri)) { - $self->throw_exception( "Can't auto-inc for $pri on ".ref $self.": no _last_insert_id method" ) - unless $self->can('last_insert_id'); - my $id = $self->last_insert_id; - $self->throw_exception( "Can't get last insert id" ) unless $id; - $self->store_column($pri => $id); - } + if defined $too_many; + + my $storage = $self->result_source->storage; + $self->throw_exception( "Missing primary key but Storage doesn't support last_insert_id" ) unless $storage->can('last_insert_id'); + my $id = $storage->last_insert_id($self->result_source,$pri); + $self->throw_exception( "Can't get last insert id" ) unless $id; + $self->store_column($pri => $id); + return $ret; } @@ -81,7 +74,12 @@ associated with looking up the sequence automatically. =cut -__PACKAGE__->mk_classdata('sequence'); +sub sequence { + my ($self,$seq) = @_; + foreach my $pri ($self->primary_columns) { + $self->column_info($pri)->{sequence} = $seq; + } +} 1; diff --git a/lib/DBIx/Class/PK/Auto/DB2.pm b/lib/DBIx/Class/PK/Auto/DB2.pm index f05f781..29ecf78 100644 --- a/lib/DBIx/Class/PK/Auto/DB2.pm +++ b/lib/DBIx/Class/PK/Auto/DB2.pm @@ -7,39 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id -{ - my ($self) = @_; - - my $dbh = $self->result_source->storage->dbh; - my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3); - $sth->execute(); - - my @res = $sth->fetchrow_array(); - - return @res ? 
$res[0] : undef; - -} - 1; =head1 NAME -DBIx::Class::PK::Auto::DB2 - Automatic primary key class for DB2 +DBIx::Class::PK::Auto::DB2 - (DEPRECATED) Automatic primary key class for DB2 =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::DB2 Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for DB2. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Jess Robinson +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/MSSQL.pm b/lib/DBIx/Class/PK/Auto/MSSQL.pm index a3c4329..8ac2778 100644 --- a/lib/DBIx/Class/PK/Auto/MSSQL.pm +++ b/lib/DBIx/Class/PK/Auto/MSSQL.pm @@ -1,40 +1,28 @@ -package DBIx::Class::PK::Auto::MSSQL; - -use strict; -use warnings; - -use base qw/DBIx::Class/; - -__PACKAGE__->load_components(qw/PK::Auto/); - -sub last_insert_id { - my( $id ) = $_[0]->result_source->storage->dbh->selectrow_array( - 'SELECT @@IDENTITY' ); - return $id; -} - -1; - -=head1 NAME - -DBIx::Class::PK::Auto::MSSQL - Automatic primary key class for MSSQL - -=head1 SYNOPSIS - - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::MSSQL Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for MSSQL. - -=head1 AUTHORS - -Brian Cassidy - -=head1 LICENSE - -You may distribute this code under the same terms as Perl itself. - -=cut +package DBIx::Class::PK::Auto::MSSQL; + +use strict; +use warnings; + +use base qw/DBIx::Class/; + +__PACKAGE__->load_components(qw/PK::Auto/); + +1; + +=head1 NAME + +DBIx::Class::PK::Auto::MSSQL - (DEPRECATED) Automatic primary key class for MSSQL + +=head1 SYNOPSIS + +Just load PK::Auto instead; auto-inc is now handled by Storage. + +=head1 AUTHORS + +Matt S Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/PK/Auto/MySQL.pm b/lib/DBIx/Class/PK/Auto/MySQL.pm index e65bd39..7a1f78e 100644 --- a/lib/DBIx/Class/PK/Auto/MySQL.pm +++ b/lib/DBIx/Class/PK/Auto/MySQL.pm @@ -7,29 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - return $_[0]->result_source->storage->dbh->{mysql_insertid}; -} - 1; =head1 NAME -DBIx::Class::PK::Auto::MySQL - Automatic primary key class for MySQL +DBIx::Class::PK::Auto::MySQL - (DEPRECATED) Automatic primary key class for MySQL =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::MySQL Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for MySQL. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Matt S. Trout +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/Oracle.pm b/lib/DBIx/Class/PK/Auto/Oracle.pm index 7455408..437246b 100644 --- a/lib/DBIx/Class/PK/Auto/Oracle.pm +++ b/lib/DBIx/Class/PK/Auto/Oracle.pm @@ -3,71 +3,23 @@ package DBIx::Class::PK::Auto::Oracle; use strict; use warnings; -use Carp qw/croak/; - use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - my $self = shift; - $self->get_autoinc_seq unless $self->{_autoinc_seq}; - my $sql = "SELECT " . $self->{_autoinc_seq} . 
".currval FROM DUAL"; - my ($id) = $self->result_source->storage->dbh->selectrow_array($sql); - return $id; -} - -sub get_autoinc_seq { - my $self = shift; - - # return the user-defined sequence if known - if ($self->sequence) { - return $self->{_autoinc_seq} = $self->sequence; - } - - # look up the correct sequence automatically - my $dbh = $self->result_source->storage->dbh; - my $sql = qq{ - SELECT trigger_body FROM ALL_TRIGGERS t - WHERE t.table_name = ? - AND t.triggering_event = 'INSERT' - AND t.status = 'ENABLED' - }; - # trigger_body is a LONG - $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024); - my $sth = $dbh->prepare($sql); - $sth->execute( uc($self->result_source->name) ); - while (my ($insert_trigger) = $sth->fetchrow_array) { - if ($insert_trigger =~ m!(\w+)\.nextval!i ) { - $self->{_autoinc_seq} = uc($1); - } - } - unless ($self->{_autoinc_seq}) { - croak "Unable to find a sequence INSERT trigger on table '" . $self->_table_name . "'."; - } -} - 1; =head1 NAME -DBIx::Class::PK::Auto::Oracle - Automatic primary key class for Oracle +DBIx::Class::PK::Auto::Oracle - (DEPRECATED) Automatic primary key class for Oracle =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::Oracle Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for Oracle. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Andy Grundman - -Scott Connelly +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/Pg.pm b/lib/DBIx/Class/PK/Auto/Pg.pm index ad12dfa..00cd24f 100644 --- a/lib/DBIx/Class/PK/Auto/Pg.pm +++ b/lib/DBIx/Class/PK/Auto/Pg.pm @@ -7,54 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - my $self = shift; - $self->get_autoinc_seq unless $self->{_autoinc_seq}; - $self->result_source->storage->dbh->last_insert_id(undef,undef,undef,undef, - {sequence=>$self->{_autoinc_seq}}); -} - -sub get_autoinc_seq { - my $self = shift; - - # return the user-defined sequence if known - if ($self->sequence) { - return $self->{_autoinc_seq} = $self->sequence; - } - - my @pri = $self->primary_columns; - my $dbh = $self->result_source->storage->dbh; - my ($schema,$table) = $self->table =~ /^(.+)\.(.+)$/ ? ($1,$2) : (undef,$self->table); - while (my $col = shift @pri) { - my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref; - if (defined $info->[12] and $info->[12] =~ - /^nextval\('"?([^"']+)"?'::(?:text|regclass)\)/) - { - $self->{_autoinc_seq} = $1; - last; - } - } -} - 1; =head1 NAME -DBIx::Class::PK::Auto::Pg - Automatic primary key class for PostgreSQL +DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::Pg Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for PostgreSQL. +Just load PK::Auto instead; auto-inc is now handled by Storage. 
=head1 AUTHORS -Marcus Ramberg +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/SQLite.pm b/lib/DBIx/Class/PK/Auto/SQLite.pm index e405eac..de42922 100644 --- a/lib/DBIx/Class/PK/Auto/SQLite.pm +++ b/lib/DBIx/Class/PK/Auto/SQLite.pm @@ -7,29 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - return $_[0]->result_source->storage->dbh->func('last_insert_rowid'); -} - 1; =head1 NAME -DBIx::Class::PK::Auto::SQLite - Automatic primary key class for SQLite +DBIx::Class::PK::Auto::SQLite - (DEPRECATED) Automatic primary key class for SQLite =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::SQLite Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for SQLite. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Matt S. Trout +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index e04b082..c048fc9 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -86,42 +86,7 @@ sub register_relationship { } =cut sub search_related { - my $self = shift; - die "Can't call *_related as class methods" unless ref $self; - my $rel = shift; - my $attrs = { }; - if (@_ > 1 && ref $_[$#_] eq 'HASH') { - $attrs = { %{ pop(@_) } }; - } - my $rel_obj = $self->relationship_info($rel); - $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj; - $attrs = { %{$rel_obj->{attrs} || {}}, %{$attrs || {}} }; - - $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1)); - my $query = ((@_ > 1) ? {@_} : shift); - - my ($cond) = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self); - if (ref $cond eq 'ARRAY') { - $cond = [ map { my %hash; - foreach my $key (keys %{$_}) { - unless ($key =~ m/\./) { - $hash{"me.$key"} = $_->{$key}; - } else { - $hash{$key} = $_->{$key}; - } - }; \%hash; } @$cond ]; - } else { - foreach my $key (keys %$cond) { - unless ($key =~ m/\./) { - $cond->{"me.$key"} = delete $cond->{$key}; - } - } - } - $query = ($query ? { '-and' => [ $cond, $query ] } : $cond); - #use Data::Dumper; warn Dumper($cond); - #warn $rel_obj->{class}." $meth $cond ".join(', ', @{$attrs->{bind}||[]}); - return $self->result_source->related_source($rel - )->resultset->search($query, $attrs); + return shift->related_resultset(shift)->search(@_); } =head2 count_related @@ -144,7 +109,9 @@ sub count_related { sub create_related { my $self = shift; my $rel = shift; - return $self->search_related($rel)->create(@_); + my $obj = $self->search_related($rel)->create(@_); + delete $self->{related_resultsets}->{$rel}; + return $obj; } =head2 new_related @@ -198,15 +165,9 @@ sub set_from_related { my $f_class = $self->result_source->schema->class($rel_obj->{class}); $self->throw_exception( "Object $f_obj isn't a ".$f_class ) unless $f_obj->isa($f_class); - foreach my $key (keys %$cond) { - next if ref $cond->{$key}; # Skip literals and complex conditions - $self->throw_exception("set_from_related can't handle $key as key") - unless $key =~ m/^foreign\.([^\.]+)$/; - my $val = $f_obj->get_column($1); - $self->throw_exception("set_from_related can't handle ".$cond->{$key}." 
as value") - unless $cond->{$key} =~ m/^self\.([^\.]+)$/; - $self->set_column($1 => $val); - } + $self->set_columns( + $self->result_source->resolve_condition( + $rel_obj->{cond}, $f_obj, $rel)); return 1; } @@ -230,11 +191,72 @@ sub update_from_related { sub delete_related { my $self = shift; - return $self->search_related(@_)->delete; + my $obj = $self->search_related(@_)->delete; + delete $self->{related_resultsets}->{$_[0]}; + return $obj; } 1; +=head2 related_resultset($name) + +Returns a L for the relationship named $name. + + $rs = $obj->related_resultset('related_table'); + +=cut + +sub related_resultset { + my $self = shift; + $self->throw_exception("Can't call *_related as class methods") unless ref $self; + my $rel = shift; + $self->{related_resultsets} ||= {}; + #use Data::Dumper; warn "related_resultsets: ", Dumper $self->{related_resultsets}; + my $resultsets = $self->{related_resultsets}; + if( !exists $resultsets->{$rel} ) { + + #warn "creating related resultset for relation '$rel'", \$self; + my $source = $self->result_source; + # if relation exists but resultset doesn't, create the resultset + + my $attrs = { }; + if (@_ > 1 && ref $_[$#_] eq 'HASH') { + $attrs = { %{ pop(@_) } }; + } + + my $rel_obj = $self->relationship_info($rel); + $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj; + $attrs = { %{$rel_obj->{attrs} || {}}, %{$attrs || {}} }; + + $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1)); + my $query = ((@_ > 1) ? {@_} : shift); + + my ($cond) = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self); + if (ref $cond eq 'ARRAY') { + $cond = [ map { my %hash; + foreach my $key (keys %{$_}) { + unless ($key =~ m/\./) { + $hash{"me.$key"} = $_->{$key}; + } else { + $hash{$key} = $_->{$key}; + } + }; \%hash; } @$cond ]; + } else { + foreach my $key (keys %$cond) { + unless ($key =~ m/\./) { + $cond->{"me.$key"} = delete $cond->{$key}; + } + } + } + $query = ($query ? { '-and' => [ $cond, $query ] } : $cond); + #use Data::Dumper; warn Dumper($cond); + #warn $rel_obj->{class}." $meth $cond ".join(', ', @{$attrs->{bind}||[]}); + $resultsets->{$rel} = + $self->result_source->related_source($rel)->resultset->search($query, $attrs); + } + return $resultsets->{$rel}; +} + =head1 AUTHORS Matt S. Trout diff --git a/lib/DBIx/Class/Relationship/HasMany.pm b/lib/DBIx/Class/Relationship/HasMany.pm index 716c292..1e61c74 100644 --- a/lib/DBIx/Class/Relationship/HasMany.pm +++ b/lib/DBIx/Class/Relationship/HasMany.pm @@ -35,6 +35,7 @@ sub has_many { { accessor => 'multi', join_type => 'LEFT', cascade_delete => 1, + cascade_copy => 1, %{$attrs||{}} } ); } diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 5d6f601..4ab3389 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -156,24 +156,40 @@ sub search { my $self = shift; #use Data::Dumper;warn Dumper(@_); + my $rs; + if( @_ ) { + + my $attrs = { %{$self->{attrs}} }; + my $having = delete $attrs->{having}; + if (@_ > 1 && ref $_[$#_] eq 'HASH') { + $attrs = { %$attrs, %{ pop(@_) } }; + } - my $attrs = { %{$self->{attrs}} }; - if (@_ > 1 && ref $_[$#_] eq 'HASH') { - $attrs = { %$attrs, %{ pop(@_) } }; - } - - my $where = (@_ ? ((@_ == 1 || ref $_[0] eq "HASH") ? shift : {@_}) : undef()); - if (defined $where) { - $where = (defined $attrs->{where} + my $where = (@_ ? ((@_ == 1 || ref $_[0] eq "HASH") ? shift : {@_}) : undef()); + if (defined $where) { + $where = (defined $attrs->{where} ? 
{ '-and' => [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } $where, $attrs->{where} ] } : $where); - $attrs->{where} = $where; - } + $attrs->{where} = $where; + } - my $rs = (ref $self)->new($self->result_source, $attrs); + if (defined $having) { + $having = (defined $attrs->{having} + ? { '-and' => + [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } + $having, $attrs->{having} ] } + : $having); + $attrs->{having} = $having; + } + $rs = (ref $self)->new($self->result_source, $attrs); + } + else { + $rs = $self; + $rs->reset(); + } return (wantarray ? $rs->all : $rs); } @@ -246,7 +262,9 @@ sub find { $query->{$self->{attrs}{alias}.'.'.$_} = delete $query->{$_}; } #warn Dumper($query); - return $self->search($query,$attrs)->next; + return (keys %$attrs + ? $self->search($query,$attrs)->single + : $self->single($query)); } =head2 search_related @@ -259,22 +277,7 @@ records. =cut sub search_related { - my ($self, $rel, @rest) = @_; - my $rel_obj = $self->result_source->relationship_info($rel); - $self->throw_exception( - "No such relationship ${rel} in search_related") - unless $rel_obj; - my $rs = $self->search(undef, { join => $rel }); - my $alias = ($rs->{attrs}{seen_join}{$rel} > 1 - ? join('_', $rel, $rs->{attrs}{seen_join}{$rel}) - : $rel); - return $self->result_source->schema->resultset($rel_obj->{class} - )->search( undef, - { %{$rs->{attrs}}, - alias => $alias, - select => undef(), - as => undef() } - )->search(@rest); + return shift->related_resultset(shift)->search(@_); } =head2 cursor @@ -292,6 +295,34 @@ sub cursor { $attrs->{where},$attrs); } +=head2 single + +Inflates the first result without creating a cursor + +=cut + +sub single { + my ($self, $extra) = @_; + my ($attrs) = $self->{attrs}; + $attrs = { %$attrs }; + if ($extra) { + if (defined $attrs->{where}) { + $attrs->{where} = { + '-and' + => [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } + delete $attrs->{where}, $extra ] + }; + } else { + $attrs->{where} = $extra; + } + } + my @data = $self->result_source->storage->select_single( + $self->{from}, $attrs->{select}, + $attrs->{where},$attrs); + return (@data ? $self->_construct_object(@data) : ()); +} + + =head2 search_like Perform a search, but use C instead of equality as the condition. 
Note @@ -346,6 +377,13 @@ Can be used to efficiently iterate over records in the resultset: sub next { my ($self) = @_; + my $cache = $self->get_cache; + if( @$cache ) { + $self->{all_cache_position} ||= 0; + my $obj = $cache->[$self->{all_cache_position}]; + $self->{all_cache_position}++; + return $obj; + } my @row = $self->cursor->next; # warn Dumper(\@row); use Data::Dumper; return unless (@row); @@ -354,25 +392,79 @@ sub next { sub _construct_object { my ($self, @row) = @_; + my @row_orig = @row; # copy @row for key comparison later, because @row will change my @as = @{ $self->{attrs}{as} }; +#use Data::Dumper; warn Dumper \@as; #warn "@cols -> @row"; my $info = [ {}, {} ]; foreach my $as (@as) { + my $rs = $self; my $target = $info; my @parts = split(/\./, $as); my $col = pop(@parts); foreach my $p (@parts) { $target = $target->[1]->{$p} ||= []; + + $rs = $rs->related_resultset($p) if $rs->{attrs}->{cache}; } - $target->[0]->{$col} = shift @row; + + $target->[0]->{$col} = shift @row + if ref($target->[0]) ne 'ARRAY'; # arrayref is pre-inflated objects, do not overwrite } #use Data::Dumper; warn Dumper(\@as, $info); my $new = $self->result_source->result_class->inflate_result( $self->result_source, @$info); $new = $self->{attrs}{record_filter}->($new) if exists $self->{attrs}{record_filter}; + + if( $self->{attrs}->{cache} ) { + while( my( $rel, $rs ) = each( %{$self->{related_resultsets}} ) ) { + $rs->all; + #warn "$rel:", @{$rs->get_cache}; + } + $self->build_rr( $self, $new ); + } + return $new; } + +sub build_rr { + # build related resultsets for supplied object + my ( $self, $context, $obj ) = @_; + + my $re = qr/^\w+\./; + while( my ($rel, $rs) = each( %{$context->{related_resultsets}} ) ) { + #warn "context:", $context->result_source->name, ", rel:$rel, rs:", $rs->result_source->name; + my @objs = (); + my $map = {}; + my $cond = $context->result_source->relationship_info($rel)->{cond}; + keys %$cond; + while( my( $rel_key, $pk ) = each(%$cond) ) { + $rel_key =~ s/$re//; + $pk =~ s/$re//; + $map->{$rel_key} = $pk; + } + + $rs->reset(); + while( my $rel_obj = $rs->next ) { + while( my( $rel_key, $pk ) = each(%$map) ) { + if( $rel_obj->get_column($rel_key) eq $obj->get_column($pk) ) { + push @objs, $rel_obj; + } + } + } + + my $rel_rs = $obj->related_resultset($rel); + $rel_rs->{attrs}->{cache} = 1; + $rel_rs->set_cache( \@objs ); + + while( my $rel_obj = $rel_rs->next ) { + $self->build_rr( $rs, $rel_obj ); + } + + } + +} =head2 result_source @@ -399,15 +491,19 @@ sub count { my $self = shift; return $self->search(@_)->count if @_ && defined $_[0]; unless (defined $self->{count}) { + return scalar @{ $self->get_cache } + if @{ $self->get_cache }; my $group_by; my $select = { 'count' => '*' }; - if( $group_by = delete $self->{attrs}{group_by} ) { + my $attrs = { %{ $self->{attrs} } }; + if( $group_by = delete $attrs->{group_by} ) { + delete $attrs->{having}; my @distinct = (ref $group_by ? @$group_by : ($group_by)); # todo: try CONCAT for multi-column pk my @pk = $self->result_source->primary_columns; if( scalar(@pk) == 1 ) { my $pk = shift(@pk); - my $alias = $self->{attrs}{alias}; + my $alias = $attrs->{alias}; my $re = qr/^($alias\.)?$pk$/; foreach my $column ( @distinct) { if( $column =~ $re ) { @@ -421,14 +517,12 @@ sub count { #use Data::Dumper; die Dumper $select; } - my $attrs = { %{ $self->{attrs} }, - select => $select, - as => [ 'count' ] }; + $attrs->{select} = $select; + $attrs->{as} = [ 'count' ]; # offset, order by and page are not needed to count. 
record_filter is cdbi delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/; ($self->{count}) = (ref $self)->new($self->result_source, $attrs)->cursor->next; - $self->{attrs}{group_by} = $group_by; } return 0 unless $self->{count}; my $count = $self->{count}; @@ -455,6 +549,14 @@ is returned in list context. sub all { my ($self) = @_; + return @{ $self->get_cache } + if @{ $self->get_cache }; + if( $self->{attrs}->{cache} ) { + my @obj = map { $self->_construct_object(@$_); } + $self->cursor->all; + $self->set_cache( \@obj ); + return @{ $self->get_cache }; + } return map { $self->_construct_object(@$_); } $self->cursor->all; } @@ -467,6 +569,7 @@ Resets the resultset's cursor, so you can iterate through the elements again. sub reset { my ($self) = @_; + $self->{all_cache_position} = 0; $self->cursor->reset; return $self; } @@ -741,6 +844,90 @@ sub update_or_create { return $row; } +=head2 get_cache + +Gets the contents of the cache for the resultset. + +=cut + +sub get_cache { + my $self = shift; + return $self->{all_cache} || []; +} + +=head2 set_cache + +Sets the contents of the cache for the resultset. Expects an arrayref of objects of the same class as those produced by the resultset. + +=cut + +sub set_cache { + my ( $self, $data ) = @_; + $self->throw_exception("set_cache requires an arrayref") + if ref $data ne 'ARRAY'; + my $result_class = $self->result_source->result_class; + foreach( @$data ) { + $self->throw_exception("cannot cache object of type '$_', expected '$result_class'") + if ref $_ ne $result_class; + } + $self->{all_cache} = $data; +} + +=head2 clear_cache + +Clears the cache for the resultset. + +=cut + +sub clear_cache { + my $self = shift; + $self->set_cache([]); +} + +=head2 related_resultset + +Returns a related resultset for the supplied relationship name. + + $rs = $rs->related_resultset('foo'); + +=cut + +sub related_resultset { + my ( $self, $rel, @rest ) = @_; + $self->{related_resultsets} ||= {}; + my $resultsets = $self->{related_resultsets}; + if( !exists $resultsets->{$rel} ) { + #warn "fetching related resultset for rel '$rel'"; + my $rel_obj = $self->result_source->relationship_info($rel); + $self->throw_exception( + "search_related: result source '" . $self->result_source->name . + "' has no such relationship ${rel}") + unless $rel_obj; #die Dumper $self->{attrs}; + my $rs; + if( $self->{attrs}->{cache} ) { + $rs = $self->search(undef); + } + else { + $rs = $self->search(undef, { join => $rel }); + } + #use Data::Dumper; die Dumper $rs->{attrs};#$rs = $self->search( undef ); + #use Data::Dumper; warn Dumper $self->{attrs}, Dumper $rs->{attrs}; + my $alias = (defined $rs->{attrs}{seen_join}{$rel} + && $rs->{attrs}{seen_join}{$rel} > 1 + ? 
join('_', $rel, $rs->{attrs}{seen_join}{$rel}) + : $rel); + $resultsets->{$rel} = + $self->result_source->schema->resultset($rel_obj->{class} + )->search( undef, + { %{$rs->{attrs}}, + alias => $alias, + select => undef(), + as => undef() } + )->search(@rest); + } + return $resultsets->{$rel}; +} + =head2 throw_exception See Schema's throw_exception diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 41dc14e..343c019 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -7,6 +7,7 @@ use DBIx::Class::ResultSet; use Carp::Clan qw/^DBIx::Class/; use Storable; +use Scalar::Util qw/weaken/; use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/AccessorGroup/); @@ -33,7 +34,7 @@ retrieved, most usually a table (see L) sub new { my ($class, $attrs) = @_; $class = ref $class if ref $class; - my $new = bless({ %{$attrs || {}} }, $class); + my $new = bless({ %{$attrs || {}}, _resultset => undef }, $class); $new->{resultset_class} ||= 'DBIx::Class::ResultSet'; $new->{resultset_attributes} = { %{$new->{resultset_attributes} || {}} }; $new->{_ordered_columns} = [ @{$new->{_ordered_columns}||[]}]; @@ -484,6 +485,8 @@ sub resolve_condition { #warn "$self $k $for $v"; $ret{$k} = $for->get_column($v); #warn %ret; + } elsif (ref $as) { # reverse object + $ret{$v} = $as->get_column($k); } else { $ret{"${as}.${k}"} = "${for}.${v}"; } @@ -612,7 +615,12 @@ Specify here any attributes you wish to pass to your specialised resultset. sub resultset { my $self = shift; - return $self->resultset_class->new($self, $self->{resultset_attributes}); + return $self->{_resultset} if ref $self->{_resultset} eq $self->resultset_class; + return $self->{_resultset} = do { + my $rs = $self->resultset_class->new($self, $self->{resultset_attributes}); + weaken $rs->result_source; + $rs; + }; } =head2 throw_exception diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 3d2638d..df8bba8 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -66,6 +66,7 @@ sub insert { $source->storage->insert($source->from, { $self->get_columns }); $self->in_storage(1); $self->{_dirty_columns} = {}; + $self->{related_resultsets} = {}; return $self; } @@ -110,6 +111,7 @@ sub update { $self->throw_exception("Can't update ${self}: updated more than one row"); } $self->{_dirty_columns} = {}; + $self->{related_resultsets} = {}; return $self; } @@ -237,9 +239,26 @@ Inserts a new row with the specified changes. 
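
The reworked copy() below, together with the new cascade_copy relationship attribute (made the default for has_many in this patch), changes what a copied row carries: auto-increment primary-key columns are dropped so the database assigns fresh values, and rows from cascade_copy relationships are copied and re-pointed at the new parent. A hedged usage sketch against a CD-with-tags schema like the one in the test suite here:

    my $copy = $cd->copy({ title => 'Reissue' });
    # $copy's auto-increment cdid is newly generated rather than inherited,
    # and each related tag row is copied with its cd column set to $copy's key
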
sub copy { my ($self, $changes) = @_; - my $new = bless({ _column_data => { %{$self->{_column_data}}} }, ref $self); - $new->set_column($_ => $changes->{$_}) for keys %$changes; - return $new->insert; + $changes ||= {}; + my $col_data = { %{$self->{_column_data}} }; + foreach my $col (keys %$col_data) { + delete $col_data->{$col} + if $self->result_source->column_info($col)->{is_auto_increment}; + } + my $new = bless({ _column_data => $col_data }, ref $self); + $new->set_columns($changes); + $new->insert; + foreach my $rel ($self->result_source->relationships) { + my $rel_info = $self->result_source->relationship_info($rel); + if ($rel_info->{attrs}{cascade_copy}) { + my $resolved = $self->result_source->resolve_condition( + $rel_info->{cond}, $rel, $new); + foreach my $related ($self->search_related($rel)) { + $related->copy($resolved); + } + } + } + return $new; } =head2 store_column @@ -276,25 +295,28 @@ sub inflate_result { }, ref $class || $class); my $schema; - PRE: foreach my $pre (keys %{$prefetch||{}}) { + foreach my $pre (keys %{$prefetch||{}}) { + my $pre_val = $prefetch->{$pre}; my $pre_source = $source->related_source($pre); - $class->throw_exception("Can't prefetch non-existant relationship ${pre}") unless $pre_source; + $class->throw_exception("Can't prefetch non-existent relationship ${pre}") unless $pre_source; my $fetched; unless ($pre_source->primary_columns == grep { exists $prefetch->{$pre}[0]{$_} and !defined $prefetch->{$pre}[0]{$_} } $pre_source->primary_columns) { $fetched = $pre_source->result_class->inflate_result( - $pre_source, @{$prefetch->{$pre}}); + $pre_source, @{$prefetch->{$pre}}); } my $accessor = $source->relationship_info($pre)->{attrs}{accessor}; $class->throw_exception("No accessor for prefetched $pre") - unless defined $accessor; + unless defined $accessor; if ($accessor eq 'single') { $new->{_relationship_data}{$pre} = $fetched; } elsif ($accessor eq 'filter') { - $new->{_inflated_column}{$pre} = $fetched; + $new->{_inflated_column}{$pre} = $fetched; + } elsif ($accessor eq 'multi') { + } else { - $class->throw_exception("Don't know how to store prefetched $pre"); + $class->throw_exception("Prefetch not supported with accessor '$accessor'"); } } return $new; diff --git a/lib/DBIx/Class/Serialize.pm b/lib/DBIx/Class/Serialize/Storable.pm similarity index 70% rename from lib/DBIx/Class/Serialize.pm rename to lib/DBIx/Class/Serialize/Storable.pm index 345b99c..8066337 100644 --- a/lib/DBIx/Class/Serialize.pm +++ b/lib/DBIx/Class/Serialize/Storable.pm @@ -1,19 +1,18 @@ -package DBIx::Class::Serialize; +package DBIx::Class::Serialize::Storable; use strict; -use Storable qw/freeze thaw/; +use Storable; sub STORABLE_freeze { my ($self,$cloning) = @_; - #return if $cloning; my $to_serialize = { %$self }; delete $to_serialize->{result_source}; - return (freeze($to_serialize)); + return (Storable::freeze($to_serialize)); } sub STORABLE_thaw { my ($self,$cloning,$serialized) = @_; - %$self = %{ thaw($serialized) }; - $self->result_source($self->result_source_instance); + %$self = %{ Storable::thaw($serialized) }; + $self->result_source($self->result_source_instance) if $self->can('result_source_instance'); } 1; @@ -22,12 +21,12 @@ __END__ =head1 NAME - DBIx::Class::Serialize - hooks for Storable freeze/thaw (EXPERIMENTAL) + DBIx::Class::Serialize::Storable - hooks for Storable freeze/thaw (EXPERIMENTAL) =head1 SYNOPSIS # in a table class definition - __PACKAGE__->load_components(qw/Serialize/); + __PACKAGE__->load_components(qw/Serialize::Storable/); # 
meanwhile, in a nearby piece of code my $obj = $schema->resultset('Foo')->find(12); diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index a197ed2..95673ce 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -17,8 +17,10 @@ use base qw/SQL::Abstract::Limit/; sub select { my ($self, $table, $fields, $where, $order, @rest) = @_; @rest = (-1) unless defined $rest[0]; - $self->SUPER::select($table, $self->_recurse_fields($fields), - $where, $order, @rest); + local $self->{having_bind} = []; + my ($sql, @ret) = $self->SUPER::select($table, + $self->_recurse_fields($fields), $where, $order, @rest); + return wantarray ? ($sql, @ret, @{$self->{having_bind}}) : $sql; } sub _emulate_limit { @@ -49,11 +51,18 @@ sub _recurse_fields { sub _order_by { my $self = shift; my $ret = ''; + my @extra; if (ref $_[0] eq 'HASH') { if (defined $_[0]->{group_by}) { $ret = $self->_sqlcase(' group by ') .$self->_recurse_fields($_[0]->{group_by}); } + if (defined $_[0]->{having}) { + my $frag; + ($frag, @extra) = $self->_recurse_where($_[0]->{having}); + push(@{$self->{having_bind}}, @extra); + $ret .= $self->_sqlcase(' having ').$frag; + } if (defined $_[0]->{order_by}) { $ret .= $self->SUPER::_order_by($_[0]->{order_by}); } @@ -305,7 +314,11 @@ sub _populate_dbh { my ($self) = @_; my @info = @{$self->connect_info || []}; $self->_dbh($self->_connect(@info)); - + my $driver = $self->_dbh->{Driver}->{Name}; + eval "require DBIx::Class::Storage::DBI::${driver}"; + unless ($@) { + bless $self, "DBIx::Class::Storage::DBI::${driver}"; + } # if on-connect sql statements are given execute them foreach my $sql_statement (@{$self->on_connect_do || []}) { $self->_dbh->do($sql_statement); @@ -325,7 +338,10 @@ sub _connect { return $dbh; } - DBI->connect(@info); + my $dbh = DBI->connect(@info); + croak "DBI Connection failed: $DBI::errstr" + unless $dbh; + $dbh; } =head2 txn_begin @@ -412,8 +428,9 @@ sub _select { if (ref $condition eq 'SCALAR') { $order = $1 if $$condition =~ s/ORDER BY (.*)$//i; } - if (exists $attrs->{group_by}) { + if (exists $attrs->{group_by} || $attrs->{having}) { $order = { group_by => $attrs->{group_by}, + having => $attrs->{having}, ($order ? (order_by => $order) : ()) }; } my @args = ('select', $attrs->{bind}, $ident, $select, $condition, $order); @@ -478,6 +495,15 @@ sub columns_info_for { return \%result; } +sub last_insert_id { + my ($self, $row) = @_; + + return $self->dbh->func('last_insert_rowid'); + +} + + + sub DESTROY { shift->disconnect } 1; diff --git a/lib/DBIx/Class/Storage/DBI/DB2.pm b/lib/DBIx/Class/Storage/DBI/DB2.pm new file mode 100644 index 0000000..a6e1452 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/DB2.pm @@ -0,0 +1,48 @@ +package DBIx::Class::Storage::DBI::DB2; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id +{ + my ($self) = @_; + + my $dbh = $self->_dbh; + my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3); + $sth->execute(); + + my @res = $sth->fetchrow_array(); + + return @res ? $res[0] : undef; + +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::DB2 - Automatic primary key class for DB2 + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for DB2. 
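
The _populate_dbh change above means the storage object now requires and reblesses itself into a driver-specific subclass (such as this DB2 one) based on $dbh->{Driver}{Name} when it first connects, which is how last_insert_id becomes database-aware without per-driver row components. A sketch modelled on the DB2 test added later in this patch:

    DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass);
    my $storage = DB2Test->schema->storage;
    $storage->dbh;                # first real connection triggers the rebless
    print ref($storage), "\n";    # now DBIx::Class::Storage::DBI::DB2
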
+ +=head1 AUTHORS + +Jess Robinson + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/MSSQL.pm b/lib/DBIx/Class/Storage/DBI/MSSQL.pm new file mode 100644 index 0000000..7a30b65 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/MSSQL.pm @@ -0,0 +1,39 @@ +package DBIx::Class::Storage::DBI::MSSQL; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my( $id ) = $_[0]->_dbh->selectrow_array('SELECT @@IDENTITY' ); + return $id; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::MSSQL - Automatic primary key class for MSSQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for MSSQL. + +=head1 AUTHORS + +Brian Cassidy + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/Oracle.pm b/lib/DBIx/Class/Storage/DBI/Oracle.pm new file mode 100644 index 0000000..5fa4fce --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/Oracle.pm @@ -0,0 +1,68 @@ +package DBIx::Class::Storage::DBI::Oracle; + +use strict; +use warnings; + +use Carp qw/croak/; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my ($self,$source,$col) = @_; + my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col)); + my $sql = "SELECT " . $seq . ".currval FROM DUAL"; + my ($id) = $self->_dbh->selectrow_array($sql); + return $id; +} + +sub get_autoinc_seq { + my ($self,$source,$col) = @_; + + # look up the correct sequence automatically + my $dbh = $self->_dbh; + my $sql = q{ + SELECT trigger_body FROM ALL_TRIGGERS t + WHERE t.table_name = ? + AND t.triggering_event = 'INSERT' + AND t.status = 'ENABLED' + }; + # trigger_body is a LONG + $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024); + my $sth = $dbh->prepare($sql); + $sth->execute( uc($source->name) ); + while (my ($insert_trigger) = $sth->fetchrow_array) { + return uc($1) if $insert_trigger =~ m!(\w+)\.nextval!i; # col name goes here??? + } + croak "Unable to find a sequence INSERT trigger on table '" . $source->name . "'."; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::Oracle - Automatic primary key class for Oracle + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + __PACKAGE__->sequence('mysequence'); + +=head1 DESCRIPTION + +This class implements autoincrements for Oracle. + +=head1 AUTHORS + +Andy Grundman + +Scott Connelly + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
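
Worth noting alongside this module: sequence() in PK::Auto (rewritten earlier in this patch) now records the sequence name in the column metadata of each primary-key column, and the Oracle and Pg storages read it back via column_info, falling back to get_autoinc_seq when it was never set. A small illustrative sketch (sequence and column names are hypothetical):

    __PACKAGE__->sequence('artist_seq');
    # ...is now shorthand for stashing it per primary-key column, roughly:
    # $self->column_info('artistid')->{sequence} = 'artist_seq';
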
+ +=cut diff --git a/lib/DBIx/Class/Storage/DBI/Pg.pm b/lib/DBIx/Class/Storage/DBI/Pg.pm new file mode 100644 index 0000000..7fb6b8c --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/Pg.pm @@ -0,0 +1,58 @@ +package DBIx::Class::Storage::DBI::Pg; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my ($self,$source,$col) = @_; + my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col)); + $self->_dbh->last_insert_id(undef,undef,undef,undef, {sequence => $seq}); +} + +sub get_autoinc_seq { + my ($self,$source,$col) = @_; + + my @pri = $source->primary_columns; + my $dbh = $self->_dbh; + my ($schema,$table) = $source->name =~ /^(.+)\.(.+)$/ ? ($1,$2) + : (undef,$source->name); + while (my $col = shift @pri) { + my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref; + if (defined $info->[12] and $info->[12] =~ + /^nextval\('"?([^"']+)"?'::(?:text|regclass)\)/) + { + return $1; + } + } +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::Pg - Automatic primary key class for PostgreSQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + __PACKAGE__->sequence('mysequence'); + +=head1 DESCRIPTION + +This class implements autoincrements for PostgreSQL. + +=head1 AUTHORS + +Marcus Ramberg + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/SQLite.pm b/lib/DBIx/Class/Storage/DBI/SQLite.pm new file mode 100644 index 0000000..e6175b5 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/SQLite.pm @@ -0,0 +1,36 @@ +package DBIx::Class::Storage::DBI::SQLite; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +sub last_insert_id { + return $_[0]->dbh->func('last_insert_rowid'); +} + +1; + +=head1 NAME + +DBIx::Class::PK::Auto::SQLite - Automatic primary key class for SQLite + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto::SQLite Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for SQLite. + +=head1 AUTHORS + +Matt S. Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/mysql.pm b/lib/DBIx/Class/Storage/DBI/mysql.pm new file mode 100644 index 0000000..dcd124f --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/mysql.pm @@ -0,0 +1,38 @@ +package DBIx::Class::Storage::DBI::mysql; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + return $_[0]->_dbh->{mysql_insertid}; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::mysql - Automatic primary key class for MySQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for MySQL. + +=head1 AUTHORS + +Matt S. Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
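
All of these storage subclasses implement the one hook the rewritten PK::Auto::insert depends on: asking the storage, rather than the row class, for the last generated key. Roughly what the component calls, with hypothetical variable names:

    # after an insert that left a primary-key column undefined
    my $id = $row->result_source->storage->last_insert_id(
        $row->result_source,   # the result source being inserted into
        $pri_col,              # name of the unpopulated auto-increment column
    );
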
+ +=cut diff --git a/lib/DBIx/Class/UUIDColumns.pm b/lib/DBIx/Class/UUIDColumns.pm index efbe3d4..8a58527 100644 --- a/lib/DBIx/Class/UUIDColumns.pm +++ b/lib/DBIx/Class/UUIDColumns.pm @@ -1,9 +1,73 @@ package DBIx::Class::UUIDColumns; use base qw/DBIx::Class/; -use Data::UUID; - __PACKAGE__->mk_classdata( 'uuid_auto_columns' => [] ); +__PACKAGE__->mk_classdata( 'uuid_maker' ); +__PACKAGE__->uuid_class( __PACKAGE__->_find_uuid_module ); + +# be compatible with Class::DBI::UUID +sub uuid_columns { + my $self = shift; + for (@_) { + $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_); + } + $self->uuid_auto_columns(\@_); +} + +sub uuid_class { + my ($self, $class) = @_; + + if ($class) { + $class = "DBIx::Class::UUIDMaker$class" if $class =~ /^::/; + + if (!eval "require $class") { + $self->throw_exception("$class could not be loaded: $@"); + } elsif (!$class->isa('DBIx::Class::UUIDMaker')) { + $self->throw_exception("$class is not a UUIDMaker subclass"); + } else { + $self->uuid_maker($class->new); + }; + }; + + return ref $self->uuid_maker; +}; + +sub insert { + my $self = shift; + for my $column (@{$self->uuid_auto_columns}) { + $self->store_column( $column, $self->get_uuid ) + unless defined $self->get_column( $column ); + } + $self->next::method(@_); +} + +sub get_uuid { + return shift->uuid_maker->as_string; +} + +sub _find_uuid_module { + if (eval{require Data::UUID}) { + return '::Data::UUID'; + } elsif ($^O ne 'openbsd' && eval{require APR::UUID}) { + # APR::UUID on openbsd causes some as yet unfound nastyness for XS + return '::APR::UUID'; + } elsif (eval{require UUID}) { + return '::UUID'; + } elsif (eval{ + # squelch the 'too late for INIT' warning in Win32::API::Type + local $^W = 0; + require Win32::Guidgen; + }) { + return '::Win32::Guidgen'; + } elsif (eval{require Win32API::GUID}) { + return '::Win32API::GUID'; + } else { + shift->throw_exception('no suitable uuid module could be found') + }; +}; + +1; +__END__ =head1 NAME @@ -11,7 +75,7 @@ DBIx::Class::UUIDColumns - Implicit uuid columns =head1 SYNOPSIS - pacakge Artist; + package Artist; __PACKAGE__->load_components(qw/UUIDColumns Core DB/); __PACKAGE__->uuid_columns( 'artist_id' ); @@ -20,44 +84,65 @@ DBIx::Class::UUIDColumns - Implicit uuid columns This L component resembles the behaviour of L, to make some columns implicitly created as uuid. +When loaded, C will search for a suitable uuid generation module +from the following list of supported modules: + + Data::UUID + APR::UUID* + UUID + Win32::Guidgen + Win32API::GUID + +If no supporting module can be found, an exception will be thrown. + +*APR::UUID will not be loaded under OpenBSD due to an as yet unidentified XS +issue. + +If you would like to use a specific module, you can set C: + + __PACKAGE__->uuid_class('::Data::UUID'); + __PACKAGE__->uuid_class('MyUUIDGenerator'); + Note that the component needs to be loaded before Core. =head1 METHODS -=head2 uuid_columns +=head2 uuid_columns(@columns) -=cut +Takes a list of columns to be filled with uuids during insert. 
-# be compatible with Class::DBI::UUID -sub uuid_columns { - my $self = shift; - for (@_) { - $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_); - } - $self->uuid_auto_columns(\@_); -} + __PACKAGE__->uuid_columns('id'); -sub insert { - my $self = shift; - for my $column (@{$self->uuid_auto_columns}) { - $self->store_column( $column, $self->get_uuid ) - unless defined $self->get_column( $column ); - } - $self->next::method(@_); -} +=head2 uuid_class($classname) -sub get_uuid { - return Data::UUID->new->to_string(Data::UUID->new->create), -} +Takes the name of a UUIDMaker subclass to be used for uuid value generation. +This can be a fully qualified class name, or a shortcut name starting with :: +that matches one of the available DBIx::Class::UUIDMaker subclasses: + + __PACKAGE__->uuid_class('CustomUUIDGenerator'); + # loads CustomeUUIDGenerator + + __PACKAGE->uuid_class('::Data::UUID'); + # loads DBIx::Class::UUIDMaker::Data::UUID; + +Note that C chacks to see that the specified class isa +DBIx::Class::UUIDMaker subbclass and throws and exception if it isn't. + +=head2 uuid_maker + +Returns the current UUIDMaker instance for the given module. + + my $uuid = __PACKAGE__->uuid_maker->as_string; + +=head1 SEE ALSO + +L =head1 AUTHORS Chia-liang Kao +Chris Laco =head1 LICENSE You may distribute this code under the same terms as Perl itself. - -=cut - -1; diff --git a/lib/DBIx/Class/UUIDMaker.pm b/lib/DBIx/Class/UUIDMaker.pm new file mode 100644 index 0000000..b9c196c --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker.pm @@ -0,0 +1,56 @@ +package DBIx::Class::UUIDMaker; + +sub new { + return bless {}, shift; +}; + +sub as_string { + return undef; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker - UUID wrapper module + +=head1 SYNOPSIS + + package CustomUUIDMaker; + use base qw/DBIx::Class::/; + + sub as_string { + my $uuid; + ...magic encantations... + return $uuid; + }; + +=head1 DESCRIPTION + +DBIx::Class::UUIDMaker is a base class used by the various uuid generation +subclasses. + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L, +L, +L, +L, +L, +L, +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/APR/UUID.pm b/lib/DBIx/Class/UUIDMaker/APR/UUID.pm new file mode 100644 index 0000000..136ec5f --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/APR/UUID.pm @@ -0,0 +1,46 @@ +package DBIx::Class::UUIDMaker::APR::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use APR::UUID (); + +sub as_string { + return APR::UUID->new->format; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::APR::UUID - Create uuids using APR::UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::APR::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses APR::UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
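
Since the UUIDMaker base class introduced above only expects new() and as_string(), plugging in a site-specific generator takes a few lines. A hedged sketch of a custom maker (the package name is illustrative; the Data::UUID call mirrors the bundled Data::UUID maker):

    package My::UUIDMaker;
    use base qw/DBIx::Class::UUIDMaker/;
    use Data::UUID ();

    # return the new uuid as a plain string, like the bundled makers do
    sub as_string {
        return Data::UUID->new->to_string(Data::UUID->new->create);
    }

    1;

A table class would then select it with __PACKAGE__->uuid_class('My::UUIDMaker').
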
diff --git a/lib/DBIx/Class/UUIDMaker/Data/UUID.pm b/lib/DBIx/Class/UUIDMaker/Data/UUID.pm new file mode 100644 index 0000000..820669c --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Data/UUID.pm @@ -0,0 +1,46 @@ +package DBIx::Class::UUIDMaker::Data::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use Data::UUID (); + +sub as_string { + return Data::UUID->new->to_string(Data::UUID->new->create); +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Data::UUID - Create uuids using Data::UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Data::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Data::UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm b/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm new file mode 100644 index 0000000..8d9a29d --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm @@ -0,0 +1,44 @@ +package DBIx::Class::UUIDMaker::Data::Uniqid; +use base qw/DBIx::Class::UUIDMaker/; +use Data::Uniqid (); + +sub as_string { + return Data::Uniqid->luniqid; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Data::Uniqid - Create uuids using Data::Uniqid + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Data::Uniqid'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Data::Uniqid to generate uuid +strings using Data::Uniqid::luniqid. + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/UUID.pm b/lib/DBIx/Class/UUIDMaker/UUID.pm new file mode 100644 index 0000000..7a647a9 --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/UUID.pm @@ -0,0 +1,50 @@ +package DBIx::Class::UUIDMaker::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use UUID (); + +sub as_string { + my ($uuid, $uuidstring); + UUID::generate($uuid); + UUID::unparse($uuid, $uuidstring); + + return $uuidstring; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::UUID - Create uuids using UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
diff --git a/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm b/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm new file mode 100644 index 0000000..3c34b9a --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm @@ -0,0 +1,49 @@ +package DBIx::Class::UUIDMaker::Win32::Guidgen; +use base qw/DBIx::Class::UUIDMaker/; +use Win32::Guidgen (); + +sub as_string { + my $uuid = Win32::Guidgen::create(); + $uuid =~ s/(^\{|\}$)//g; + + return $uuid; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Win32:::Guidgen - Create uuids using Win32::Guidgen + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Win32::Guidgen'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Win32::Guidgen to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm b/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm new file mode 100644 index 0000000..85caad1 --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm @@ -0,0 +1,46 @@ +package DBIx::Class::UUIDMaker::Win32API::GUID; +use base qw/DBIx::Class::UUIDMaker/; +use Win32API::GUID (); + +sub as_string { + return Win32API::GUID::CreateGuid(); +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Win32API:::GUID - Create uuids using Win32API::GUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Win32API::GUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Win32API::GUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
diff --git a/maint/gen-tests.pl b/maint/gen-tests.pl new file mode 100644 index 0000000..0fc6180 --- /dev/null +++ b/maint/gen-tests.pl @@ -0,0 +1,25 @@ +#!/usr/bin/perl + +die "must be run from DBIx::Class root dir" unless -d 't/run'; + +gen_tests($_) for qw/BasicRels HelperRels/; + +sub gen_tests { + my $variant = shift; + my $dir = lc $variant; + system("rm -f t/$dir/*.t"); + + foreach my $test (map { m[^t/run/(.+)\.tl$]; $1 } split(/\n/, `ls t/run/*.tl`)) { + open(my $fh, '>', "t/$dir/${test}.t") or die $!; + print $fh <<"EOF"; +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::$variant; + +require "t/run/${test}.tl"; +run_tests(DBICTest->schema); +EOF + close $fh; + } +} \ No newline at end of file diff --git a/t/basicrels/145db2.t b/t/basicrels/145db2.t new file mode 100644 index 0000000..9573802 --- /dev/null +++ b/t/basicrels/145db2.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/145db2.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/20unique.t b/t/basicrels/20unique.t new file mode 100644 index 0000000..5a87ef1 --- /dev/null +++ b/t/basicrels/20unique.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/20unique.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/21serialize.t b/t/basicrels/21serialize.t new file mode 100644 index 0000000..60e401d --- /dev/null +++ b/t/basicrels/21serialize.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/21serialize.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/22cache.t b/t/basicrels/22cache.t new file mode 100644 index 0000000..1f8672a --- /dev/null +++ b/t/basicrels/22cache.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/22cache.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/22cascade_copy.t b/t/basicrels/22cascade_copy.t new file mode 100644 index 0000000..c670152 --- /dev/null +++ b/t/basicrels/22cascade_copy.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/22cascade_copy.tl"; +run_tests(DBICTest->schema); diff --git a/t/cdbi-t/12-filter.t b/t/cdbi-t/12-filter.t index c161602..979ad56 100644 --- a/t/cdbi-t/12-filter.t +++ b/t/cdbi-t/12-filter.t @@ -164,6 +164,8 @@ package main; Actor->iterator_class('Class::DBI::My::Iterator'); +delete $film->{related_resultsets}; + { my @acts = $film->actors->slice(1, 2); is @acts, 2, "Slice gives 2 results"; diff --git a/t/helperrels/145db2.t b/t/helperrels/145db2.t new file mode 100644 index 0000000..c6925ef --- /dev/null +++ b/t/helperrels/145db2.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/145db2.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/21serialize.t b/t/helperrels/21serialize.t new file mode 100644 index 0000000..0db9775 --- /dev/null +++ b/t/helperrels/21serialize.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/21serialize.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/22cascade_copy.t b/t/helperrels/22cascade_copy.t new file mode 100644 index 0000000..bc124e1 --- /dev/null +++ b/t/helperrels/22cascade_copy.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/22cascade_copy.tl"; 
+run_tests(DBICTest->schema); diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index 2200cd4..d05526f 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::Artist; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::Artist->table('artist'); DBICTest::Schema::Artist->add_columns( 'artistid' => { diff --git a/t/lib/DBICTest/Schema/BasicRels.pm b/t/lib/DBICTest/Schema/BasicRels.pm index 7a6f520..ecb9cef 100644 --- a/t/lib/DBICTest/Schema/BasicRels.pm +++ b/t/lib/DBICTest/Schema/BasicRels.pm @@ -5,11 +5,12 @@ use base 'DBIx::Class::Core'; DBICTest::Schema::Artist->add_relationship( cds => 'DBICTest::Schema::CD', { 'foreign.artist' => 'self.artistid' }, - { order_by => 'year', join_type => 'LEFT', cascade_delete => 1 } + { order_by => 'year', join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' } ); DBICTest::Schema::Artist->add_relationship( twokeys => 'DBICTest::Schema::TwoKeys', - { 'foreign.artist' => 'self.artistid' } + { 'foreign.artist' => 'self.artistid' }, + { cascade_copy => 1 } ); DBICTest::Schema::Artist->add_relationship( onekeys => 'DBICTest::Schema::OneKey', @@ -37,7 +38,7 @@ DBICTest::Schema::CD->add_relationship( DBICTest::Schema::CD->add_relationship( tags => 'DBICTest::Schema::Tag', { 'foreign.cd' => 'self.cdid' }, - { join_type => 'LEFT', cascade_delete => 1 } + { join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' } ); #DBICTest::Schema::CD->might_have(liner_notes => 'DBICTest::Schema::LinerNotes' => qw/notes/); DBICTest::Schema::CD->add_relationship( diff --git a/t/lib/DBICTest/Schema/CD.pm b/t/lib/DBICTest/Schema/CD.pm index 8e04c16..4eaba4f 100644 --- a/t/lib/DBICTest/Schema/CD.pm +++ b/t/lib/DBICTest/Schema/CD.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::CD; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::CD->table('cd'); DBICTest::Schema::CD->add_columns( 'cdid' => { diff --git a/t/lib/DBICTest/Schema/HelperRels.pm b/t/lib/DBICTest/Schema/HelperRels.pm index ff47640..1fb8886 100644 --- a/t/lib/DBICTest/Schema/HelperRels.pm +++ b/t/lib/DBICTest/Schema/HelperRels.pm @@ -42,7 +42,8 @@ DBICTest::Schema::CD_to_Producer->belongs_to( ); DBICTest::Schema::Artist->has_many( 'artist_undirected_maps', 'DBICTest::Schema::ArtistUndirectedMap', - [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}] + [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}], + { cascade_copy => 0 } # this would *so* not make sense ); DBICTest::Schema::ArtistUndirectedMap->belongs_to( 'artist1', 'DBICTest::Schema::Artist', 'id1'); diff --git a/t/lib/DBICTest/Schema/OneKey.pm b/t/lib/DBICTest/Schema/OneKey.pm index 081c94b..19387eb 100644 --- a/t/lib/DBICTest/Schema/OneKey.pm +++ b/t/lib/DBICTest/Schema/OneKey.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::OneKey; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::OneKey->table('onekey'); DBICTest::Schema::OneKey->add_columns( 'id' => { diff --git a/t/lib/DBICTest/Schema/Tag.pm b/t/lib/DBICTest/Schema/Tag.pm index 0a303d6..5b4eb20 100644 --- a/t/lib/DBICTest/Schema/Tag.pm +++ b/t/lib/DBICTest/Schema/Tag.pm @@ -2,10 +2,12 @@ package DBICTest::Schema::Tag; use base qw/DBIx::Class::Core/; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::Tag->table('tags'); DBICTest::Schema::Tag->add_columns( 'tagid' => { - data_type => 
'varchar', + data_type => 'integer', is_auto_increment => 1, }, 'cd' => { diff --git a/t/lib/sqlite.sql b/t/lib/sqlite.sql index 8015b29..f6060fe 100644 --- a/t/lib/sqlite.sql +++ b/t/lib/sqlite.sql @@ -1,6 +1,6 @@ -- -- Created by SQL::Translator::Producer::SQLite --- Created on Mon Feb 6 01:07:16 2006 +-- Created on Tue Feb 14 16:16:19 2006 -- BEGIN TRANSACTION; @@ -117,10 +117,9 @@ CREATE TABLE treelike ( -- Table: tags -- CREATE TABLE tags ( - tagid varchar NOT NULL, + tagid INTEGER PRIMARY KEY NOT NULL, cd integer NOT NULL, - tag varchar NOT NULL, - PRIMARY KEY (tagid) + tag varchar NOT NULL ); -- diff --git a/t/run/10auto.tl b/t/run/10auto.tl index 6e474a5..7c795f4 100644 --- a/t/run/10auto.tl +++ b/t/run/10auto.tl @@ -4,6 +4,7 @@ my $schema = shift; plan tests => 2; $schema->class("Artist")->load_components(qw/PK::Auto::SQLite/); + # Should just be PK::Auto but this ensures the compat shim works # add an artist without primary key to test Auto my $artist = $schema->resultset("Artist")->create( { name => 'Auto' } ); diff --git a/t/run/11mysql.tl b/t/run/11mysql.tl index 2411b96..234474f 100644 --- a/t/run/11mysql.tl +++ b/t/run/11mysql.tl @@ -20,7 +20,7 @@ $dbh->do("CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY #'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', ''); -MySQLTest::Artist->load_components('PK::Auto::MySQL'); +MySQLTest::Artist->load_components('PK::Auto'); # test primary key handling my $new = MySQLTest::Artist->create({ name => 'foo' }); diff --git a/t/run/12pg.tl b/t/run/12pg.tl index 22c4008..5ffef5c 100644 --- a/t/run/12pg.tl +++ b/t/run/12pg.tl @@ -16,7 +16,7 @@ my $dbh = PgTest->schema->storage->dbh; $dbh->do("CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(255), charfield CHAR(10));"); -PgTest::Artist->load_components('PK::Auto::Pg'); +PgTest::Artist->load_components('PK::Auto'); my $new = PgTest::Artist->create({ name => 'foo' }); diff --git a/t/run/13oracle.tl b/t/run/13oracle.tl index 0bd3060..278e663 100644 --- a/t/run/13oracle.tl +++ b/t/run/13oracle.tl @@ -38,7 +38,7 @@ $dbh->do(qq{ END; }); -OraTest::Artist->load_components('PK::Auto::Oracle'); +OraTest::Artist->load_components('PK::Auto'); OraTest::CD->load_components('PK::Auto::Oracle'); OraTest::Track->load_components('PK::Auto::Oracle'); diff --git a/t/run/145db2.tl b/t/run/145db2.tl new file mode 100644 index 0000000..4c860bf --- /dev/null +++ b/t/run/145db2.tl @@ -0,0 +1,73 @@ +sub run_tests { +my $schema = shift; + +my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_${_}" } qw/DSN USER PASS/}; + +#warn "$dsn $user $pass"; + +plan skip_all, 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test' + unless ($dsn && $user); + +plan tests => 5; + +DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass); + +my $dbh = DB2Test->schema->storage->dbh; + +$dbh->do("DROP TABLE artist;"); + +$dbh->do("CREATE TABLE artist (artistid INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), name VARCHAR(255), charfield CHAR(10));"); + +#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', ''); + +DB2Test::Artist->load_components('PK::Auto'); + +# test primary key handling +my $new = DB2Test::Artist->create({ name => 'foo' }); +ok($new->artistid, "Auto-PK worked"); + +# test LIMIT support +for (1..6) { + DB2Test::Artist->create({ name => 'Artist ' . 
$_ }); +} +my $it = DB2Test::Artist->search( {}, + { rows => 3, + order_by => 'artistid' + } +); +is( $it->count, 3, "LIMIT count ok" ); +is( $it->next->name, "Artist 2", "iterator->next ok" ); +$it->next; +$it->next; +is( $it->next, undef, "next past end of resultset ok" ); + +my $test_type_info = { + 'artistid' => { + 'data_type' => 'INTEGER', + 'is_nullable' => 0, + 'size' => 11 + }, + 'name' => { + 'data_type' => 'VARCHAR', + 'is_nullable' => 1, + 'size' => 255 + }, + 'charfield' => { + 'data_type' => 'VARCHAR', + 'is_nullable' => 1, + 'size' => 10 + }, +}; + + +my $type_info = DB2Test->schema->storage->columns_info_for('artist'); +is_deeply($type_info, $test_type_info, 'columns_info_for - column data types'); + + + +# clean up our mess +$dbh->do("DROP TABLE artist"); + +} + +1; diff --git a/t/run/16joins.tl b/t/run/16joins.tl index 8c8378b..091cf74 100644 --- a/t/run/16joins.tl +++ b/t/run/16joins.tl @@ -7,7 +7,7 @@ BEGIN { eval "use DBD::SQLite"; plan $@ ? ( skip_all => 'needs DBD::SQLite for testing' ) - : ( tests => 41 ); + : ( tests => 42 ); } # figure out if we've got a version of sqlite that is older than 3.2.6, in @@ -148,7 +148,7 @@ is($selects, 1, 'prefetch ran only 1 select statement'); my $cd = $schema->resultset('CD')->find(1, { cols => [qw/title artist.name/], - join => 'artist' + join => { 'artist' => {} } } ); ok(eval { $cd->artist->name eq 'Caterwauler McCrae' }, 'single related column prefetched'); @@ -253,7 +253,16 @@ SKIP: { cmp_ok( $rs->count, '==', 3, "count() ok after group_by on related column" ); } -cmp_ok( scalar $rs->all, '==', 3, "all() returns same count as count() after group_by on related column" ); +$rs = $schema->resultset("Artist")->search( + {}, + { join => [qw/ cds /], group_by => [qw/ me.name /], having =>{ 'MAX(cds.cdid)'=> \'< 5' } } +); + +cmp_ok( $rs->all, '==', 2, "results ok after group_by on related column with a having" ); + +$rs = $rs->search( undef, { having =>{ 'count(*)'=> \'> 2' }}); + +cmp_ok( $rs->all, '==', 1, "count() ok after group_by on related column with a having" ); $rs = $schema->resultset("Artist")->search( { 'cds.title' => 'Spoonful of bees', diff --git a/t/run/21serialize.tl b/t/run/21serialize.tl new file mode 100644 index 0000000..7c746f2 --- /dev/null +++ b/t/run/21serialize.tl @@ -0,0 +1,14 @@ +use Storable; + +sub run_tests { +my $schema = shift; + +plan tests => 1; + +my $artist = $schema->resultset('Artist')->find(1); +my $copy = eval { Storable::dclone($artist) }; +is_deeply($copy, $artist, 'serialize row object works'); + +} + +1; diff --git a/t/run/22cache.tl b/t/run/22cache.tl new file mode 100644 index 0000000..6732fc3 --- /dev/null +++ b/t/run/22cache.tl @@ -0,0 +1,132 @@ +sub run_tests { +my $schema = shift; + +eval "use DBD::SQLite"; +plan skip_all => 'needs DBD::SQLite for testing' if $@; +plan tests => 12; + +my $rs = $schema->resultset("Artist")->search( + { artistid => 1 } +); + +my $artist = $rs->first; + +is( scalar @{ $rs->get_cache }, 0, 'cache is not populated without cache attribute' ); + +$rs = $schema->resultset("Artist")->search( + { 'artistid' => 1 }, + { + prefetch => [qw/ cds /], + cache => 1, + } +); + +use Data::Dumper; $Data::Dumper::Deparse = 1; + +# start test for prefetch SELECT count +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +$artist = $rs->first; +$rs->reset(); + +# make sure artist contains a related resultset for cds +is( ref $artist->{related_resultsets}->{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' ); + 
+# check if $artist->cds->get_cache is populated +is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records'); + +# ensure that $artist->cds returns correct number of objects +is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' ); + +# ensure that $artist->cds->count returns correct value +is( $artist->cds->count, 3, 'artist->cds->count returns correct value' ); + +# ensure that $artist->count_related('cds') returns correct value +is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' ); + +# count the SELECTs +DBI->trace(0, undef); +my $selects = 0; +my $trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; +is($selects, 2, 'only one SQL statement for each cached table'); + +# make sure related_resultset is deleted after object is updated +$artist->set_column('name', 'New Name'); +$artist->update(); + +is( scalar keys %{$artist->{related_resultsets}}, 0, 'related resultsets deleted after update' ); + +# todo: make sure caching works with nested prefetch e.g. $artist->cds->tracks +$rs = $schema->resultset("Artist")->search( + { artistid => 1 }, + { + prefetch => { + cds => 'tags' + }, + cache => 1 + } +); + +# SELECT count for nested has_many prefetch +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +$artist = $rs->first; + +# count the SELECTs +DBI->trace(0, undef); +$selects = 0; +$trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; +is($selects, 3, 'one SQL statement for each cached table with nested prefetch'); + +my @objs; +$artist = $rs->find(1); + +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +my $cds = $artist->cds; +my $tags = $cds->next->tags; +while( my $tag = $tags->next ) { + push @objs, $tag->tagid; #warn "tag:", $tag->ID; +} + +is_deeply( \@objs, [ 1 ], 'first cd has correct tags' ); + +$tags = $cds->next->tags; +@objs = (); +while( my $tag = $tags->next ) { + push @objs, $tag->id; #warn "tag: ", $tag->ID; +} + +is_deeply( \@objs, [ 2, 5, 8 ], 'second cd has correct tags' ); + +# count the SELECTs +DBI->trace(0, undef); +$selects = 0; +$trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; + +is( $selects, 0, 'no additional SQL statements while checking nested data' ); + +} + +1; diff --git a/t/run/22cascade_copy.tl b/t/run/22cascade_copy.tl new file mode 100644 index 0000000..8c682e5 --- /dev/null +++ b/t/run/22cascade_copy.tl @@ -0,0 +1,23 @@ +use strict; +use warnings; + +sub run_tests { +my $schema = shift; + +plan tests => 4; +my $artist = $schema->resultset('Artist')->find(1); +my $artist_cds = $artist->search_related('cds'); +my $cover_band = $artist->copy; +my $cover_cds = $cover_band->search_related('cds'); +cmp_ok($cover_band->id, '!=', $artist->id, 'ok got new column id...'); +is($cover_cds->count, $artist_cds->count, 'duplicated rows count ok'); + +#check multi-keyed +cmp_ok($cover_band->search_related('twokeys')->count, '>', 0, 'duplicated multiPK ok'); + +#and check copying a few relations away +cmp_ok($cover_cds->search_related('tags')->count, '==', + $artist_cds->search_related('tags')->count , 'duplicated count 
ok'); + +} +1;
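
A few usage sketches follow for the behaviour these new and updated tests exercise; class and variable names in them (MyApp::Schema::*, $schema and friends) are illustrative stand-ins rather than code from this merge.

First, the PK::Auto switch seen in t/run/11mysql.tl, 12pg.tl and 13oracle.tl: the storage-specific components still load through a compat shim, but the generic component is now all a result class needs, with the connected storage working out how to fetch the last insert id:

    package MyApp::Schema::Artist;              # illustrative result class
    use base 'DBIx::Class::Core';

    __PACKAGE__->load_components('PK::Auto');   # no ::SQLite / ::Pg / ::Oracle suffix

    __PACKAGE__->table('artist');
    __PACKAGE__->add_columns(
        artistid => { data_type => 'integer', is_auto_increment => 1 },
        name     => { data_type => 'varchar' },
    );
    __PACKAGE__->set_primary_key('artistid');

    # later, against a connected schema:
    #   my $new = $schema->resultset('Artist')->create({ name => 'foo' });
    #   print $new->artistid;   # filled in from the database's auto-increment value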
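The new DB2 test also drives columns_info_for() on the storage object; the exact data_type strings and sizes come back from whatever the driver reports. Roughly:

    my $info = $schema->storage->columns_info_for('artist');

    # e.g. $info->{artistid} = { data_type => 'INTEGER', is_nullable => 0, size => 11 }
    #      $info->{name}     = { data_type => 'VARCHAR', is_nullable => 1, size => 255 }
    print $info->{name}{data_type};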
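For cascade_copy, the relationship attribute sprinkled through the test schema and exercised by 22cascade_copy.tl: when it is true, copy() on a row also duplicates the rows on that relationship (the copy gets a fresh auto-increment id first); setting it to 0 opts a relationship out, as HelperRels.pm does for artist_undirected_maps. A sketch with the has_many sugar, again with placeholder class names:

    package MyApp::Schema::Artist;
    # ... table, columns and primary key as in the previous sketch ...

    __PACKAGE__->has_many(
        cds => 'MyApp::Schema::CD', 'artist',
        { cascade_copy => 1 },   # duplicate this artist's CDs when the artist is copied
    );
    __PACKAGE__->has_many(
        audit_rows => 'MyApp::Schema::Audit', 'artist',
        { cascade_copy => 0 },   # but never copy these along
    );

    # later:
    #   my $artist = $schema->resultset('Artist')->find(1);
    #   my $clone  = $artist->copy;    # new row, new id
    #   # $clone->cds->count now matches $artist->cds->count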
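The extra cases in t/run/16joins.tl show group_by and having as ordinary resultset attributes; having takes the same data structure as a where clause, including literal SQL via scalar references, and can be layered on with a further search(). Mirroring the new tests:

    my $rs = $schema->resultset('Artist')->search(
        {},
        {
            join     => [qw/ cds /],
            group_by => [qw/ me.name /],
            having   => { 'MAX(cds.cdid)' => \'< 5' },   # literal SQL on the right
        },
    );
    my @grouped = $rs->all;

    # narrow the same resultset further:
    my $narrower = $rs->search(undef, { having => { 'count(*)' => \'> 2' } });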
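t/run/22cache.tl exercises the resultset cache: a cache => 1 attribute (here combined with prefetch) makes the first fetch populate a per-resultset cache, later calls on the related resultsets are answered from it without extra SELECTs, and updating the row clears its cached related resultsets. In outline:

    my $rs = $schema->resultset('Artist')->search(
        { artistid => 1 },
        {
            prefetch => [qw/ cds /],
            cache    => 1,                       # keep the fetched rows in the resultset
        },
    );

    my $artist = $rs->first;                     # issues the initial SELECTs
    my @cds    = $artist->cds;                   # answered from the cache
    my $count  = $artist->count_related('cds');  # likewise, no extra query

    $artist->set_column(name => 'New Name');
    $artist->update;                             # drops the row's cached related resultsets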
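Finally, the new 21serialize.tl test checks that row objects survive a Storable round trip. Assuming result classes set up the way the test schema is, a deep copy of a row is simply:

    use Storable qw(dclone);

    my $artist = $schema->resultset('Artist')->find(1);
    my $copy   = dclone($artist);   # compares is_deeply-equal to the original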