From: Matt S Trout Date: Fri, 3 Mar 2006 15:35:41 +0000 (+0000) Subject: Merge 'trunk' into 'DBIx-Class-current' X-Git-Tag: v0.06000~60^2~64 X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=39e45539f3373a1f84da5cbefc5b9919484ee384;hp=ec30888d452dd0529316caa75fa21d2e9da95451;p=dbsrgits%2FDBIx-Class.git Merge 'trunk' into 'DBIx-Class-current' r8257@obrien (orig r996): ningu | 2006-03-01 04:42:50 +0000 try to fix PK::Auto::Pg for sequences with quoted names r8259@obrien (orig r998): blblack | 2006-03-01 05:38:51 +0000 dbh->{InactiveDestroy} when reconnecting in child process (prevents auto-disconnecting a dbh still in use by the parent) r8260@obrien (orig r999): blblack | 2006-03-01 05:58:00 +0000 t/50fork.t made a little more resilient r8261@obrien (orig r1000): jguenther | 2006-03-01 08:34:34 +0000 - Changed documentation to use txn_do() for transactions - Fixed Storage::DBI trace such that each bind parameter is quoted on output, separated by commas - Fixed a couple typos in documentation r8263@obrien (orig r1002): zarquon | 2006-03-02 02:10:03 +0000 Altered example to populate the db with DBIC r8264@obrien (orig r1003): zarquon | 2006-03-02 02:11:07 +0000 Moved Example.pod to ExampleSchema.pod r8265@obrien (orig r1004): zarquon | 2006-03-02 02:12:24 +0000 Fixed a typo in the SQL::Translator section (missing comma in has assignment) r8266@obrien (orig r1005): blblack | 2006-03-02 11:29:53 +0000 add update_or_create proxy method r8269@obrien (orig r1008): zarquon | 2006-03-03 00:24:49 +0000 Changed the name of CD class to Cd to be compatible with default SchemaLoader monikerization r8273@obrien (orig r1011): matthewt | 2006-03-03 15:18:27 +0000 Fix to update(\%args) with inflation from test case by Peter Rabbitson r8274@obrien (orig r1012): matthewt | 2006-03-03 15:23:14 +0000 Missing svk add, as usual r8275@obrien (orig r1013): matthewt | 2006-03-03 15:35:13 +0000 MANIFEST nuked out of repo --- diff --git a/Changes b/Changes index 97bb9fe..57f845f 100644 --- a/Changes +++ b/Changes @@ -1,5 +1,16 @@ Revision history for DBIx::Class + - add_components() doesn't prepend base when comp. prefixed with + + - $schema->deploy + - HAVING support + - prefetch for has_many + - PK::Auto::* no longer required since Storage::DBI::* handle auto-inc + - minor tweak to tests for join edge case + - added cascade_copy relationship attribute + (sponsored by Airspace Software, http://www.airspace.co.uk/) + - clean up set_from_related + - made copy() automatically null out auto-inc columns + 0.05007 2006-02-24 00:59:00 - tweak to Componentised for Class::C3 0.11 - fixes for auto-inc under MSSQL @@ -23,10 +34,7 @@ Revision history for DBIx::Class 0.05003 2006-02-08 17:50:20 - add component_class accessors and use them for *_class - small fixes to Serialize and ResultSetManager - - prevent accidental table-wide update/delete on row-object - from PK-less table - rollback on disconnect, and disconnect on DESTROY - - fixes to deep search and search_relateduser 0.05002 2006-02-06 12:12:03 - Added recommends for Class::Inspector @@ -130,7 +138,6 @@ Revision history for DBIx::Class 0.03004 - Added an || '' to the CDBICompat stringify to avoid null warnings - Updated name section for manual pods - 0.03003 2005-11-03 17:00:00 - POD fixes. - Changed use to require in Relationship/Base to avoid import. diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 77fddc2..e638295 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -160,10 +160,14 @@ Jesper Krogh Brandon Black +Christopher H. 
Laco + Scotty Allen sc_ +Robert Sedlacek + Justin Guenther =head1 LICENSE diff --git a/lib/DBIx/Class/Componentised.pm b/lib/DBIx/Class/Componentised.pm index d4a6641..737e24b 100644 --- a/lib/DBIx/Class/Componentised.pm +++ b/lib/DBIx/Class/Componentised.pm @@ -20,7 +20,7 @@ sub inject_base { sub load_components { my $class = shift; my $base = $class->component_base_class; - my @comp = map { "${base}::$_" } grep { $_ !~ /^#/ } @_; + my @comp = map { /^\+(.*)$/ ? $1 : "${base}::$_" } grep { $_ !~ /^#/ } @_; $class->_load_components(@comp); Class::C3::reinitialize(); } diff --git a/lib/DBIx/Class/Core.pm b/lib/DBIx/Class/Core.pm index 303d1cc..455e741 100644 --- a/lib/DBIx/Class/Core.pm +++ b/lib/DBIx/Class/Core.pm @@ -7,6 +7,7 @@ no warnings 'qw'; use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/ + Serialize::Storable InflateColumn Relationship PK diff --git a/lib/DBIx/Class/DB.pm b/lib/DBIx/Class/DB.pm index 2051b01..62d93a2 100644 --- a/lib/DBIx/Class/DB.pm +++ b/lib/DBIx/Class/DB.pm @@ -14,7 +14,7 @@ __PACKAGE__->load_components(qw/ResultSetProxy/); sub storage { shift->schema_instance(@_)->storage; } sub resultset_instance { - my $class = shift; + my $class = ref $_[0] || $_[0]; my $source = $class->result_source_instance; if ($source->result_class ne $class) { $source = $source->new($source); diff --git a/lib/DBIx/Class/PK/Auto.pm b/lib/DBIx/Class/PK/Auto.pm index c2bb440..64c8c83 100644 --- a/lib/DBIx/Class/PK/Auto.pm +++ b/lib/DBIx/Class/PK/Auto.pm @@ -53,24 +53,17 @@ sub insert { my ($self, @rest) = @_; my $ret = $self->next::method(@rest); - # if all primaries are already populated, skip auto-inc - my $populated = 0; - map { $populated++ if defined $self->get_column($_) } $self->primary_columns; - return $ret if ( $populated == scalar $self->primary_columns ); - - my ($pri, $too_many) = - (grep { $self->column_info($_)->{'auto_increment'} } - $self->primary_columns) - || $self->primary_columns; + my ($pri, $too_many) = grep { !defined $self->get_column($_) } $self->primary_columns; + return $ret unless defined $pri; # if all primaries are already populated, skip auto-inc $self->throw_exception( "More than one possible key found for auto-inc on ".ref $self ) - if $too_many; - unless (defined $self->get_column($pri)) { - $self->throw_exception( "Can't auto-inc for $pri on ".ref $self.": no _last_insert_id method" ) - unless $self->can('last_insert_id'); - my $id = $self->last_insert_id; - $self->throw_exception( "Can't get last insert id" ) unless $id; - $self->store_column($pri => $id); - } + if defined $too_many; + + my $storage = $self->result_source->storage; + $self->throw_exception( "Missing primary key but Storage doesn't support last_insert_id" ) unless $storage->can('last_insert_id'); + my $id = $storage->last_insert_id($self->result_source,$pri); + $self->throw_exception( "Can't get last insert id" ) unless $id; + $self->store_column($pri => $id); + return $ret; } @@ -81,7 +74,12 @@ associated with looking up the sequence automatically. 
=cut -__PACKAGE__->mk_classdata('sequence'); +sub sequence { + my ($self,$seq) = @_; + foreach my $pri ($self->primary_columns) { + $self->column_info($pri)->{sequence} = $seq; + } +} 1; diff --git a/lib/DBIx/Class/PK/Auto/DB2.pm b/lib/DBIx/Class/PK/Auto/DB2.pm index f05f781..29ecf78 100644 --- a/lib/DBIx/Class/PK/Auto/DB2.pm +++ b/lib/DBIx/Class/PK/Auto/DB2.pm @@ -7,39 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id -{ - my ($self) = @_; - - my $dbh = $self->result_source->storage->dbh; - my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3); - $sth->execute(); - - my @res = $sth->fetchrow_array(); - - return @res ? $res[0] : undef; - -} - 1; =head1 NAME -DBIx::Class::PK::Auto::DB2 - Automatic primary key class for DB2 +DBIx::Class::PK::Auto::DB2 - (DEPRECATED) Automatic primary key class for DB2 =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::DB2 Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for DB2. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Jess Robinson +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/MSSQL.pm b/lib/DBIx/Class/PK/Auto/MSSQL.pm index a3c4329..8ac2778 100644 --- a/lib/DBIx/Class/PK/Auto/MSSQL.pm +++ b/lib/DBIx/Class/PK/Auto/MSSQL.pm @@ -1,40 +1,28 @@ -package DBIx::Class::PK::Auto::MSSQL; - -use strict; -use warnings; - -use base qw/DBIx::Class/; - -__PACKAGE__->load_components(qw/PK::Auto/); - -sub last_insert_id { - my( $id ) = $_[0]->result_source->storage->dbh->selectrow_array( - 'SELECT @@IDENTITY' ); - return $id; -} - -1; - -=head1 NAME - -DBIx::Class::PK::Auto::MSSQL - Automatic primary key class for MSSQL - -=head1 SYNOPSIS - - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::MSSQL Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for MSSQL. - -=head1 AUTHORS - -Brian Cassidy - -=head1 LICENSE - -You may distribute this code under the same terms as Perl itself. - -=cut +package DBIx::Class::PK::Auto::MSSQL; + +use strict; +use warnings; + +use base qw/DBIx::Class/; + +__PACKAGE__->load_components(qw/PK::Auto/); + +1; + +=head1 NAME + +DBIx::Class::PK::Auto::MSSQL - (DEPRECATED) Automatic primary key class for MSSQL + +=head1 SYNOPSIS + +Just load PK::Auto instead; auto-inc is now handled by Storage. + +=head1 AUTHORS + +Matt S Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/PK/Auto/MySQL.pm b/lib/DBIx/Class/PK/Auto/MySQL.pm index e65bd39..7a1f78e 100644 --- a/lib/DBIx/Class/PK/Auto/MySQL.pm +++ b/lib/DBIx/Class/PK/Auto/MySQL.pm @@ -7,29 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - return $_[0]->result_source->storage->dbh->{mysql_insertid}; -} - 1; =head1 NAME -DBIx::Class::PK::Auto::MySQL - Automatic primary key class for MySQL +DBIx::Class::PK::Auto::MySQL - (DEPRECATED) Automatic primary key class for MySQL =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::MySQL Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for MySQL. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Matt S. 
Trout +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/Oracle.pm b/lib/DBIx/Class/PK/Auto/Oracle.pm index 7455408..437246b 100644 --- a/lib/DBIx/Class/PK/Auto/Oracle.pm +++ b/lib/DBIx/Class/PK/Auto/Oracle.pm @@ -3,71 +3,23 @@ package DBIx::Class::PK::Auto::Oracle; use strict; use warnings; -use Carp qw/croak/; - use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - my $self = shift; - $self->get_autoinc_seq unless $self->{_autoinc_seq}; - my $sql = "SELECT " . $self->{_autoinc_seq} . ".currval FROM DUAL"; - my ($id) = $self->result_source->storage->dbh->selectrow_array($sql); - return $id; -} - -sub get_autoinc_seq { - my $self = shift; - - # return the user-defined sequence if known - if ($self->sequence) { - return $self->{_autoinc_seq} = $self->sequence; - } - - # look up the correct sequence automatically - my $dbh = $self->result_source->storage->dbh; - my $sql = qq{ - SELECT trigger_body FROM ALL_TRIGGERS t - WHERE t.table_name = ? - AND t.triggering_event = 'INSERT' - AND t.status = 'ENABLED' - }; - # trigger_body is a LONG - $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024); - my $sth = $dbh->prepare($sql); - $sth->execute( uc($self->result_source->name) ); - while (my ($insert_trigger) = $sth->fetchrow_array) { - if ($insert_trigger =~ m!(\w+)\.nextval!i ) { - $self->{_autoinc_seq} = uc($1); - } - } - unless ($self->{_autoinc_seq}) { - croak "Unable to find a sequence INSERT trigger on table '" . $self->_table_name . "'."; - } -} - 1; =head1 NAME -DBIx::Class::PK::Auto::Oracle - Automatic primary key class for Oracle +DBIx::Class::PK::Auto::Oracle - (DEPRECATED) Automatic primary key class for Oracle =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::Oracle Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for Oracle. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Andy Grundman - -Scott Connelly +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/Pg.pm b/lib/DBIx/Class/PK/Auto/Pg.pm index 166a772..00cd24f 100644 --- a/lib/DBIx/Class/PK/Auto/Pg.pm +++ b/lib/DBIx/Class/PK/Auto/Pg.pm @@ -7,55 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - my $self = shift; - $self->get_autoinc_seq unless $self->{_autoinc_seq}; - $self->result_source->storage->dbh->last_insert_id(undef,undef,undef,undef, - {sequence=>$self->{_autoinc_seq}}); -} - -sub get_autoinc_seq { - my $self = shift; - - # return the user-defined sequence if known - if ($self->sequence) { - return $self->{_autoinc_seq} = $self->sequence; - } - - my @pri = $self->primary_columns; - my $dbh = $self->result_source->storage->dbh; - my ($schema,$table) = $self->table =~ /^(.+)\.(.+)$/ ? 
($1,$2) : (undef,$self->table); - while (my $col = shift @pri) { - my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref; - if (defined $info->[12] and $info->[12] =~ - /^nextval\('([^']+)'::(?:text|regclass)\)/) - { - $self->{_autoinc_seq} = $1; - #$self->{_autoinc_seq} =~ s/"//g; - last; - } - } -} - 1; =head1 NAME -DBIx::Class::PK::Auto::Pg - Automatic primary key class for PostgreSQL +DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::Pg Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for PostgreSQL. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Marcus Ramberg +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/PK/Auto/SQLite.pm b/lib/DBIx/Class/PK/Auto/SQLite.pm index e405eac..de42922 100644 --- a/lib/DBIx/Class/PK/Auto/SQLite.pm +++ b/lib/DBIx/Class/PK/Auto/SQLite.pm @@ -7,29 +7,19 @@ use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/PK::Auto/); -sub last_insert_id { - return $_[0]->result_source->storage->dbh->func('last_insert_rowid'); -} - 1; =head1 NAME -DBIx::Class::PK::Auto::SQLite - Automatic primary key class for SQLite +DBIx::Class::PK::Auto::SQLite - (DEPRECATED) Automatic primary key class for SQLite =head1 SYNOPSIS - # In your table classes - __PACKAGE__->load_components(qw/PK::Auto::SQLite Core/); - __PACKAGE__->set_primary_key('id'); - -=head1 DESCRIPTION - -This class implements autoincrements for SQLite. +Just load PK::Auto instead; auto-inc is now handled by Storage. =head1 AUTHORS -Matt S. Trout +Matt S Trout =head1 LICENSE diff --git a/lib/DBIx/Class/Relationship.pm b/lib/DBIx/Class/Relationship.pm index e9adb82..22be1d3 100644 --- a/lib/DBIx/Class/Relationship.pm +++ b/lib/DBIx/Class/Relationship.pm @@ -99,8 +99,12 @@ whereas C uses a left join. =head2 many_to_many - __PACKAGE__->many_to_many( 'accessorname' => 'a_to_b', 'table_b' ); - my @f_objs = $obj_a->accessorname; + __PACKAGE__->many_to_many( 'accessorname' => 'a_to_b', 'table_b' ); + my @f_objs = $obj_a->accessorname; + +Creates an accessor bridging two relationships; not strictly a relationship +in its own right, although the accessor will return a resultset or collection +of objects just as a has_many would. =cut diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index e04b082..c838d69 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -86,42 +86,7 @@ sub register_relationship { } =cut sub search_related { - my $self = shift; - die "Can't call *_related as class methods" unless ref $self; - my $rel = shift; - my $attrs = { }; - if (@_ > 1 && ref $_[$#_] eq 'HASH') { - $attrs = { %{ pop(@_) } }; - } - my $rel_obj = $self->relationship_info($rel); - $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj; - $attrs = { %{$rel_obj->{attrs} || {}}, %{$attrs || {}} }; - - $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1)); - my $query = ((@_ > 1) ? 
{@_} : shift); - - my ($cond) = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self); - if (ref $cond eq 'ARRAY') { - $cond = [ map { my %hash; - foreach my $key (keys %{$_}) { - unless ($key =~ m/\./) { - $hash{"me.$key"} = $_->{$key}; - } else { - $hash{$key} = $_->{$key}; - } - }; \%hash; } @$cond ]; - } else { - foreach my $key (keys %$cond) { - unless ($key =~ m/\./) { - $cond->{"me.$key"} = delete $cond->{$key}; - } - } - } - $query = ($query ? { '-and' => [ $cond, $query ] } : $cond); - #use Data::Dumper; warn Dumper($cond); - #warn $rel_obj->{class}." $meth $cond ".join(', ', @{$attrs->{bind}||[]}); - return $self->result_source->related_source($rel - )->resultset->search($query, $attrs); + return shift->related_resultset(shift)->search(@_); } =head2 count_related @@ -144,7 +109,9 @@ sub count_related { sub create_related { my $self = shift; my $rel = shift; - return $self->search_related($rel)->create(@_); + my $obj = $self->search_related($rel)->create(@_); + delete $self->{related_resultsets}->{$rel}; + return $obj; } =head2 new_related @@ -198,15 +165,9 @@ sub set_from_related { my $f_class = $self->result_source->schema->class($rel_obj->{class}); $self->throw_exception( "Object $f_obj isn't a ".$f_class ) unless $f_obj->isa($f_class); - foreach my $key (keys %$cond) { - next if ref $cond->{$key}; # Skip literals and complex conditions - $self->throw_exception("set_from_related can't handle $key as key") - unless $key =~ m/^foreign\.([^\.]+)$/; - my $val = $f_obj->get_column($1); - $self->throw_exception("set_from_related can't handle ".$cond->{$key}." as value") - unless $cond->{$key} =~ m/^self\.([^\.]+)$/; - $self->set_column($1 => $val); - } + $self->set_columns( + $self->result_source->resolve_condition( + $rel_obj->{cond}, $f_obj, $rel)); return 1; } @@ -230,11 +191,52 @@ sub update_from_related { sub delete_related { my $self = shift; - return $self->search_related(@_)->delete; + my $obj = $self->search_related(@_)->delete; + delete $self->{related_resultsets}->{$_[0]}; + return $obj; } 1; +=head2 related_resultset($name) + +Returns a L for the relationship named $name. + + $rs = $obj->related_resultset('related_table'); + +=cut + +sub related_resultset { + my $self = shift; + $self->throw_exception("Can't call *_related as class methods") unless ref $self; + my $rel = shift; + my $rel_obj = $self->relationship_info($rel); + $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj; + + return $self->{related_resultsets}{$rel} ||= do { + my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + $attrs = { %{$rel_obj->{attrs} || {}}, %$attrs }; + + $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1)); + my $query = ((@_ > 1) ? {@_} : shift); + + my $cond = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self); + if (ref $cond eq 'ARRAY') { + $cond = [ map { my $hash; + foreach my $key (keys %$_) { + my $newkey = $key =~ /\./ ? "me.$key" : $key; + $hash->{$newkey} = $_->{$key}; + }; $hash } @$cond ]; + } else { + foreach my $key (grep { ! /\./ } keys %$cond) { + $cond->{"me.$key"} = delete $cond->{$key}; + } + } + $query = ($query ? { '-and' => [ $cond, $query ] } : $cond); + $self->result_source->related_source($rel)->resultset->search($query, $attrs); + }; +} + =head1 AUTHORS Matt S. 
Trout diff --git a/lib/DBIx/Class/Relationship/HasMany.pm b/lib/DBIx/Class/Relationship/HasMany.pm index 716c292..1e61c74 100644 --- a/lib/DBIx/Class/Relationship/HasMany.pm +++ b/lib/DBIx/Class/Relationship/HasMany.pm @@ -35,6 +35,7 @@ sub has_many { { accessor => 'multi', join_type => 'LEFT', cascade_delete => 1, + cascade_copy => 1, %{$attrs||{}} } ); } diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index f1ca440..532e8b2 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -102,6 +102,10 @@ sub new { } $attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct}; + $attrs->{order_by} = [ $attrs->{order_by} ] + if $attrs->{order_by} && !ref($attrs->{order_by}); + $attrs->{order_by} ||= []; + if (my $prefetch = delete $attrs->{prefetch}) { foreach my $p (ref $prefetch eq 'ARRAY' ? (@{$prefetch}) : ($prefetch)) { @@ -115,7 +119,8 @@ sub new { push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias})) unless $seen{$p}; } - my @prefetch = $source->resolve_prefetch($p, $attrs->{alias}); + my @prefetch = $source->resolve_prefetch( + $p, $attrs->{alias}, {}, $attrs->{order_by}); #die Dumper \@cols; push(@{$attrs->{select}}, map { $_->[0] } @prefetch); push(@{$attrs->{as}}, map { $_->[1] } @prefetch); @@ -155,25 +160,47 @@ call it as C. sub search { my $self = shift; - #use Data::Dumper;warn Dumper(@_); - - my $attrs = { %{$self->{attrs}} }; - if (@_ > 1 && ref $_[$#_] eq 'HASH') { - $attrs = { %$attrs, %{ pop(@_) } }; - } + my $rs; + if( @_ ) { + + my $attrs = { %{$self->{attrs}} }; + my $having = delete $attrs->{having}; + if (@_ > 1 && ref $_[$#_] eq 'HASH') { + $attrs = { %$attrs, %{ pop(@_) } }; + } - my $where = (@_ ? ((@_ == 1 || ref $_[0] eq "HASH") ? shift : {@_}) : undef()); - if (defined $where) { - $where = (defined $attrs->{where} + my $where = (@_ + ? ((@_ == 1 || ref $_[0] eq "HASH") + ? shift + : ((@_ % 2) + ? $self->throw_exception( + "Odd number of arguments to search") + : {@_})) + : undef()); + if (defined $where) { + $where = (defined $attrs->{where} ? { '-and' => [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } $where, $attrs->{where} ] } : $where); - $attrs->{where} = $where; - } + $attrs->{where} = $where; + } - my $rs = (ref $self)->new($self->result_source, $attrs); + if (defined $having) { + $having = (defined $attrs->{having} + ? { '-and' => + [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } + $having, $attrs->{having} ] } + : $having); + $attrs->{having} = $having; + } + $rs = (ref $self)->new($self->result_source, $attrs); + } + else { + $rs = $self; + $rs->reset(); + } return (wantarray ? $rs->all : $rs); } @@ -246,7 +273,9 @@ sub find { $query->{$self->{attrs}{alias}.'.'.$_} = delete $query->{$_}; } #warn Dumper($query); - return $self->search($query,$attrs)->next; + return (keys %$attrs + ? $self->search($query,$attrs)->single + : $self->single($query)); } =head2 search_related @@ -259,22 +288,7 @@ records. =cut sub search_related { - my ($self, $rel, @rest) = @_; - my $rel_obj = $self->result_source->relationship_info($rel); - $self->throw_exception( - "No such relationship ${rel} in search_related") - unless $rel_obj; - my $rs = $self->search(undef, { join => $rel }); - my $alias = ($rs->{attrs}{seen_join}{$rel} > 1 - ? 
join('_', $rel, $rs->{attrs}{seen_join}{$rel}) - : $rel); - return $self->result_source->schema->resultset($rel_obj->{class} - )->search( undef, - { %{$rs->{attrs}}, - alias => $alias, - select => undef(), - as => undef() } - )->search(@rest); + return shift->related_resultset(shift)->search(@_); } =head2 cursor @@ -292,6 +306,34 @@ sub cursor { $attrs->{where},$attrs); } +=head2 single + +Inflates the first result without creating a cursor + +=cut + +sub single { + my ($self, $extra) = @_; + my ($attrs) = $self->{attrs}; + $attrs = { %$attrs }; + if ($extra) { + if (defined $attrs->{where}) { + $attrs->{where} = { + '-and' + => [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ } + delete $attrs->{where}, $extra ] + }; + } else { + $attrs->{where} = $extra; + } + } + my @data = $self->result_source->storage->select_single( + $self->{from}, $attrs->{select}, + $attrs->{where},$attrs); + return (@data ? $self->_construct_object(@data) : ()); +} + + =head2 search_like Perform a search, but use C instead of equality as the condition. Note @@ -346,7 +388,18 @@ Can be used to efficiently iterate over records in the resultset: sub next { my ($self) = @_; - my @row = $self->cursor->next; + my $cache; + if( @{$cache = $self->{all_cache} || []}) { + $self->{all_cache_position} ||= 0; + my $obj = $cache->[$self->{all_cache_position}]; + $self->{all_cache_position}++; + return $obj; + } + if ($self->{attrs}{cache}) { + $self->{all_cache_position} = 0; + return ($self->all)[0]; + } + my @row = delete $self->{stashed_row} || $self->cursor->next; # warn Dumper(\@row); use Data::Dumper; return unless (@row); return $self->_construct_object(@row); @@ -354,23 +407,31 @@ sub next { sub _construct_object { my ($self, @row) = @_; + my @row_orig = @row; # copy @row for key comparison later, because @row will change my @as = @{ $self->{attrs}{as} }; +#use Data::Dumper; warn Dumper \@as; #warn "@cols -> @row"; my $info = [ {}, {} ]; foreach my $as (@as) { + my $rs = $self; my $target = $info; my @parts = split(/\./, $as); my $col = pop(@parts); foreach my $p (@parts) { $target = $target->[1]->{$p} ||= []; + + $rs = $rs->related_resultset($p) if $rs->{attrs}->{cache}; } - $target->[0]->{$col} = shift @row; + + $target->[0]->{$col} = shift @row + if ref($target->[0]) ne 'ARRAY'; # arrayref is pre-inflated objects, do not overwrite } #use Data::Dumper; warn Dumper(\@as, $info); my $new = $self->result_source->result_class->inflate_result( $self->result_source, @$info); $new = $self->{attrs}{record_filter}->($new) if exists $self->{attrs}{record_filter}; + return $new; } @@ -399,15 +460,19 @@ sub count { my $self = shift; return $self->search(@_)->count if @_ && defined $_[0]; unless (defined $self->{count}) { + return scalar @{ $self->get_cache } + if @{ $self->get_cache }; my $group_by; my $select = { 'count' => '*' }; - if( $group_by = delete $self->{attrs}{group_by} ) { + my $attrs = { %{ $self->{attrs} } }; + if( $group_by = delete $attrs->{group_by} ) { + delete $attrs->{having}; my @distinct = (ref $group_by ? 
@$group_by : ($group_by)); # todo: try CONCAT for multi-column pk my @pk = $self->result_source->primary_columns; if( scalar(@pk) == 1 ) { my $pk = shift(@pk); - my $alias = $self->{attrs}{alias}; + my $alias = $attrs->{alias}; my $re = qr/^($alias\.)?$pk$/; foreach my $column ( @distinct) { if( $column =~ $re ) { @@ -421,14 +486,12 @@ sub count { #use Data::Dumper; die Dumper $select; } - my $attrs = { %{ $self->{attrs} }, - select => $select, - as => [ 'count' ] }; + $attrs->{select} = $select; + $attrs->{as} = [ 'count' ]; # offset, order by and page are not needed to count. record_filter is cdbi delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/; ($self->{count}) = (ref $self)->new($self->result_source, $attrs)->cursor->next; - $self->{attrs}{group_by} = $group_by; } return 0 unless $self->{count}; my $count = $self->{count}; @@ -455,6 +518,14 @@ is returned in list context. sub all { my ($self) = @_; + return @{ $self->get_cache } + if @{ $self->get_cache }; + if( $self->{attrs}->{cache} ) { + my @obj = map { $self->_construct_object(@$_); } + $self->cursor->all; + $self->set_cache( \@obj ); + return @obj; + } return map { $self->_construct_object(@$_); } $self->cursor->all; } @@ -467,6 +538,7 @@ Resets the resultset's cursor, so you can iterate through the elements again. sub reset { my ($self) = @_; + $self->{all_cache_position} = 0; $self->cursor->reset; return $self; } @@ -741,6 +813,89 @@ sub update_or_create { return $row; } +=head2 get_cache + +Gets the contents of the cache for the resultset. + +=cut + +sub get_cache { + my $self = shift; + return $self->{all_cache} || []; +} + +=head2 set_cache + +Sets the contents of the cache for the resultset. Expects an arrayref of objects of the same class as those produced by the resultset. + +=cut + +sub set_cache { + my ( $self, $data ) = @_; + $self->throw_exception("set_cache requires an arrayref") + if ref $data ne 'ARRAY'; + my $result_class = $self->result_source->result_class; + foreach( @$data ) { + $self->throw_exception("cannot cache object of type '$_', expected '$result_class'") + if ref $_ ne $result_class; + } + $self->{all_cache} = $data; +} + +=head2 clear_cache + +Clears the cache for the resultset. + +=cut + +sub clear_cache { + my $self = shift; + $self->set_cache([]); +} + +=head2 related_resultset + +Returns a related resultset for the supplied relationship name. + + $rs = $rs->related_resultset('foo'); + +=cut + +sub related_resultset { + my ( $self, $rel, @rest ) = @_; + $self->{related_resultsets} ||= {}; + my $resultsets = $self->{related_resultsets}; + if( !exists $resultsets->{$rel} ) { + #warn "fetching related resultset for rel '$rel'"; + my $rel_obj = $self->result_source->relationship_info($rel); + $self->throw_exception( + "search_related: result source '" . $self->result_source->name . + "' has no such relationship ${rel}") + unless $rel_obj; #die Dumper $self->{attrs}; + my $rs = $self->search(undef, { join => $rel }); + #if( $self->{attrs}->{cache} ) { + # $rs = $self->search(undef); + #} + #else { + #} + #use Data::Dumper; die Dumper $rs->{attrs};#$rs = $self->search( undef ); + #use Data::Dumper; warn Dumper $self->{attrs}, Dumper $rs->{attrs}; + my $alias = (defined $rs->{attrs}{seen_join}{$rel} + && $rs->{attrs}{seen_join}{$rel} > 1 + ? 
join('_', $rel, $rs->{attrs}{seen_join}{$rel}) + : $rel); + $resultsets->{$rel} = + $self->result_source->schema->resultset($rel_obj->{class} + )->search( undef, + { %{$rs->{attrs}}, + alias => $alias, + select => undef(), + as => undef() } + )->search(@rest); + } + return $resultsets->{$rel}; +} + =head2 throw_exception See Schema's throw_exception diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 41dc14e..8b3863f 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -7,6 +7,7 @@ use DBIx::Class::ResultSet; use Carp::Clan qw/^DBIx::Class/; use Storable; +use Scalar::Util qw/weaken/; use base qw/DBIx::Class/; __PACKAGE__->load_components(qw/AccessorGroup/); @@ -33,7 +34,7 @@ retrieved, most usually a table (see L) sub new { my ($class, $attrs) = @_; $class = ref $class if ref $class; - my $new = bless({ %{$attrs || {}} }, $class); + my $new = bless({ %{$attrs || {}}, _resultset => undef }, $class); $new->{resultset_class} ||= 'DBIx::Class::ResultSet'; $new->{resultset_attributes} = { %{$new->{resultset_attributes} || {}} }; $new->{_ordered_columns} = [ @{$new->{_ordered_columns}||[]}]; @@ -484,6 +485,8 @@ sub resolve_condition { #warn "$self $k $for $v"; $ret{$k} = $for->get_column($v); #warn %ret; + } elsif (ref $as) { # reverse object + $ret{$v} = $as->get_column($k); } else { $ret{"${as}.${k}"} = "${for}.${v}"; } @@ -542,35 +545,41 @@ in the supplied relationships. Examples: =cut sub resolve_prefetch { - my ($self, $pre, $alias, $seen) = @_; + my ($self, $pre, $alias, $seen, $order) = @_; $seen ||= {}; use Data::Dumper; #$alias ||= $self->name; #warn $alias, Dumper $pre; if( ref $pre eq 'ARRAY' ) { - return map { $self->resolve_prefetch( $_, $alias, $seen ) } @$pre; + return map { $self->resolve_prefetch( $_, $alias, $seen, $order ) } @$pre; } elsif( ref $pre eq 'HASH' ) { my @ret = map { - $self->resolve_prefetch($_, $alias, $seen), + $self->resolve_prefetch($_, $alias, $seen, $order), $self->related_source($_)->resolve_prefetch( - $pre->{$_}, "${alias}.$_", $seen) + $pre->{$_}, "${alias}.$_", $seen, $order) } keys %$pre; #die Dumper \@ret; return @ret; } elsif( ref $pre ) { - $self->throw_exception( "don't know how to resolve prefetch reftype " . ref $pre); + $self->throw_exception( + "don't know how to resolve prefetch reftype ".ref($pre)); } else { my $count = ++$seen->{$pre}; my $as = ($count > 1 ? "${pre}_${count}" : $pre); my $rel_info = $self->relationship_info( $pre ); - $self->throw_exception( $self->name . " has no such relationship '$pre'" ) unless $rel_info; + $self->throw_exception( $self->name . " has no such relationship '$pre'" ) + unless $rel_info; my $as_prefix = ($alias =~ /^.*?\.(.*)$/ ? $1.'.' : ''); + my $rel_source = $self->related_source($pre); + push(@$order, + map { "${as}.$_" } + (@{$rel_info->{order_by}||[]}, $rel_source->primary_columns)); return map { [ "${as}.$_", "${as_prefix}${pre}.$_", ] } - $self->related_source($pre)->columns; + $rel_source->columns; #warn $alias, Dumper (\@ret); #return @ret; } @@ -612,7 +621,12 @@ Specify here any attributes you wish to pass to your specialised resultset. 
sub resultset { my $self = shift; - return $self->resultset_class->new($self, $self->{resultset_attributes}); + return $self->{_resultset} if ref $self->{_resultset} eq $self->resultset_class; + return $self->{_resultset} = do { + my $rs = $self->resultset_class->new($self, $self->{resultset_attributes}); + weaken $rs->result_source; + $rs; + }; } =head2 throw_exception diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 3d2638d..33c8d19 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -66,6 +66,7 @@ sub insert { $source->storage->insert($source->from, { $self->get_columns }); $self->in_storage(1); $self->{_dirty_columns} = {}; + $self->{related_resultsets} = {}; return $self; } @@ -110,6 +111,7 @@ sub update { $self->throw_exception("Can't update ${self}: updated more than one row"); } $self->{_dirty_columns} = {}; + $self->{related_resultsets} = {}; return $self; } @@ -237,9 +239,26 @@ Inserts a new row with the specified changes. sub copy { my ($self, $changes) = @_; - my $new = bless({ _column_data => { %{$self->{_column_data}}} }, ref $self); - $new->set_column($_ => $changes->{$_}) for keys %$changes; - return $new->insert; + $changes ||= {}; + my $col_data = { %{$self->{_column_data}} }; + foreach my $col (keys %$col_data) { + delete $col_data->{$col} + if $self->result_source->column_info($col)->{is_auto_increment}; + } + my $new = bless({ _column_data => $col_data }, ref $self); + $new->set_columns($changes); + $new->insert; + foreach my $rel ($self->result_source->relationships) { + my $rel_info = $self->result_source->relationship_info($rel); + if ($rel_info->{attrs}{cascade_copy}) { + my $resolved = $self->result_source->resolve_condition( + $rel_info->{cond}, $rel, $new); + foreach my $related ($self->search_related($rel)) { + $related->copy($resolved); + } + } + } + return $new; } =head2 store_column @@ -276,25 +295,40 @@ sub inflate_result { }, ref $class || $class); my $schema; - PRE: foreach my $pre (keys %{$prefetch||{}}) { + foreach my $pre (keys %{$prefetch||{}}) { + my $pre_val = $prefetch->{$pre}; my $pre_source = $source->related_source($pre); - $class->throw_exception("Can't prefetch non-existant relationship ${pre}") unless $pre_source; - my $fetched; - unless ($pre_source->primary_columns == grep { exists $prefetch->{$pre}[0]{$_} - and !defined $prefetch->{$pre}[0]{$_} } $pre_source->primary_columns) - { - $fetched = $pre_source->result_class->inflate_result( - $pre_source, @{$prefetch->{$pre}}); - } - my $accessor = $source->relationship_info($pre)->{attrs}{accessor}; - $class->throw_exception("No accessor for prefetched $pre") - unless defined $accessor; - if ($accessor eq 'single') { - $new->{_relationship_data}{$pre} = $fetched; - } elsif ($accessor eq 'filter') { - $new->{_inflated_column}{$pre} = $fetched; + $class->throw_exception("Can't prefetch non-existent relationship ${pre}") + unless $pre_source; + if (ref $pre_val->[0] eq 'ARRAY') { # multi + my @pre_objects; + foreach my $pre_rec (@$pre_val) { + unless ($pre_source->primary_columns == grep { exists $pre_rec->[0]{$_} + and !defined $pre_rec->[0]{$_} } $pre_source->primary_columns) { + next; + } + push(@pre_objects, $pre_source->result_class->inflate_result( + $pre_source, @{$pre_rec})); + } + $new->related_resultset($pre)->set_cache(\@pre_objects); } else { - $class->throw_exception("Don't know how to store prefetched $pre"); + my $fetched; + unless ($pre_source->primary_columns == grep { exists $pre_val->[0]{$_} + and !defined $pre_val->[0]{$_} } 
$pre_source->primary_columns) + { + $fetched = $pre_source->result_class->inflate_result( + $pre_source, @{$pre_val}); + } + my $accessor = $source->relationship_info($pre)->{attrs}{accessor}; + $class->throw_exception("No accessor for prefetched $pre") + unless defined $accessor; + if ($accessor eq 'single') { + $new->{_relationship_data}{$pre} = $fetched; + } elsif ($accessor eq 'filter') { + $new->{_inflated_column}{$pre} = $fetched; + } else { + $class->throw_exception("Prefetch not supported with accessor '$accessor'"); + } } } return $new; diff --git a/lib/DBIx/Class/Schema.pm b/lib/DBIx/Class/Schema.pm index 3b25530..2e6e6c0 100644 --- a/lib/DBIx/Class/Schema.pm +++ b/lib/DBIx/Class/Schema.pm @@ -521,6 +521,18 @@ sub throw_exception { croak @_; } +=head2 deploy + +Attempts to deploy the schema to the current storage + +=cut + +sub deploy { + my ($self) = shift; + $self->throw_exception("Can't deploy without storage") unless $self->storage; + $self->storage->deploy($self); +} + 1; =head1 AUTHORS diff --git a/lib/DBIx/Class/Serialize.pm b/lib/DBIx/Class/Serialize/Storable.pm similarity index 70% rename from lib/DBIx/Class/Serialize.pm rename to lib/DBIx/Class/Serialize/Storable.pm index 345b99c..8066337 100644 --- a/lib/DBIx/Class/Serialize.pm +++ b/lib/DBIx/Class/Serialize/Storable.pm @@ -1,19 +1,18 @@ -package DBIx::Class::Serialize; +package DBIx::Class::Serialize::Storable; use strict; -use Storable qw/freeze thaw/; +use Storable; sub STORABLE_freeze { my ($self,$cloning) = @_; - #return if $cloning; my $to_serialize = { %$self }; delete $to_serialize->{result_source}; - return (freeze($to_serialize)); + return (Storable::freeze($to_serialize)); } sub STORABLE_thaw { my ($self,$cloning,$serialized) = @_; - %$self = %{ thaw($serialized) }; - $self->result_source($self->result_source_instance); + %$self = %{ Storable::thaw($serialized) }; + $self->result_source($self->result_source_instance) if $self->can('result_source_instance'); } 1; @@ -22,12 +21,12 @@ __END__ =head1 NAME - DBIx::Class::Serialize - hooks for Storable freeze/thaw (EXPERIMENTAL) + DBIx::Class::Serialize::Storable - hooks for Storable freeze/thaw (EXPERIMENTAL) =head1 SYNOPSIS # in a table class definition - __PACKAGE__->load_components(qw/Serialize/); + __PACKAGE__->load_components(qw/Serialize::Storable/); # meanwhile, in a nearby piece of code my $obj = $schema->resultset('Foo')->find(12); diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index b415445..0a3e9c4 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -19,8 +19,10 @@ use base qw/SQL::Abstract::Limit/; sub select { my ($self, $table, $fields, $where, $order, @rest) = @_; @rest = (-1) unless defined $rest[0]; - $self->SUPER::select($table, $self->_recurse_fields($fields), - $where, $order, @rest); + local $self->{having_bind} = []; + my ($sql, @ret) = $self->SUPER::select($table, + $self->_recurse_fields($fields), $where, $order, @rest); + return wantarray ? 
($sql, @ret, @{$self->{having_bind}}) : $sql; } sub _emulate_limit { @@ -51,11 +53,18 @@ sub _recurse_fields { sub _order_by { my $self = shift; my $ret = ''; + my @extra; if (ref $_[0] eq 'HASH') { if (defined $_[0]->{group_by}) { $ret = $self->_sqlcase(' group by ') .$self->_recurse_fields($_[0]->{group_by}); } + if (defined $_[0]->{having}) { + my $frag; + ($frag, @extra) = $self->_recurse_where($_[0]->{having}); + push(@{$self->{having_bind}}, @extra); + $ret .= $self->_sqlcase(' having ').$frag; + } if (defined $_[0]->{order_by}) { $ret .= $self->SUPER::_order_by($_[0]->{order_by}); } @@ -208,7 +217,7 @@ sub new { $new->transaction_depth(0); if (defined($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}) && ($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} =~ /=(.+)$/)) { - $new->debugfh(IO::File->new($1, 'w')||croak "Cannot open trace file $1"); + $new->debugfh(IO::File->new($1, 'w')) || $new->throw_exception("Cannot open trace file $1"); } else { $new->debugfh(IO::File->new('>&STDERR')); } @@ -216,6 +225,11 @@ sub new { return $new; } +sub throw_exception { + my ($self, $msg) = @_; + croak($msg); +} + =head1 NAME DBIx::Class::Storage::DBI - DBI storage handler @@ -309,7 +323,11 @@ sub _populate_dbh { my ($self) = @_; my @info = @{$self->connect_info || []}; $self->_dbh($self->_connect(@info)); - + my $driver = $self->_dbh->{Driver}->{Name}; + eval "require DBIx::Class::Storage::DBI::${driver}"; + unless ($@) { + bless $self, "DBIx::Class::Storage::DBI::${driver}"; + } # if on-connect sql statements are given execute them foreach my $sql_statement (@{$self->on_connect_do || []}) { $self->_dbh->do($sql_statement); @@ -329,7 +347,10 @@ sub _connect { return $dbh; } - DBI->connect(@info); + my $dbh = DBI->connect(@info); + $self->throw_exception("DBI Connection failed: $DBI::errstr") + unless $dbh; + $dbh; } =head2 txn_begin @@ -403,20 +424,20 @@ sub _execute { $self->debugfh->print("$sql: " . join(', ', @debug_bind) . "\n"); } my $sth = $self->sth($sql,$op); - croak "no sth generated via sql: $sql" unless $sth; + $self->throw_exception("no sth generated via sql: $sql") unless $sth; @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args my $rv; if ($sth) { $rv = $sth->execute(@bind); } else { - croak "'$sql' did not generate a statement."; + $self->throw_exception("'$sql' did not generate a statement."); } return (wantarray ? ($rv, $sth, @bind) : $rv); } sub insert { my ($self, $ident, $to_insert) = @_; - croak( "Couldn't insert ".join(', ', map "$_ => $to_insert->{$_}", keys %$to_insert)." into ${ident}" ) + $self->throw_exception( "Couldn't insert ".join(', ', map "$_ => $to_insert->{$_}", keys %$to_insert)." into ${ident}" ) unless ($self->_execute('insert' => [], $ident, $to_insert)); return $to_insert; } @@ -435,8 +456,9 @@ sub _select { if (ref $condition eq 'SCALAR') { $order = $1 if $$condition =~ s/ORDER BY (.*)$//i; } - if (exists $attrs->{group_by}) { + if (exists $attrs->{group_by} || $attrs->{having}) { $order = { group_by => $attrs->{group_by}, + having => $attrs->{having}, ($order ? 
(order_by => $order) : ()) }; } my @args = ('select', $attrs->{bind}, $ident, $select, $condition, $order); @@ -490,7 +512,7 @@ sub columns_info_for { $column_info{is_nullable} = $info->{NULLABLE}; $result{$info->{COLUMN_NAME}} = \%column_info; } - }else{ + } else { my $sth = $self->dbh->prepare("SELECT * FROM $table WHERE 1=0"); $sth->execute; my @columns = @{$sth->{NAME}}; @@ -501,6 +523,41 @@ sub columns_info_for { return \%result; } +sub last_insert_id { + my ($self, $row) = @_; + + return $self->dbh->func('last_insert_rowid'); + +} + +sub sqlt_type { + my ($self) = @_; + my $dsn = $self->connect_info->[0]; + $dsn =~ /^dbi:(.*?)\d*:/; + return $1; +} + +sub deployment_statements { + my ($self, $schema, $type) = @_; + $type ||= $self->sqlt_type; + eval "use SQL::Translator"; + $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@; + eval "use SQL::Translator::Parser::DBIx::Class;"; + $self->throw_exception($@) if $@; + eval "use SQL::Translator::Producer::${type};"; + $self->throw_exception($@) if $@; + my $tr = SQL::Translator->new(); + SQL::Translator::Parser::DBIx::Class::parse( $tr, $schema ); + return "SQL::Translator::Producer::${type}"->can('produce')->($tr); +} + +sub deploy { + my ($self, $schema, $type) = @_; + foreach(split(";\n", $self->deployment_statements($schema, $type))) { + $self->dbh->do($_) or warn "SQL was:\n $_"; + } +} + sub DESTROY { shift->disconnect } 1; diff --git a/lib/DBIx/Class/Storage/DBI/DB2.pm b/lib/DBIx/Class/Storage/DBI/DB2.pm new file mode 100644 index 0000000..a6e1452 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/DB2.pm @@ -0,0 +1,48 @@ +package DBIx::Class::Storage::DBI::DB2; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id +{ + my ($self) = @_; + + my $dbh = $self->_dbh; + my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3); + $sth->execute(); + + my @res = $sth->fetchrow_array(); + + return @res ? $res[0] : undef; + +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::DB2 - Automatic primary key class for DB2 + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for DB2. + +=head1 AUTHORS + +Jess Robinson + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/MSSQL.pm b/lib/DBIx/Class/Storage/DBI/MSSQL.pm new file mode 100644 index 0000000..7a30b65 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/MSSQL.pm @@ -0,0 +1,39 @@ +package DBIx::Class::Storage::DBI::MSSQL; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my( $id ) = $_[0]->_dbh->selectrow_array('SELECT @@IDENTITY' ); + return $id; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::MSSQL - Automatic primary key class for MSSQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for MSSQL. + +=head1 AUTHORS + +Brian Cassidy + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
+ +=cut diff --git a/lib/DBIx/Class/Storage/DBI/Oracle.pm b/lib/DBIx/Class/Storage/DBI/Oracle.pm new file mode 100644 index 0000000..5fa4fce --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/Oracle.pm @@ -0,0 +1,68 @@ +package DBIx::Class::Storage::DBI::Oracle; + +use strict; +use warnings; + +use Carp qw/croak/; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my ($self,$source,$col) = @_; + my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col)); + my $sql = "SELECT " . $seq . ".currval FROM DUAL"; + my ($id) = $self->_dbh->selectrow_array($sql); + return $id; +} + +sub get_autoinc_seq { + my ($self,$source,$col) = @_; + + # look up the correct sequence automatically + my $dbh = $self->_dbh; + my $sql = q{ + SELECT trigger_body FROM ALL_TRIGGERS t + WHERE t.table_name = ? + AND t.triggering_event = 'INSERT' + AND t.status = 'ENABLED' + }; + # trigger_body is a LONG + $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024); + my $sth = $dbh->prepare($sql); + $sth->execute( uc($source->name) ); + while (my ($insert_trigger) = $sth->fetchrow_array) { + return uc($1) if $insert_trigger =~ m!(\w+)\.nextval!i; # col name goes here??? + } + croak "Unable to find a sequence INSERT trigger on table '" . $source->name . "'."; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::Oracle - Automatic primary key class for Oracle + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + __PACKAGE__->sequence('mysequence'); + +=head1 DESCRIPTION + +This class implements autoincrements for Oracle. + +=head1 AUTHORS + +Andy Grundman + +Scott Connelly + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/Pg.pm b/lib/DBIx/Class/Storage/DBI/Pg.pm new file mode 100644 index 0000000..f9cbee9 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/Pg.pm @@ -0,0 +1,62 @@ +package DBIx::Class::Storage::DBI::Pg; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + my ($self,$source,$col) = @_; + my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col)); + $self->_dbh->last_insert_id(undef,undef,undef,undef, {sequence => $seq}); +} + +sub get_autoinc_seq { + my ($self,$source,$col) = @_; + + my @pri = $source->primary_columns; + my $dbh = $self->_dbh; + my ($schema,$table) = $source->name =~ /^(.+)\.(.+)$/ ? ($1,$2) + : (undef,$source->name); + while (my $col = shift @pri) { + my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref; + if (defined $info->[12] and $info->[12] =~ + /^nextval\('([^']+)'::(?:text|regclass)\)/) + { + return $1; # may need to strip quotes -- see if this works + } + } +} + +sub sqlt_type { + return 'PostgreSQL'; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::Pg - Automatic primary key class for PostgreSQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + __PACKAGE__->sequence('mysequence'); + +=head1 DESCRIPTION + +This class implements autoincrements for PostgreSQL. + +=head1 AUTHORS + +Marcus Ramberg + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
+ +=cut diff --git a/lib/DBIx/Class/Storage/DBI/SQLite.pm b/lib/DBIx/Class/Storage/DBI/SQLite.pm new file mode 100644 index 0000000..e6175b5 --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/SQLite.pm @@ -0,0 +1,36 @@ +package DBIx::Class::Storage::DBI::SQLite; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +sub last_insert_id { + return $_[0]->dbh->func('last_insert_rowid'); +} + +1; + +=head1 NAME + +DBIx::Class::PK::Auto::SQLite - Automatic primary key class for SQLite + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto::SQLite Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for SQLite. + +=head1 AUTHORS + +Matt S. Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/Storage/DBI/mysql.pm b/lib/DBIx/Class/Storage/DBI/mysql.pm new file mode 100644 index 0000000..d601f1d --- /dev/null +++ b/lib/DBIx/Class/Storage/DBI/mysql.pm @@ -0,0 +1,42 @@ +package DBIx::Class::Storage::DBI::mysql; + +use strict; +use warnings; + +use base qw/DBIx::Class::Storage::DBI/; + +# __PACKAGE__->load_components(qw/PK::Auto/); + +sub last_insert_id { + return $_[0]->_dbh->{mysql_insertid}; +} + +sub sqlt_type { + return 'MySQL'; +} + +1; + +=head1 NAME + +DBIx::Class::Storage::DBI::mysql - Automatic primary key class for MySQL + +=head1 SYNOPSIS + + # In your table classes + __PACKAGE__->load_components(qw/PK::Auto Core/); + __PACKAGE__->set_primary_key('id'); + +=head1 DESCRIPTION + +This class implements autoincrements for MySQL. + +=head1 AUTHORS + +Matt S. Trout + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. + +=cut diff --git a/lib/DBIx/Class/UTF8Columns.pm b/lib/DBIx/Class/UTF8Columns.pm new file mode 100644 index 0000000..d5a37df --- /dev/null +++ b/lib/DBIx/Class/UTF8Columns.pm @@ -0,0 +1,93 @@ +package DBIx::Class::UTF8Columns; +use strict; +use warnings; +use base qw/DBIx::Class/; + +use Encode; + +__PACKAGE__->mk_classdata( force_utf8_columns => [] ); + +=head1 NAME + +DBIx::Class::UTF8Columns - Force UTF8 (Unicode) flag on columns + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UTF8Columns Core/); + __PACKAGE__->utf8_columns(qw/name description/); + + # then belows return strings with utf8 flag + $artist->name; + $artist->get_column('description'); + +=head1 DESCRIPTION + +This module allows you to get columns data that have utf8 (Unicode) flag. + +=head1 SEE ALSO + +L, L. + +=head1 METHODS + +=head2 utf8_columns + +=cut + +sub utf8_columns { + my $self = shift; + for (@_) { + $self->throw_exception("column $_ doesn't exist") + unless $self->has_column($_); + } + $self->force_utf8_columns( \@_ ); +} + +=head1 EXTENDED METHODS + +=head2 get_column + +=cut + +sub get_column { + my ( $self, $column ) = @_; + my $value = $self->next::method($column); + + if ( { map { $_ => 1 } @{ $self->force_utf8_columns } }->{$column} ) { + Encode::_utf8_on($value) unless Encode::is_utf8($value); + } + + $value; +} + +=head2 store_column + +=cut + +sub store_column { + my ( $self, $column, $value ) = @_; + + if ( { map { $_ => 1 } @{ $self->force_utf8_columns } }->{$column} ) { + Encode::_utf8_off($value) if Encode::is_utf8($value); + } + + $self->next::method( $column, $value ); +} + +=head1 AUTHOR + +Daisuke Murase + +=head1 COPYRIGHT + +This program is free software; you can redistribute +it and/or modify it under the same terms as Perl itself. 
+ +The full text of the license can be found in the +LICENSE file included with this module. + +=cut + +1; + diff --git a/lib/DBIx/Class/UUIDColumns.pm b/lib/DBIx/Class/UUIDColumns.pm index efbe3d4..8a58527 100644 --- a/lib/DBIx/Class/UUIDColumns.pm +++ b/lib/DBIx/Class/UUIDColumns.pm @@ -1,9 +1,73 @@ package DBIx::Class::UUIDColumns; use base qw/DBIx::Class/; -use Data::UUID; - __PACKAGE__->mk_classdata( 'uuid_auto_columns' => [] ); +__PACKAGE__->mk_classdata( 'uuid_maker' ); +__PACKAGE__->uuid_class( __PACKAGE__->_find_uuid_module ); + +# be compatible with Class::DBI::UUID +sub uuid_columns { + my $self = shift; + for (@_) { + $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_); + } + $self->uuid_auto_columns(\@_); +} + +sub uuid_class { + my ($self, $class) = @_; + + if ($class) { + $class = "DBIx::Class::UUIDMaker$class" if $class =~ /^::/; + + if (!eval "require $class") { + $self->throw_exception("$class could not be loaded: $@"); + } elsif (!$class->isa('DBIx::Class::UUIDMaker')) { + $self->throw_exception("$class is not a UUIDMaker subclass"); + } else { + $self->uuid_maker($class->new); + }; + }; + + return ref $self->uuid_maker; +}; + +sub insert { + my $self = shift; + for my $column (@{$self->uuid_auto_columns}) { + $self->store_column( $column, $self->get_uuid ) + unless defined $self->get_column( $column ); + } + $self->next::method(@_); +} + +sub get_uuid { + return shift->uuid_maker->as_string; +} + +sub _find_uuid_module { + if (eval{require Data::UUID}) { + return '::Data::UUID'; + } elsif ($^O ne 'openbsd' && eval{require APR::UUID}) { + # APR::UUID on openbsd causes some as yet unfound nastyness for XS + return '::APR::UUID'; + } elsif (eval{require UUID}) { + return '::UUID'; + } elsif (eval{ + # squelch the 'too late for INIT' warning in Win32::API::Type + local $^W = 0; + require Win32::Guidgen; + }) { + return '::Win32::Guidgen'; + } elsif (eval{require Win32API::GUID}) { + return '::Win32API::GUID'; + } else { + shift->throw_exception('no suitable uuid module could be found') + }; +}; + +1; +__END__ =head1 NAME @@ -11,7 +75,7 @@ DBIx::Class::UUIDColumns - Implicit uuid columns =head1 SYNOPSIS - pacakge Artist; + package Artist; __PACKAGE__->load_components(qw/UUIDColumns Core DB/); __PACKAGE__->uuid_columns( 'artist_id' ); @@ -20,44 +84,65 @@ DBIx::Class::UUIDColumns - Implicit uuid columns This L component resembles the behaviour of L, to make some columns implicitly created as uuid. +When loaded, C will search for a suitable uuid generation module +from the following list of supported modules: + + Data::UUID + APR::UUID* + UUID + Win32::Guidgen + Win32API::GUID + +If no supporting module can be found, an exception will be thrown. + +*APR::UUID will not be loaded under OpenBSD due to an as yet unidentified XS +issue. + +If you would like to use a specific module, you can set C: + + __PACKAGE__->uuid_class('::Data::UUID'); + __PACKAGE__->uuid_class('MyUUIDGenerator'); + Note that the component needs to be loaded before Core. =head1 METHODS -=head2 uuid_columns +=head2 uuid_columns(@columns) -=cut +Takes a list of columns to be filled with uuids during insert. 
-# be compatible with Class::DBI::UUID -sub uuid_columns { - my $self = shift; - for (@_) { - $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_); - } - $self->uuid_auto_columns(\@_); -} + __PACKAGE__->uuid_columns('id'); -sub insert { - my $self = shift; - for my $column (@{$self->uuid_auto_columns}) { - $self->store_column( $column, $self->get_uuid ) - unless defined $self->get_column( $column ); - } - $self->next::method(@_); -} +=head2 uuid_class($classname) -sub get_uuid { - return Data::UUID->new->to_string(Data::UUID->new->create), -} +Takes the name of a UUIDMaker subclass to be used for uuid value generation. +This can be a fully qualified class name, or a shortcut name starting with :: +that matches one of the available DBIx::Class::UUIDMaker subclasses: + + __PACKAGE__->uuid_class('CustomUUIDGenerator'); + # loads CustomeUUIDGenerator + + __PACKAGE->uuid_class('::Data::UUID'); + # loads DBIx::Class::UUIDMaker::Data::UUID; + +Note that C chacks to see that the specified class isa +DBIx::Class::UUIDMaker subbclass and throws and exception if it isn't. + +=head2 uuid_maker + +Returns the current UUIDMaker instance for the given module. + + my $uuid = __PACKAGE__->uuid_maker->as_string; + +=head1 SEE ALSO + +L =head1 AUTHORS Chia-liang Kao +Chris Laco =head1 LICENSE You may distribute this code under the same terms as Perl itself. - -=cut - -1; diff --git a/lib/DBIx/Class/UUIDMaker.pm b/lib/DBIx/Class/UUIDMaker.pm new file mode 100644 index 0000000..b9c196c --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker.pm @@ -0,0 +1,56 @@ +package DBIx::Class::UUIDMaker; + +sub new { + return bless {}, shift; +}; + +sub as_string { + return undef; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker - UUID wrapper module + +=head1 SYNOPSIS + + package CustomUUIDMaker; + use base qw/DBIx::Class::/; + + sub as_string { + my $uuid; + ...magic encantations... + return $uuid; + }; + +=head1 DESCRIPTION + +DBIx::Class::UUIDMaker is a base class used by the various uuid generation +subclasses. + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L, +L, +L, +L, +L, +L, +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/APR/UUID.pm b/lib/DBIx/Class/UUIDMaker/APR/UUID.pm new file mode 100644 index 0000000..136ec5f --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/APR/UUID.pm @@ -0,0 +1,46 @@ +package DBIx::Class::UUIDMaker::APR::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use APR::UUID (); + +sub as_string { + return APR::UUID->new->format; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::APR::UUID - Create uuids using APR::UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::APR::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses APR::UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
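For comparison, a minimal third-party backend can follow the same pattern as the bundled makers above. This is only a sketch: the package name and the choice of Data::UUID as the underlying generator are illustrative, not part of the commit.

  package My::UUIDMaker;
  use strict;
  use warnings;
  use base qw/DBIx::Class::UUIDMaker/;
  use Data::UUID ();

  # as_string is the only method a maker needs to supply;
  # it must return the uuid as a plain string
  sub as_string {
      return Data::UUID->new->create_str;
  }

  1;

A table class would then select it explicitly with __PACKAGE__->uuid_class('My::UUIDMaker'), which passes the isa('DBIx::Class::UUIDMaker') check performed by uuid_class().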
diff --git a/lib/DBIx/Class/UUIDMaker/Data/UUID.pm b/lib/DBIx/Class/UUIDMaker/Data/UUID.pm new file mode 100644 index 0000000..820669c --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Data/UUID.pm @@ -0,0 +1,46 @@ +package DBIx::Class::UUIDMaker::Data::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use Data::UUID (); + +sub as_string { + return Data::UUID->new->to_string(Data::UUID->new->create); +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Data::UUID - Create uuids using Data::UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Data::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Data::UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm b/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm new file mode 100644 index 0000000..8d9a29d --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/Data/Uniqid.pm @@ -0,0 +1,44 @@ +package DBIx::Class::UUIDMaker::Data::Uniqid; +use base qw/DBIx::Class::UUIDMaker/; +use Data::Uniqid (); + +sub as_string { + return Data::Uniqid->luniqid; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::Data::Uniqid - Create uuids using Data::Uniqid + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::Data::Uniqid'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses Data::Uniqid to generate uuid +strings using Data::Uniqid::luniqid. + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. diff --git a/lib/DBIx/Class/UUIDMaker/UUID.pm b/lib/DBIx/Class/UUIDMaker/UUID.pm new file mode 100644 index 0000000..7a647a9 --- /dev/null +++ b/lib/DBIx/Class/UUIDMaker/UUID.pm @@ -0,0 +1,50 @@ +package DBIx::Class::UUIDMaker::UUID; +use base qw/DBIx::Class::UUIDMaker/; +use UUID (); + +sub as_string { + my ($uuid, $uuidstring); + UUID::generate($uuid); + UUID::unparse($uuid, $uuidstring); + + return $uuidstring; +}; + +1; +__END__ + +=head1 NAME + +DBIx::Class::UUIDMaker::UUID - Create uuids using UUID + +=head1 SYNOPSIS + + package Artist; + __PACKAGE__->load_components(qw/UUIDColumns Core DB/); + __PACKAGE__->uuid_columns( 'artist_id' ); + __PACKAGE__->uuid_class('::UUID'); + +=head1 DESCRIPTION + +This DBIx::Class::UUIDMaker subclass uses UUID to generate uuid +strings in the following format: + + 098f2470-bae0-11cd-b579-08002b30bfeb + +=head1 METHODS + +=head2 as_string + +Returns the new uuid as a string. + +=head1 SEE ALSO + +L + +=head1 AUTHOR + +Chris Laco + +=head1 LICENSE + +You may distribute this code under the same terms as Perl itself. 
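Together with _find_uuid_module() earlier in this change, these makers give UUIDColumns its detection order: Data::UUID first, then APR::UUID (skipped on OpenBSD), then UUID, then the Win32 generators that follow. A small diagnostic sketch, assuming at least Data::UUID is installed; it is not part of the patch and only shows how the detected backend can be inspected and overridden:

  #!/usr/bin/perl
  use strict;
  use warnings;

  # Loading the component runs its probe and selects the first
  # available generator from the supported list.
  use DBIx::Class::UUIDColumns;

  # uuid_class() without an argument returns ref($self->uuid_maker),
  # i.e. the DBIx::Class::UUIDMaker subclass that won the probe.
  print 'detected: ', DBIx::Class::UUIDColumns->uuid_class, "\n";

  # Force a specific backend; the leading '::' expands to
  # DBIx::Class::UUIDMaker::Data::UUID.
  DBIx::Class::UUIDColumns->uuid_class('::Data::UUID');

  # get_uuid() simply delegates to the selected maker's as_string()
  print 'uuid:     ', DBIx::Class::UUIDColumns->get_uuid, "\n";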
diff --git a/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm b/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm
new file mode 100644
index 0000000..3c34b9a
--- /dev/null
+++ b/lib/DBIx/Class/UUIDMaker/Win32/Guidgen.pm
@@ -0,0 +1,49 @@
+package DBIx::Class::UUIDMaker::Win32::Guidgen;
+use base qw/DBIx::Class::UUIDMaker/;
+use Win32::Guidgen ();
+
+sub as_string {
+    my $uuid = Win32::Guidgen::create();
+    $uuid =~ s/(^\{|\}$)//g;
+
+    return $uuid;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Win32::Guidgen - Create uuids using Win32::Guidgen
+
+=head1 SYNOPSIS
+
+  package Artist;
+  __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+  __PACKAGE__->uuid_columns( 'artist_id' );
+  __PACKAGE__->uuid_class('::Win32::Guidgen');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Win32::Guidgen to generate uuid
+strings in the following format:
+
+  098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L
+
+=head1 AUTHOR
+
+Chris Laco
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
diff --git a/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm b/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm
new file mode 100644
index 0000000..85caad1
--- /dev/null
+++ b/lib/DBIx/Class/UUIDMaker/Win32API/GUID.pm
@@ -0,0 +1,46 @@
+package DBIx::Class::UUIDMaker::Win32API::GUID;
+use base qw/DBIx::Class::UUIDMaker/;
+use Win32API::GUID ();
+
+sub as_string {
+    return Win32API::GUID::CreateGuid();
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Win32API::GUID - Create uuids using Win32API::GUID
+
+=head1 SYNOPSIS
+
+  package Artist;
+  __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+  __PACKAGE__->uuid_columns( 'artist_id' );
+  __PACKAGE__->uuid_class('::Win32API::GUID');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Win32API::GUID to generate uuid
+strings in the following format:
+
+  098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L
+
+=head1 AUTHOR
+
+Chris Laco
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
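Each backend here is a thin normaliser: it calls its generator and returns a plain lower-case 8-4-4-4-12 string, stripping any decoration (Win32::Guidgen's surrounding braces, for instance). A new backend only needs to follow the same shape. The module below is a hypothetical pure-Perl maker, shown purely to illustrate the pattern; it is not part of this change and rand() is not a serious source of uniqueness:

  # lib/DBIx/Class/UUIDMaker/PurePerl.pm (hypothetical)
  package DBIx::Class::UUIDMaker::PurePerl;
  use strict;
  use warnings;
  use base qw/DBIx::Class::UUIDMaker/;

  sub as_string {
      # sixteen pseudo-random bytes, stamped as version 4 / variant 10xx,
      # then formatted as the usual 8-4-4-4-12 lower-case string
      my @bytes = map { int rand 256 } 1 .. 16;
      $bytes[6] = ($bytes[6] & 0x0f) | 0x40;
      $bytes[8] = ($bytes[8] & 0x3f) | 0x80;
      my $hex = join '', map { sprintf '%02x', $_ } @bytes;
      return join '-', unpack 'A8 A4 A4 A4 A12', $hex;
  }

  1;

Saved under the DBIx::Class::UUIDMaker namespace it could then be selected like the bundled makers, e.g. __PACKAGE__->uuid_class('::PurePerl').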
diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 2d0bf00..448c651 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -72,7 +72,6 @@ sub parse { } $table->primary_key($source->primary_columns); - my @rels = $source->relationships(); foreach my $rel (@rels) { @@ -92,15 +91,16 @@ sub parse { my $rel_table = $source->related_source($rel)->name; my $cond = (keys (%{$rel_info->{cond}}))[0]; my ($refkey) = $cond =~ /^\w+\.(\w+)$/; + my ($key) = $rel_info->{cond}->{$cond} =~ /^\w+\.(\w+)$/; if($rel_table && $refkey) { $table->add_constraint( type => 'foreign_key', - name => "fk_${rel}_id", - fields => $rel, + name => "fk_${key}", + fields => $key, reference_fields => $refkey, reference_table => $rel_table, - ); + ); } } } diff --git a/maint/gen-schema.pl b/maint/gen-schema.pl index b16bd15..12b3aeb 100755 --- a/maint/gen-schema.pl +++ b/maint/gen-schema.pl @@ -4,19 +4,9 @@ use strict; use warnings; use lib qw(lib t/lib); -use UNIVERSAL::require; +use DBICTest; +use DBICTest::HelperRels; -my $from = 'SQL::Translator::Parser::DBIx::Class'; -my $to = 'SQL::Translator::Producer::SQLite'; -my $sqlt = 'SQL::Translator'; -my $schema = 'DBICTest::Schema'; +my $schema = DBICTest->initialise; -$from->require; -$to->require; -$sqlt->require; -$schema->require; - -my $tr = $sqlt->new; - -$from->can("parse")->($tr, $schema); -print $to->can("produce")->($tr); +print $schema->storage->deployment_statements($schema); diff --git a/maint/gen-tests.pl b/maint/gen-tests.pl new file mode 100755 index 0000000..0fc6180 --- /dev/null +++ b/maint/gen-tests.pl @@ -0,0 +1,25 @@ +#!/usr/bin/perl + +die "must be run from DBIx::Class root dir" unless -d 't/run'; + +gen_tests($_) for qw/BasicRels HelperRels/; + +sub gen_tests { + my $variant = shift; + my $dir = lc $variant; + system("rm -f t/$dir/*.t"); + + foreach my $test (map { m[^t/run/(.+)\.tl$]; $1 } split(/\n/, `ls t/run/*.tl`)) { + open(my $fh, '>', "t/$dir/${test}.t") or die $!; + print $fh <<"EOF"; +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::$variant; + +require "t/run/${test}.tl"; +run_tests(DBICTest->schema); +EOF + close $fh; + } +} \ No newline at end of file diff --git a/t/05components.t b/t/05components.t new file mode 100644 index 0000000..57bebd5 --- /dev/null +++ b/t/05components.t @@ -0,0 +1,14 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use Test::More; + +use lib qw(t/lib); +use DBICTest::ForeignComponent; + +plan tests => 1; + +# Tests if foreign component was loaded by calling foreign's method +ok( DBICTest::ForeignComponent->foreign_test_method, 'foreign component' ); + diff --git a/t/basicrels/145db2.t b/t/basicrels/145db2.t new file mode 100644 index 0000000..9573802 --- /dev/null +++ b/t/basicrels/145db2.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/145db2.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/20unique.t b/t/basicrels/20unique.t new file mode 100644 index 0000000..5a87ef1 --- /dev/null +++ b/t/basicrels/20unique.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/20unique.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/21transactions.t b/t/basicrels/21transactions.t new file mode 100644 index 0000000..cea95cf --- /dev/null +++ b/t/basicrels/21transactions.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + 
+require "t/run/21transactions.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/22cascade_copy.t b/t/basicrels/22cascade_copy.t new file mode 100644 index 0000000..c670152 --- /dev/null +++ b/t/basicrels/22cascade_copy.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/22cascade_copy.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/23cache.t b/t/basicrels/23cache.t new file mode 100644 index 0000000..ca2efee --- /dev/null +++ b/t/basicrels/23cache.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/23cache.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/24serialize.t b/t/basicrels/24serialize.t new file mode 100644 index 0000000..1a11191 --- /dev/null +++ b/t/basicrels/24serialize.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/24serialize.tl"; +run_tests(DBICTest->schema); diff --git a/t/basicrels/25utf8.t b/t/basicrels/25utf8.t new file mode 100644 index 0000000..c5fe364 --- /dev/null +++ b/t/basicrels/25utf8.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::BasicRels; + +require "t/run/25utf8.tl"; +run_tests(DBICTest->schema); diff --git a/t/cdbi-t/12-filter.t b/t/cdbi-t/12-filter.t index c161602..979ad56 100644 --- a/t/cdbi-t/12-filter.t +++ b/t/cdbi-t/12-filter.t @@ -164,6 +164,8 @@ package main; Actor->iterator_class('Class::DBI::My::Iterator'); +delete $film->{related_resultsets}; + { my @acts = $film->actors->slice(1, 2); is @acts, 2, "Slice gives 2 results"; diff --git a/t/helperrels/145db2.t b/t/helperrels/145db2.t new file mode 100644 index 0000000..c6925ef --- /dev/null +++ b/t/helperrels/145db2.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/145db2.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/22cascade_copy.t b/t/helperrels/22cascade_copy.t new file mode 100644 index 0000000..bc124e1 --- /dev/null +++ b/t/helperrels/22cascade_copy.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/22cascade_copy.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/23cache.t b/t/helperrels/23cache.t new file mode 100644 index 0000000..73bc31a --- /dev/null +++ b/t/helperrels/23cache.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/23cache.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/24serialize.t b/t/helperrels/24serialize.t new file mode 100644 index 0000000..bc51393 --- /dev/null +++ b/t/helperrels/24serialize.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/24serialize.tl"; +run_tests(DBICTest->schema); diff --git a/t/helperrels/25utf8.t b/t/helperrels/25utf8.t new file mode 100644 index 0000000..ad3fe14 --- /dev/null +++ b/t/helperrels/25utf8.t @@ -0,0 +1,7 @@ +use Test::More; +use lib qw(t/lib); +use DBICTest; +use DBICTest::HelperRels; + +require "t/run/25utf8.tl"; +run_tests(DBICTest->schema); diff --git a/t/lib/DBICTest.pm b/t/lib/DBICTest.pm index 0afc604..a2eef1b 100755 --- a/t/lib/DBICTest.pm +++ b/t/lib/DBICTest.pm @@ -1 +1,20 @@ +package DBICTest; + +use strict; +use warnings; +use DBICTest::Schema; + +sub initialise { + + my $db_file = "t/var/DBIxClass.db"; + + unlink($db_file) if -e $db_file; + unlink($db_file . 
"-journal") if -e $db_file . "-journal"; + mkdir("t/var") unless -d "t/var"; + + my $dsn = "dbi:SQLite:${db_file}"; + + return DBICTest::Schema->compose_connection('DBICTest' => $dsn); +} + 1; diff --git a/t/lib/DBICTest/ForeignComponent.pm b/t/lib/DBICTest/ForeignComponent.pm new file mode 100644 index 0000000..a701459 --- /dev/null +++ b/t/lib/DBICTest/ForeignComponent.pm @@ -0,0 +1,10 @@ +# belongs to t/05components.t +package DBICTest::ForeignComponent; +use warnings; +use strict; + +use base qw/ DBIx::Class /; + +__PACKAGE__->load_components( qw/ +DBICTest::ForeignComponent::TestComp / ); + +1; diff --git a/t/lib/DBICTest/ForeignComponent/TestComp.pm b/t/lib/DBICTest/ForeignComponent/TestComp.pm new file mode 100644 index 0000000..ce024ba --- /dev/null +++ b/t/lib/DBICTest/ForeignComponent/TestComp.pm @@ -0,0 +1,8 @@ +# belongs to t/05components.t +package DBICTest::ForeignComponent::TestComp; +use warnings; +use strict; + +sub foreign_test_method { 1 } + +1; diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index 2200cd4..d05526f 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::Artist; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::Artist->table('artist'); DBICTest::Schema::Artist->add_columns( 'artistid' => { diff --git a/t/lib/DBICTest/Schema/BasicRels.pm b/t/lib/DBICTest/Schema/BasicRels.pm index 7a6f520..ecb9cef 100644 --- a/t/lib/DBICTest/Schema/BasicRels.pm +++ b/t/lib/DBICTest/Schema/BasicRels.pm @@ -5,11 +5,12 @@ use base 'DBIx::Class::Core'; DBICTest::Schema::Artist->add_relationship( cds => 'DBICTest::Schema::CD', { 'foreign.artist' => 'self.artistid' }, - { order_by => 'year', join_type => 'LEFT', cascade_delete => 1 } + { order_by => 'year', join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' } ); DBICTest::Schema::Artist->add_relationship( twokeys => 'DBICTest::Schema::TwoKeys', - { 'foreign.artist' => 'self.artistid' } + { 'foreign.artist' => 'self.artistid' }, + { cascade_copy => 1 } ); DBICTest::Schema::Artist->add_relationship( onekeys => 'DBICTest::Schema::OneKey', @@ -37,7 +38,7 @@ DBICTest::Schema::CD->add_relationship( DBICTest::Schema::CD->add_relationship( tags => 'DBICTest::Schema::Tag', { 'foreign.cd' => 'self.cdid' }, - { join_type => 'LEFT', cascade_delete => 1 } + { join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' } ); #DBICTest::Schema::CD->might_have(liner_notes => 'DBICTest::Schema::LinerNotes' => qw/notes/); DBICTest::Schema::CD->add_relationship( diff --git a/t/lib/DBICTest/Schema/CD.pm b/t/lib/DBICTest/Schema/CD.pm index 8e04c16..4eaba4f 100644 --- a/t/lib/DBICTest/Schema/CD.pm +++ b/t/lib/DBICTest/Schema/CD.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::CD; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::CD->table('cd'); DBICTest::Schema::CD->add_columns( 'cdid' => { diff --git a/t/lib/DBICTest/Schema/HelperRels.pm b/t/lib/DBICTest/Schema/HelperRels.pm index ff47640..1fb8886 100644 --- a/t/lib/DBICTest/Schema/HelperRels.pm +++ b/t/lib/DBICTest/Schema/HelperRels.pm @@ -42,7 +42,8 @@ DBICTest::Schema::CD_to_Producer->belongs_to( ); DBICTest::Schema::Artist->has_many( 'artist_undirected_maps', 'DBICTest::Schema::ArtistUndirectedMap', - [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}] + [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}], + { cascade_copy => 
0 } # this would *so* not make sense ); DBICTest::Schema::ArtistUndirectedMap->belongs_to( 'artist1', 'DBICTest::Schema::Artist', 'id1'); diff --git a/t/lib/DBICTest/Schema/OneKey.pm b/t/lib/DBICTest/Schema/OneKey.pm index 081c94b..19387eb 100644 --- a/t/lib/DBICTest/Schema/OneKey.pm +++ b/t/lib/DBICTest/Schema/OneKey.pm @@ -2,6 +2,8 @@ package DBICTest::Schema::OneKey; use base 'DBIx::Class::Core'; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::OneKey->table('onekey'); DBICTest::Schema::OneKey->add_columns( 'id' => { diff --git a/t/lib/DBICTest/Schema/Tag.pm b/t/lib/DBICTest/Schema/Tag.pm index 0a303d6..5b4eb20 100644 --- a/t/lib/DBICTest/Schema/Tag.pm +++ b/t/lib/DBICTest/Schema/Tag.pm @@ -2,10 +2,12 @@ package DBICTest::Schema::Tag; use base qw/DBIx::Class::Core/; +__PACKAGE__->load_components('PK::Auto'); + DBICTest::Schema::Tag->table('tags'); DBICTest::Schema::Tag->add_columns( 'tagid' => { - data_type => 'varchar', + data_type => 'integer', is_auto_increment => 1, }, 'cd' => { diff --git a/t/lib/DBICTest/Setup.pm b/t/lib/DBICTest/Setup.pm index 6b2e3f2..a7efea5 100755 --- a/t/lib/DBICTest/Setup.pm +++ b/t/lib/DBICTest/Setup.pm @@ -1,30 +1,26 @@ use strict; use warnings; -use DBICTest::Schema; +use DBICTest; -my $db_file = "t/var/DBIxClass.db"; - -unlink($db_file) if -e $db_file; -unlink($db_file . "-journal") if -e $db_file . "-journal"; -mkdir("t/var") unless -d "t/var"; - -my $dsn = "dbi:SQLite:${db_file}"; - -my $schema = DBICTest::Schema->compose_connection('DBICTest' => $dsn); +my $schema = DBICTest->initialise; $schema->storage->on_connect_do([ "PRAGMA synchronous = OFF" ]); my $dbh = $schema->storage->dbh; -open IN, "t/lib/sqlite.sql"; +if ($ENV{"DBICTEST_SQLT_DEPLOY"}) { + $schema->deploy; +} else { + open IN, "t/lib/sqlite.sql"; -my $sql; + my $sql; -{ local $/ = undef; $sql = ; } + { local $/ = undef; $sql = ; } -close IN; + close IN; -$dbh->do($_) for split(/\n\n/, $sql); + $dbh->do($_) for split(/\n\n/, $sql); +} $schema->storage->dbh->do("PRAGMA synchronous = OFF"); diff --git a/t/lib/sqlite.sql b/t/lib/sqlite.sql index 1ee7c21..391de14 100644 --- a/t/lib/sqlite.sql +++ b/t/lib/sqlite.sql @@ -1,6 +1,6 @@ -- -- Created by SQL::Translator::Producer::SQLite --- Created on Mon Feb 6 01:07:16 2006 +-- Created on Fri Feb 24 15:13:57 2006 -- BEGIN TRANSACTION; @@ -78,14 +78,6 @@ CREATE TABLE artist_undirected_map ( ); -- --- Table: producer --- -CREATE TABLE producer ( - producerid INTEGER PRIMARY KEY NOT NULL, - name varchar NOT NULL -); - --- -- Table: onekey -- CREATE TABLE onekey ( @@ -105,6 +97,14 @@ CREATE TABLE track ( ); -- +-- Table: producer +-- +CREATE TABLE producer ( + producerid INTEGER PRIMARY KEY NOT NULL, + name varchar NOT NULL +); + +-- -- Table: treelike -- CREATE TABLE treelike ( @@ -117,10 +117,9 @@ CREATE TABLE treelike ( -- Table: tags -- CREATE TABLE tags ( - tagid varchar NOT NULL, + tagid INTEGER PRIMARY KEY NOT NULL, cd integer NOT NULL, - tag varchar NOT NULL, - PRIMARY KEY (tagid) + tag varchar NOT NULL ); -- diff --git a/t/run/10auto.tl b/t/run/10auto.tl index 6e474a5..7c795f4 100644 --- a/t/run/10auto.tl +++ b/t/run/10auto.tl @@ -4,6 +4,7 @@ my $schema = shift; plan tests => 2; $schema->class("Artist")->load_components(qw/PK::Auto::SQLite/); + # Should just be PK::Auto but this ensures the compat shim works # add an artist without primary key to test Auto my $artist = $schema->resultset("Artist")->create( { name => 'Auto' } ); diff --git a/t/run/11mysql.tl b/t/run/11mysql.tl index 2411b96..234474f 100644 --- a/t/run/11mysql.tl 
+++ b/t/run/11mysql.tl @@ -20,7 +20,7 @@ $dbh->do("CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY #'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', ''); -MySQLTest::Artist->load_components('PK::Auto::MySQL'); +MySQLTest::Artist->load_components('PK::Auto'); # test primary key handling my $new = MySQLTest::Artist->create({ name => 'foo' }); diff --git a/t/run/12pg.tl b/t/run/12pg.tl index 22c4008..5ffef5c 100644 --- a/t/run/12pg.tl +++ b/t/run/12pg.tl @@ -16,7 +16,7 @@ my $dbh = PgTest->schema->storage->dbh; $dbh->do("CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(255), charfield CHAR(10));"); -PgTest::Artist->load_components('PK::Auto::Pg'); +PgTest::Artist->load_components('PK::Auto'); my $new = PgTest::Artist->create({ name => 'foo' }); diff --git a/t/run/13oracle.tl b/t/run/13oracle.tl index 0bd3060..278e663 100644 --- a/t/run/13oracle.tl +++ b/t/run/13oracle.tl @@ -38,7 +38,7 @@ $dbh->do(qq{ END; }); -OraTest::Artist->load_components('PK::Auto::Oracle'); +OraTest::Artist->load_components('PK::Auto'); OraTest::CD->load_components('PK::Auto::Oracle'); OraTest::Track->load_components('PK::Auto::Oracle'); diff --git a/t/run/145db2.tl b/t/run/145db2.tl new file mode 100644 index 0000000..4c860bf --- /dev/null +++ b/t/run/145db2.tl @@ -0,0 +1,73 @@ +sub run_tests { +my $schema = shift; + +my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_${_}" } qw/DSN USER PASS/}; + +#warn "$dsn $user $pass"; + +plan skip_all, 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test' + unless ($dsn && $user); + +plan tests => 5; + +DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass); + +my $dbh = DB2Test->schema->storage->dbh; + +$dbh->do("DROP TABLE artist;"); + +$dbh->do("CREATE TABLE artist (artistid INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), name VARCHAR(255), charfield CHAR(10));"); + +#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', ''); + +DB2Test::Artist->load_components('PK::Auto'); + +# test primary key handling +my $new = DB2Test::Artist->create({ name => 'foo' }); +ok($new->artistid, "Auto-PK worked"); + +# test LIMIT support +for (1..6) { + DB2Test::Artist->create({ name => 'Artist ' . $_ }); +} +my $it = DB2Test::Artist->search( {}, + { rows => 3, + order_by => 'artistid' + } +); +is( $it->count, 3, "LIMIT count ok" ); +is( $it->next->name, "Artist 2", "iterator->next ok" ); +$it->next; +$it->next; +is( $it->next, undef, "next past end of resultset ok" ); + +my $test_type_info = { + 'artistid' => { + 'data_type' => 'INTEGER', + 'is_nullable' => 0, + 'size' => 11 + }, + 'name' => { + 'data_type' => 'VARCHAR', + 'is_nullable' => 1, + 'size' => 255 + }, + 'charfield' => { + 'data_type' => 'VARCHAR', + 'is_nullable' => 1, + 'size' => 10 + }, +}; + + +my $type_info = DB2Test->schema->storage->columns_info_for('artist'); +is_deeply($type_info, $test_type_info, 'columns_info_for - column data types'); + + + +# clean up our mess +$dbh->do("DROP TABLE artist"); + +} + +1; diff --git a/t/run/16joins.tl b/t/run/16joins.tl index 8c8378b..091cf74 100644 --- a/t/run/16joins.tl +++ b/t/run/16joins.tl @@ -7,7 +7,7 @@ BEGIN { eval "use DBD::SQLite"; plan $@ ? 
( skip_all => 'needs DBD::SQLite for testing' ) - : ( tests => 41 ); + : ( tests => 42 ); } # figure out if we've got a version of sqlite that is older than 3.2.6, in @@ -148,7 +148,7 @@ is($selects, 1, 'prefetch ran only 1 select statement'); my $cd = $schema->resultset('CD')->find(1, { cols => [qw/title artist.name/], - join => 'artist' + join => { 'artist' => {} } } ); ok(eval { $cd->artist->name eq 'Caterwauler McCrae' }, 'single related column prefetched'); @@ -253,7 +253,16 @@ SKIP: { cmp_ok( $rs->count, '==', 3, "count() ok after group_by on related column" ); } -cmp_ok( scalar $rs->all, '==', 3, "all() returns same count as count() after group_by on related column" ); +$rs = $schema->resultset("Artist")->search( + {}, + { join => [qw/ cds /], group_by => [qw/ me.name /], having =>{ 'MAX(cds.cdid)'=> \'< 5' } } +); + +cmp_ok( $rs->all, '==', 2, "results ok after group_by on related column with a having" ); + +$rs = $rs->search( undef, { having =>{ 'count(*)'=> \'> 2' }}); + +cmp_ok( $rs->all, '==', 1, "count() ok after group_by on related column with a having" ); $rs = $schema->resultset("Artist")->search( { 'cds.title' => 'Spoonful of bees', diff --git a/t/run/22cascade_copy.tl b/t/run/22cascade_copy.tl new file mode 100644 index 0000000..8c682e5 --- /dev/null +++ b/t/run/22cascade_copy.tl @@ -0,0 +1,23 @@ +use strict; +use warnings; + +sub run_tests { +my $schema = shift; + +plan tests => 4; +my $artist = $schema->resultset('Artist')->find(1); +my $artist_cds = $artist->search_related('cds'); +my $cover_band = $artist->copy; +my $cover_cds = $cover_band->search_related('cds'); +cmp_ok($cover_band->id, '!=', $artist->id, 'ok got new column id...'); +is($cover_cds->count, $artist_cds->count, 'duplicated rows count ok'); + +#check multi-keyed +cmp_ok($cover_band->search_related('twokeys')->count, '>', 0, 'duplicated multiPK ok'); + +#and check copying a few relations away +cmp_ok($cover_cds->search_related('tags')->count, '==', + $artist_cds->search_related('tags')->count , 'duplicated count ok'); + +} +1; diff --git a/t/run/23cache.tl b/t/run/23cache.tl new file mode 100644 index 0000000..a8cfffe --- /dev/null +++ b/t/run/23cache.tl @@ -0,0 +1,142 @@ +sub run_tests { +my $schema = shift; + +eval "use DBD::SQLite"; +plan skip_all => 'needs DBD::SQLite for testing' if $@; +plan tests => 12; + +warn " +This test WILL fail. That's because the has_many prefetch code is +only half re-written. However, it was utterly borken before, so +this is arguably an improvement. If you fancy having a go at making +_construct_object in resultset collapse multiple results into +appropriate nested structures for inflate_result, be my guest. 
+ -- mst + +"; + +my $rs = $schema->resultset("Artist")->search( + { artistid => 1 } +); + +my $artist = $rs->first; + +is( scalar @{ $rs->get_cache }, 0, 'cache is not populated without cache attribute' ); + +$rs = $schema->resultset("Artist")->search( + { 'artistid' => 1 }, + { + join => [ qw/ cds /], + prefetch => [qw/ cds /], + } +); + +use Data::Dumper; $Data::Dumper::Deparse = 1; + +# start test for prefetch SELECT count +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +$artist = $rs->first; +$rs->reset(); + +# make sure artist contains a related resultset for cds +is( ref $artist->{related_resultsets}->{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' ); + +# check if $artist->cds->get_cache is populated +is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records'); + +# ensure that $artist->cds returns correct number of objects +is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' ); + +# ensure that $artist->cds->count returns correct value +is( $artist->cds->count, 3, 'artist->cds->count returns correct value' ); + +# ensure that $artist->count_related('cds') returns correct value +is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' ); + +# count the SELECTs +DBI->trace(0, undef); +my $selects = 0; +my $trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; +is($selects, 2, 'only one SQL statement for each cached table'); + +# make sure related_resultset is deleted after object is updated +$artist->set_column('name', 'New Name'); +$artist->update(); + +is( scalar keys %{$artist->{related_resultsets}}, 0, 'related resultsets deleted after update' ); + +# todo: make sure caching works with nested prefetch e.g. 
$artist->cds->tracks +$rs = $schema->resultset("Artist")->search( + { artistid => 1 }, + { + join => { cds => 'tags' }, + prefetch => { + cds => 'tags' + }, + } +); + +# SELECT count for nested has_many prefetch +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +$artist = $rs->first; + +# count the SELECTs +DBI->trace(0, undef); +$selects = 0; +$trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; +is($selects, 3, 'one SQL statement for each cached table with nested prefetch'); + +my @objs; +$artist = $rs->find(1); + +unlink 't/var/dbic.trace' if -e 't/var/dbic.trace'; +DBI->trace(1, 't/var/dbic.trace'); + +my $cds = $artist->cds; +my $tags = $cds->next->tags; +while( my $tag = $tags->next ) { + push @objs, $tag->tagid; #warn "tag:", $tag->ID; +} + +is_deeply( \@objs, [ 1 ], 'first cd has correct tags' ); + +$tags = $cds->next->tags; +@objs = (); +while( my $tag = $tags->next ) { + push @objs, $tag->id; #warn "tag: ", $tag->ID; +} + +is_deeply( \@objs, [ 2, 5, 8 ], 'second cd has correct tags' ); + +# count the SELECTs +DBI->trace(0, undef); +$selects = 0; +$trace = IO::File->new('t/var/dbic.trace', '<') + or die "Unable to read trace file"; +while (<$trace>) { + $selects++ if /SELECT/; +} +$trace->close; +unlink 't/var/dbic.trace'; + +is( $selects, 0, 'no additional SQL statements while checking nested data' ); + +} + +1; diff --git a/t/run/24serialize.tl b/t/run/24serialize.tl new file mode 100644 index 0000000..7c746f2 --- /dev/null +++ b/t/run/24serialize.tl @@ -0,0 +1,14 @@ +use Storable; + +sub run_tests { +my $schema = shift; + +plan tests => 1; + +my $artist = $schema->resultset('Artist')->find(1); +my $copy = eval { Storable::dclone($artist) }; +is_deeply($copy, $artist, 'serialize row object works'); + +} + +1; diff --git a/t/run/25utf8.tl b/t/run/25utf8.tl new file mode 100644 index 0000000..278dde4 --- /dev/null +++ b/t/run/25utf8.tl @@ -0,0 +1,23 @@ +sub run_tests { + my $schema = shift; + + eval 'use Encode ; 1' + or plan skip_all, 'Install Encode run this test'; + + plan tests => 2; + + DBICTest::Schema::Artist->load_components('UTF8Columns'); + DBICTest::Schema::Artist->utf8_columns('name'); + Class::C3->reinitialize(); + + my $artist = $schema->resultset("Artist")->create( { name => 'uni' } ); + ok( Encode::is_utf8( $artist->name ), 'got name with utf8 flag' ); + + my $utf8_char = 'uniuni'; + Encode::_utf8_on($utf8_char); + $artist->name($utf8_char); + ok( !Encode::is_utf8( $artist->{_column_data}->{name} ), + 'store utf8 less chars' ); +} + +1;
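All of the tests added in this merge follow the same convention: the body lives in a t/run/*.tl file that defines run_tests(), and maint/gen-tests.pl regenerates the thin t/basicrels/*.t and t/helperrels/*.t wrappers that require it and pass in DBICTest->schema. A minimal sketch of what a further test would look like under that layout; the 26example name and the single assertion are illustrative only:

  # t/run/26example.tl (hypothetical; wrappers are produced by maint/gen-tests.pl)
  use strict;
  use warnings;

  sub run_tests {
      my $schema = shift;    # DBICTest->schema, passed in by the generated wrapper

      plan tests => 1;

      my $artist = $schema->resultset('Artist')->find(1);
      ok($artist, 'fetched an artist from the shared test schema');
  }

  1;

The generated wrapper is exactly the short stub that appears throughout this diff: it loads Test::More, DBICTest and one of DBICTest::BasicRels or DBICTest::HelperRels, requires the .tl file, and calls run_tests(DBICTest->schema).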