Revision history for DBIx::Class
+ - $schema->deploy
+ - HAVING support
+ - prefetch for has_many
+ - PK::Auto::* no longer required since Storage::DBI::* handle auto-inc
+ - minor tweak to tests for join edge case
+ - added cascade_copy relationship attribute
+ (sponsored by Airspace Software, http://www.airspace.co.uk/)
+ - clean up set_from_related
+ - made copy() automatically null out auto-inc columns
+
0.05007 2006-02-24 00:59:00
- tweak to Componentised for Class::C3 0.11
- fixes for auto-inc under MSSQL
keys of the related table are not fetched
- fix count for group_by as scalar
- add horrific fix to make Oracle's retarded limit syntax work
- - remove Carp require
+ - changed UUIDColumns to use new UUIDMaker classes for uuid creation
+ using whatever module may be available
0.05003 2006-02-08 17:50:20
- add component_class accessors and use them for *_class
- small fixes to Serialize and ResultSetManager
- - prevent accidental table-wide update/delete on row-object
- from PK-less table
- rollback on disconnect, and disconnect on DESTROY
- - fixes to deep search and search_relateduser
0.05002 2006-02-06 12:12:03
- Added recommends for Class::Inspector
0.03004
- Added an || '' to the CDBICompat stringify to avoid null warnings
- Updated name section for manual pods
-
0.03003 2005-11-03 17:00:00
- POD fixes.
- Changed use to require in Relationship/Base to avoid import.
Scotty Allen <scotty@scottyallen.com>
- Justin Guenther <guentherj@agr.gc.ca>
+ Justin Guenther <jguenther@gmail.com>
LICENSE
You may distribute this code under the same terms as Perl itself.
Brandon Black
+Christopher H. Laco
+
Scotty Allen <scotty@scottyallen.com>
sc_
-Justin Guenther <guentherj@agr.gc.ca>
-
=head1 LICENSE
You may distribute this code under the same terms as Perl itself.
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/
+ Serialize::Storable
InflateColumn
Relationship
PK
sub storage { shift->schema_instance(@_)->storage; }
sub resultset_instance {
- my $class = shift;
+ my $class = ref $_[0] || $_[0];
my $source = $class->result_source_instance;
if ($source->result_class ne $class) {
$source = $source->new($source);
--- /dev/null
+=head1 NAME
+
+DBIx::Class::Manual::Example - Simple CD database example
+
+=head1 DESCRIPTION
+
+This tutorial will guide you through the process of setting up and testing a very basic CD database using MySQL, with DBIx::Class::Schema as the database frontend.
+
+The database consists of the following:
+
+ table 'artist' with columns: artistid, name
+ table 'cd' with columns: cdid, artist, title
+ table 'track' with columns: trackid, cd, title
+
+
+And these rules exist:
+
+ one artist can have many cds
+ one cd belongs to one artist
+ one cd can have many tracks
+ one track belongs to one cd
+
+
+=head2 Installation
+
+=head3 Create the database/tables and populate them with a few records
+
+ CREATE DATABASE cdtestdb ;
+ USE cdtestdb;
+
+ CREATE TABLE artist (
+ artistid INT NOT NULL AUTO_INCREMENT ,
+ name CHAR( 40 ) NOT NULL ,
+ PRIMARY KEY ( artistid )
+ );
+
+ CREATE TABLE cd (
+ cdid INT NOT NULL AUTO_INCREMENT ,
+ artist INT NOT NULL ,
+ title CHAR( 40 ) NOT NULL ,
+ PRIMARY KEY ( cdid )
+ );
+
+ CREATE TABLE track (
+ trackid INT NOT NULL AUTO_INCREMENT ,
+ cd INT NOT NULL ,
+ title CHAR( 40 ) NOT NULL ,
+ PRIMARY KEY ( trackid )
+ );
+
+
+ INSERT INTO artist VALUES
+ (NULL,'Michael Jackson'),
+ (NULL,'Eminem');
+
+ INSERT INTO cd VALUES
+ (NULL,'1','Thriller'),
+ (NULL,'1','Bad'),
+ (NULL,'2','The Marshall Mathers LP');
+
+ INSERT INTO track VALUES
+ (NULL,'1','Beat it'),
+ (NULL,'1','Billie Jean'),
+ (NULL,'2','Dirty Diana'),
+ (NULL,'2','Smooth Criminal'),
+ (NULL,'2','Leave Me Alone'),
+ (NULL,'3','Stan'),
+ (NULL,'3','The Way I Am');
+
+
+
+=head3 Set up DBIx::Class::Schema
+
+First, create some dirs and change working directory:
+
+ mkdir app
+ mkdir app/DB
+ mkdir app/DB/Main
+ cd app
+
+
+Then, create the following DBIx::Class::Schema classes:
+
+DB/Main.pm:
+
+ package DB::Main;
+ use base qw/DBIx::Class::Schema/;
+ __PACKAGE__->load_classes(qw/Artist CD Track/);
+
+ 1;
+
+
+DB/Main/Artist.pm:
+
+ package DB::Main::Artist;
+ use base qw/DBIx::Class/;
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('artist');
+ __PACKAGE__->add_columns(qw/ artistid name /);
+ __PACKAGE__->set_primary_key('artistid');
+ __PACKAGE__->has_many('cds' => 'DB::Main::CD');
+
+ 1;
+
+
+DB/Main/CD.pm:
+
+ package DB::Main::CD;
+ use base qw/DBIx::Class/;
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('cd');
+ __PACKAGE__->add_columns(qw/ cdid artist title/);
+ __PACKAGE__->set_primary_key('cdid');
+ __PACKAGE__->belongs_to('artist' => 'DB::Main::Artist');
+ __PACKAGE__->has_many('tracks' => 'DB::Main::Track');
+
+ 1;
+
+
+DB/Main/Track.pm:
+
+ package DB::Main::Track;
+ use base qw/DBIx::Class/;
+ __PACKAGE__->load_components(qw/Core/);
+ __PACKAGE__->table('track');
+ __PACKAGE__->add_columns(qw/ trackid cd title/);
+ __PACKAGE__->set_primary_key('trackid');
+ __PACKAGE__->belongs_to('cd' => 'DB::Main::CD');
+
+ 1;
+
+
+=head3 Create and run the test script
+
+testdb.pl:
+
+ #!/usr/bin/perl -w
+
+ use DB::Main;
+ use strict;
+
+ my $schema = DB::Main->connect('dbi:mysql:cdtestdb', 'testuser', 'testpass');
+
+ get_tracks_by_cd('Bad');
+ get_tracks_by_artist('Michael Jackson');
+
+ get_cd_by_track('Stan');
+ get_cds_by_artist('Michael Jackson');
+
+ get_artist_by_track('Dirty Diana');
+ get_artist_by_cd('The Marshall Mathers LP');
+
+
+ sub get_tracks_by_cd {
+ my $cdtitle = shift;
+ print "get_tracks_by_cd($cdtitle):\n";
+ my $rs = $schema->resultset('Track')->search(
+ {
+ 'cd.title' => $cdtitle
+ },
+ {
+ join => [qw/ cd /],
+ prefetch => [qw/ cd /]
+ }
+ );
+ while (my $track = $rs->next) {
+ print $track->title . "\n";
+ }
+ print "\n";
+ }
+
+ sub get_tracks_by_artist {
+ my $artistname = shift;
+ print "get_tracks_by_artist($artistname):\n";
+ my $rs = $schema->resultset('Track')->search(
+ {
+ 'artist.name' => $artistname
+ },
+ {
+ join => {
+ 'cd' => 'artist'
+ },
+ }
+ );
+ while (my $track = $rs->next) {
+ print $track->title . "\n";
+ }
+ print "\n";
+ }
+
+
+
+ sub get_cd_by_track {
+ my $tracktitle = shift;
+ print "get_cd_by_track($tracktitle):\n";
+ my $rs = $schema->resultset('CD')->search(
+ {
+ 'tracks.title' => $tracktitle
+ },
+ {
+ join => [qw/ tracks /],
+ }
+ );
+ my $cd = $rs->first;
+ print $cd->title . "\n\n";
+ }
+
+ sub get_cds_by_artist {
+ my $artistname = shift;
+ print "get_cds_by_artist($artistname):\n";
+ my $rs = $schema->resultset('CD')->search(
+ {
+ 'artist.name' => $artistname
+ },
+ {
+ join => [qw/ artist /],
+ prefetch => [qw/ artist /]
+ }
+ );
+ while (my $cd = $rs->next) {
+ print $cd->title . "\n";
+ }
+ print "\n";
+ }
+
+
+
+ sub get_artist_by_track {
+ my $tracktitle = shift;
+ print "get_artist_by_track($tracktitle):\n";
+ my $rs = $schema->resultset('Artist')->search(
+ {
+ 'tracks.title' => $tracktitle
+ },
+ {
+ join => {
+ 'cds' => 'tracks'
+ }
+ }
+ );
+ my $artist = $rs->first;
+ print $artist->name . "\n\n";
+ }
+
+ sub get_artist_by_cd {
+ my $cdtitle = shift;
+ print "get_artist_by_cd($cdtitle):\n";
+ my $rs = $schema->resultset('Artist')->search(
+ {
+ 'cds.title' => $cdtitle
+ },
+ {
+ join => [qw/ cds /],
+ }
+ );
+ my $artist = $rs->first;
+ print $artist->name . "\n\n";
+ }
+
+
+
+It should output:
+
+ get_tracks_by_cd(Bad):
+ Dirty Diana
+ Smooth Criminal
+ Leave Me Alone
+
+ get_tracks_by_artist(Michael Jackson):
+ Beat it
+ Billie Jean
+ Dirty Diana
+ Smooth Criminal
+ Leave Me Alone
+
+ get_cd_by_track(Stan):
+ The Marshall Mathers LP
+
+ get_cds_by_artist(Michael Jackson):
+ Thriller
+ Bad
+
+ get_artist_by_track(Dirty Diana):
+ Michael Jackson
+
+ get_artist_by_cd(The Marshall Mathers LP):
+ Eminem
+
+=head1 AUTHOR
+
+ sc_
+
+=cut
my ($self, @rest) = @_;
my $ret = $self->next::method(@rest);
- # if all primaries are already populated, skip auto-inc
- my $populated = 0;
- map { $populated++ if defined $self->get_column($_) } $self->primary_columns;
- return $ret if ( $populated == scalar $self->primary_columns );
-
- my ($pri, $too_many) =
- (grep { $self->column_info($_)->{'auto_increment'} }
- $self->primary_columns)
- || $self->primary_columns;
+ my ($pri, $too_many) = grep { !defined $self->get_column($_) } $self->primary_columns;
+ return $ret unless defined $pri; # if all primaries are already populated, skip auto-inc
$self->throw_exception( "More than one possible key found for auto-inc on ".ref $self )
- if $too_many;
- unless (defined $self->get_column($pri)) {
- $self->throw_exception( "Can't auto-inc for $pri on ".ref $self.": no _last_insert_id method" )
- unless $self->can('last_insert_id');
- my $id = $self->last_insert_id;
- $self->throw_exception( "Can't get last insert id" ) unless $id;
- $self->store_column($pri => $id);
- }
+ if defined $too_many;
+
+ my $storage = $self->result_source->storage;
+ $self->throw_exception( "Missing primary key but Storage doesn't support last_insert_id" ) unless $storage->can('last_insert_id');
+ my $id = $storage->last_insert_id($self->result_source,$pri);
+ $self->throw_exception( "Can't get last insert id" ) unless $id;
+ $self->store_column($pri => $id);
+
return $ret;
}
=cut
-__PACKAGE__->mk_classdata('sequence');
+sub sequence {
+ my ($self,$seq) = @_;
+ foreach my $pri ($self->primary_columns) {
+ $self->column_info($pri)->{sequence} = $seq;
+ }
+}
1;
__PACKAGE__->load_components(qw/PK::Auto/);
-sub last_insert_id
-{
- my ($self) = @_;
-
- my $dbh = $self->result_source->storage->dbh;
- my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3);
- $sth->execute();
-
- my @res = $sth->fetchrow_array();
-
- return @res ? $res[0] : undef;
-
-}
-
1;
=head1 NAME
-DBIx::Class::PK::Auto::DB2 - Automatic primary key class for DB2
+DBIx::Class::PK::Auto::DB2 - (DEPRECATED) Automatic primary key class for DB2
=head1 SYNOPSIS
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto::DB2 Core/);
- __PACKAGE__->set_primary_key('id');
-
-=head1 DESCRIPTION
-
-This class implements autoincrements for DB2.
+Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 AUTHORS
-Jess Robinson
+Matt S Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
-package DBIx::Class::PK::Auto::MSSQL;\r
-\r
-use strict;\r
-use warnings;\r
-\r
-use base qw/DBIx::Class/;\r
-\r
-__PACKAGE__->load_components(qw/PK::Auto/);\r
-\r
-sub last_insert_id {\r
- my( $id ) = $_[0]->result_source->storage->dbh->selectrow_array(\r
- 'SELECT @@IDENTITY' );\r
- return $id;\r
-}\r
-\r
-1;\r
-\r
-=head1 NAME \r
-\r
-DBIx::Class::PK::Auto::MSSQL - Automatic primary key class for MSSQL\r
-\r
-=head1 SYNOPSIS\r
-\r
- # In your table classes\r
- __PACKAGE__->load_components(qw/PK::Auto::MSSQL Core/);\r
- __PACKAGE__->set_primary_key('id');\r
-\r
-=head1 DESCRIPTION\r
-\r
-This class implements autoincrements for MSSQL.\r
-\r
-=head1 AUTHORS\r
-\r
-Brian Cassidy <bricas@cpan.org>\r
-\r
-=head1 LICENSE\r
-\r
-You may distribute this code under the same terms as Perl itself.\r
-\r
-=cut\r
+package DBIx::Class::PK::Auto::MSSQL;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class/;
+
+__PACKAGE__->load_components(qw/PK::Auto/);
+
+1;
+
+=head1 NAME
+
+DBIx::Class::PK::Auto::MSSQL - (DEPRECATED) Automatic primary key class for MSSQL
+
+=head1 SYNOPSIS
+
+Just load PK::Auto instead; auto-inc is now handled by Storage.
+
+=head1 AUTHORS
+
+Matt S Trout <mst@shadowcatsystems.co.uk>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
__PACKAGE__->load_components(qw/PK::Auto/);
-sub last_insert_id {
- return $_[0]->result_source->storage->dbh->{mysql_insertid};
-}
-
1;
=head1 NAME
-DBIx::Class::PK::Auto::MySQL - Automatic primary key class for MySQL
+DBIx::Class::PK::Auto::MySQL - (DEPRECATED) Automatic primary key class for MySQL
=head1 SYNOPSIS
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto::MySQL Core/);
- __PACKAGE__->set_primary_key('id');
-
-=head1 DESCRIPTION
-
-This class implements autoincrements for MySQL.
+Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 AUTHORS
-Matt S. Trout <mst@shadowcatsystems.co.uk>
+Matt S Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
use strict;
use warnings;
-use Carp qw/croak/;
-
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/PK::Auto/);
-sub last_insert_id {
- my $self = shift;
- $self->get_autoinc_seq unless $self->{_autoinc_seq};
- my $sql = "SELECT " . $self->{_autoinc_seq} . ".currval FROM DUAL";
- my ($id) = $self->result_source->storage->dbh->selectrow_array($sql);
- return $id;
-}
-
-sub get_autoinc_seq {
- my $self = shift;
-
- # return the user-defined sequence if known
- if ($self->sequence) {
- return $self->{_autoinc_seq} = $self->sequence;
- }
-
- # look up the correct sequence automatically
- my $dbh = $self->result_source->storage->dbh;
- my $sql = qq{
- SELECT trigger_body FROM ALL_TRIGGERS t
- WHERE t.table_name = ?
- AND t.triggering_event = 'INSERT'
- AND t.status = 'ENABLED'
- };
- # trigger_body is a LONG
- $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024);
- my $sth = $dbh->prepare($sql);
- $sth->execute( uc($self->result_source->name) );
- while (my ($insert_trigger) = $sth->fetchrow_array) {
- if ($insert_trigger =~ m!(\w+)\.nextval!i ) {
- $self->{_autoinc_seq} = uc($1);
- }
- }
- unless ($self->{_autoinc_seq}) {
- croak "Unable to find a sequence INSERT trigger on table '" . $self->_table_name . "'.";
- }
-}
-
1;
=head1 NAME
-DBIx::Class::PK::Auto::Oracle - Automatic primary key class for Oracle
+DBIx::Class::PK::Auto::Oracle - (DEPRECATED) Automatic primary key class for Oracle
=head1 SYNOPSIS
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto::Oracle Core/);
- __PACKAGE__->set_primary_key('id');
-
-=head1 DESCRIPTION
-
-This class implements autoincrements for Oracle.
+Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 AUTHORS
-Andy Grundman <andy@hybridized.org>
-
-Scott Connelly <scottsweep@yahoo.com>
+Matt S Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
__PACKAGE__->load_components(qw/PK::Auto/);
-sub last_insert_id {
- my $self = shift;
- $self->get_autoinc_seq unless $self->{_autoinc_seq};
- $self->result_source->storage->dbh->last_insert_id(undef,undef,undef,undef,
- {sequence=>$self->{_autoinc_seq}});
-}
-
-sub get_autoinc_seq {
- my $self = shift;
-
- # return the user-defined sequence if known
- if ($self->sequence) {
- return $self->{_autoinc_seq} = $self->sequence;
- }
-
- my @pri = $self->primary_columns;
- my $dbh = $self->result_source->storage->dbh;
- my ($schema,$table) = $self->table =~ /^(.+)\.(.+)$/ ? ($1,$2) : (undef,$self->table);
- while (my $col = shift @pri) {
- my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref;
- if (defined $info->[12] and $info->[12] =~
- /^nextval\('([^']+)'::(?:text|regclass)\)/)
- {
- $self->{_autoinc_seq} = $1;
- #$self->{_autoinc_seq} =~ s/"//g;
- last;
- }
- }
-}
-
1;
=head1 NAME
-DBIx::Class::PK::Auto::Pg - Automatic primary key class for PostgreSQL
+DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg
=head1 SYNOPSIS
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto::Pg Core/);
- __PACKAGE__->set_primary_key('id');
-
-=head1 DESCRIPTION
-
-This class implements autoincrements for PostgreSQL.
+Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 AUTHORS
-Marcus Ramberg <m.ramberg@cpan.org>
+Matt S Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
__PACKAGE__->load_components(qw/PK::Auto/);
-sub last_insert_id {
- return $_[0]->result_source->storage->dbh->func('last_insert_rowid');
-}
-
1;
=head1 NAME
-DBIx::Class::PK::Auto::SQLite - Automatic primary key class for SQLite
+DBIx::Class::PK::Auto::SQLite - (DEPRECATED) Automatic primary key class for SQLite
=head1 SYNOPSIS
- # In your table classes
- __PACKAGE__->load_components(qw/PK::Auto::SQLite Core/);
- __PACKAGE__->set_primary_key('id');
-
-=head1 DESCRIPTION
-
-This class implements autoincrements for SQLite.
+Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 AUTHORS
-Matt S. Trout <mst@shadowcatsystems.co.uk>
+Matt S Trout <mst@shadowcatsystems.co.uk>
=head1 LICENSE
=head2 many_to_many
- __PACKAGE__->many_to_many( 'accessorname' => 'a_to_b', 'table_b' );
- my @f_objs = $obj_a->accessorname;
+ __PACKAGE__->many_to_many( 'accessorname' => 'a_to_b', 'table_b' );
+ my @f_objs = $obj_a->accessorname;
+
+Creates an accessor bridging two relationships; not strictly a relationship
+in its own right, although the accessor will return a resultset or collection
+of objects just as a has_many would.
=cut
=cut
sub search_related {
- my $self = shift;
- die "Can't call *_related as class methods" unless ref $self;
- my $rel = shift;
- my $attrs = { };
- if (@_ > 1 && ref $_[$#_] eq 'HASH') {
- $attrs = { %{ pop(@_) } };
- }
- my $rel_obj = $self->relationship_info($rel);
- $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj;
- $attrs = { %{$rel_obj->{attrs} || {}}, %{$attrs || {}} };
-
- $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1));
- my $query = ((@_ > 1) ? {@_} : shift);
-
- my ($cond) = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self);
- if (ref $cond eq 'ARRAY') {
- $cond = [ map { my %hash;
- foreach my $key (keys %{$_}) {
- unless ($key =~ m/\./) {
- $hash{"me.$key"} = $_->{$key};
- } else {
- $hash{$key} = $_->{$key};
- }
- }; \%hash; } @$cond ];
- } else {
- foreach my $key (keys %$cond) {
- unless ($key =~ m/\./) {
- $cond->{"me.$key"} = delete $cond->{$key};
- }
- }
- }
- $query = ($query ? { '-and' => [ $cond, $query ] } : $cond);
- #use Data::Dumper; warn Dumper($cond);
- #warn $rel_obj->{class}." $meth $cond ".join(', ', @{$attrs->{bind}||[]});
- return $self->result_source->related_source($rel
- )->resultset->search($query, $attrs);
+ return shift->related_resultset(shift)->search(@_);
}
=head2 count_related
sub create_related {
my $self = shift;
my $rel = shift;
- return $self->search_related($rel)->create(@_);
+ my $obj = $self->search_related($rel)->create(@_);
+ delete $self->{related_resultsets}->{$rel};
+ return $obj;
}
=head2 new_related
my $f_class = $self->result_source->schema->class($rel_obj->{class});
$self->throw_exception( "Object $f_obj isn't a ".$f_class )
unless $f_obj->isa($f_class);
- foreach my $key (keys %$cond) {
- next if ref $cond->{$key}; # Skip literals and complex conditions
- $self->throw_exception("set_from_related can't handle $key as key")
- unless $key =~ m/^foreign\.([^\.]+)$/;
- my $val = $f_obj->get_column($1);
- $self->throw_exception("set_from_related can't handle ".$cond->{$key}." as value")
- unless $cond->{$key} =~ m/^self\.([^\.]+)$/;
- $self->set_column($1 => $val);
- }
+ $self->set_columns(
+ $self->result_source->resolve_condition(
+ $rel_obj->{cond}, $f_obj, $rel));
return 1;
}
sub delete_related {
my $self = shift;
- return $self->search_related(@_)->delete;
+ my $obj = $self->search_related(@_)->delete;
+ delete $self->{related_resultsets}->{$_[0]};
+ return $obj;
}
1;
+=head2 related_resultset($name)
+
+Returns a L<DBIx::Class::ResultSet> for the relationship named $name.
+
+ $rs = $obj->related_resultset('related_table');
+
+=cut
+
+sub related_resultset {
+ my $self = shift;
+ $self->throw_exception("Can't call *_related as class methods") unless ref $self;
+ my $rel = shift;
+ my $rel_obj = $self->relationship_info($rel);
+ $self->throw_exception( "No such relationship ${rel}" ) unless $rel_obj;
+
+ return $self->{related_resultsets}{$rel} ||= do {
+ my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {});
+ $attrs = { %{$rel_obj->{attrs} || {}}, %$attrs };
+
+ $self->throw_exception( "Invalid query: @_" ) if (@_ > 1 && (@_ % 2 == 1));
+ my $query = ((@_ > 1) ? {@_} : shift);
+
+ my $cond = $self->result_source->resolve_condition($rel_obj->{cond}, $rel, $self);
+ if (ref $cond eq 'ARRAY') {
+ $cond = [ map { my $hash;
+ foreach my $key (keys %$_) {
+ my $newkey = $key =~ /\./ ? "me.$key" : $key;
+ $hash->{$newkey} = $_->{$key};
+ }; $hash } @$cond ];
+ } else {
+ foreach my $key (grep { ! /\./ } keys %$cond) {
+ $cond->{"me.$key"} = delete $cond->{$key};
+ }
+ }
+ $query = ($query ? { '-and' => [ $cond, $query ] } : $cond);
+ $self->result_source->related_source($rel)->resultset->search($query, $attrs);
+ };
+}
+
=head1 AUTHORS
Matt S. Trout <mst@shadowcatsystems.co.uk>
{ accessor => 'multi',
join_type => 'LEFT',
cascade_delete => 1,
+ cascade_copy => 1,
%{$attrs||{}} } );
}
sub search {
my $self = shift;
- #use Data::Dumper;warn Dumper(@_);
-
- my $attrs = { %{$self->{attrs}} };
- if (@_ > 1 && ref $_[$#_] eq 'HASH') {
- $attrs = { %$attrs, %{ pop(@_) } };
- }
+ my $rs;
+ if( @_ ) {
+
+ my $attrs = { %{$self->{attrs}} };
+ my $having = delete $attrs->{having};
+ if (@_ > 1 && ref $_[$#_] eq 'HASH') {
+ $attrs = { %$attrs, %{ pop(@_) } };
+ }
- my $where = (@_ ? ((@_ == 1 || ref $_[0] eq "HASH") ? shift : {@_}) : undef());
- if (defined $where) {
- $where = (defined $attrs->{where}
+ my $where = (@_
+ ? ((@_ == 1 || ref $_[0] eq "HASH")
+ ? shift
+ : ((@_ % 2)
+ ? $self->throw_exception(
+ "Odd number of arguments to search")
+ : {@_}))
+ : undef());
+ if (defined $where) {
+ $where = (defined $attrs->{where}
? { '-and' =>
[ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
$where, $attrs->{where} ] }
: $where);
- $attrs->{where} = $where;
- }
+ $attrs->{where} = $where;
+ }
- my $rs = (ref $self)->new($self->result_source, $attrs);
+ if (defined $having) {
+ $having = (defined $attrs->{having}
+ ? { '-and' =>
+ [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
+ $having, $attrs->{having} ] }
+ : $having);
+ $attrs->{having} = $having;
+ }
+ $rs = (ref $self)->new($self->result_source, $attrs);
+ }
+ else {
+ $rs = $self;
+ $rs->reset();
+ }
return (wantarray ? $rs->all : $rs);
}
$query->{$self->{attrs}{alias}.'.'.$_} = delete $query->{$_};
}
#warn Dumper($query);
- return $self->search($query,$attrs)->next;
+ return (keys %$attrs
+ ? $self->search($query,$attrs)->single
+ : $self->single($query));
}
=head2 search_related
=cut
sub search_related {
- my ($self, $rel, @rest) = @_;
- my $rel_obj = $self->result_source->relationship_info($rel);
- $self->throw_exception(
- "No such relationship ${rel} in search_related")
- unless $rel_obj;
- my $rs = $self->search(undef, { join => $rel });
- my $alias = ($rs->{attrs}{seen_join}{$rel} > 1
- ? join('_', $rel, $rs->{attrs}{seen_join}{$rel})
- : $rel);
- return $self->result_source->schema->resultset($rel_obj->{class}
- )->search( undef,
- { %{$rs->{attrs}},
- alias => $alias,
- select => undef(),
- as => undef() }
- )->search(@rest);
+ return shift->related_resultset(shift)->search(@_);
}
=head2 cursor
$attrs->{where},$attrs);
}
+=head2 single
+
+Inflates the first result without creating a cursor
+
+=cut
+
+sub single {
+ my ($self, $extra) = @_;
+ my ($attrs) = $self->{attrs};
+ $attrs = { %$attrs };
+ if ($extra) {
+ if (defined $attrs->{where}) {
+ $attrs->{where} = {
+ '-and'
+ => [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
+ delete $attrs->{where}, $extra ]
+ };
+ } else {
+ $attrs->{where} = $extra;
+ }
+ }
+ my @data = $self->result_source->storage->select_single(
+ $self->{from}, $attrs->{select},
+ $attrs->{where},$attrs);
+ return (@data ? $self->_construct_object(@data) : ());
+}
+
+
=head2 search_like
Perform a search, but use C<LIKE> instead of equality as the condition. Note
sub next {
my ($self) = @_;
+ my $cache;
+ if( @{$cache = $self->{all_cache} || []}) {
+ $self->{all_cache_position} ||= 0;
+ my $obj = $cache->[$self->{all_cache_position}];
+ $self->{all_cache_position}++;
+ return $obj;
+ }
+ if ($self->{attrs}{cache}) {
+ $self->{all_cache_position} = 0;
+ return ($self->all)[0];
+ }
my @row = $self->cursor->next;
# warn Dumper(\@row); use Data::Dumper;
return unless (@row);
sub _construct_object {
my ($self, @row) = @_;
+ my @row_orig = @row; # copy @row for key comparison later, because @row will change
my @as = @{ $self->{attrs}{as} };
+#use Data::Dumper; warn Dumper \@as;
#warn "@cols -> @row";
my $info = [ {}, {} ];
foreach my $as (@as) {
+ my $rs = $self;
my $target = $info;
my @parts = split(/\./, $as);
my $col = pop(@parts);
foreach my $p (@parts) {
$target = $target->[1]->{$p} ||= [];
+
+ $rs = $rs->related_resultset($p) if $rs->{attrs}->{cache};
}
- $target->[0]->{$col} = shift @row;
+
+ $target->[0]->{$col} = shift @row
+ if ref($target->[0]) ne 'ARRAY'; # arrayref is pre-inflated objects, do not overwrite
}
#use Data::Dumper; warn Dumper(\@as, $info);
my $new = $self->result_source->result_class->inflate_result(
$self->result_source, @$info);
$new = $self->{attrs}{record_filter}->($new)
if exists $self->{attrs}{record_filter};
+
+ if( $self->{attrs}->{cache} ) {
+ while( my( $rel, $rs ) = each( %{$self->{related_resultsets}} ) ) {
+ $rs->all;
+ #warn "$rel:", @{$rs->get_cache};
+ }
+ $self->build_rr( $self, $new );
+ }
+
return $new;
}
+
+sub build_rr {
+ # build related resultsets for supplied object
+ my ( $self, $context, $obj ) = @_;
+
+ my $re = qr/^\w+\./;
+ while( my ($rel, $rs) = each( %{$context->{related_resultsets}} ) ) {
+ #warn "context:", $context->result_source->name, ", rel:$rel, rs:", $rs->result_source->name;
+ my @objs = ();
+ my $map = {};
+ my $cond = $context->result_source->relationship_info($rel)->{cond};
+ keys %$cond;
+ while( my( $rel_key, $pk ) = each(%$cond) ) {
+ $rel_key =~ s/$re//;
+ $pk =~ s/$re//;
+ $map->{$rel_key} = $pk;
+ }
+
+ $rs->reset();
+ while( my $rel_obj = $rs->next ) {
+ while( my( $rel_key, $pk ) = each(%$map) ) {
+ if( $rel_obj->get_column($rel_key) eq $obj->get_column($pk) ) {
+ push @objs, $rel_obj;
+ }
+ }
+ }
+
+ my $rel_rs = $obj->related_resultset($rel);
+ $rel_rs->{attrs}->{cache} = 1;
+ $rel_rs->set_cache( \@objs );
+
+ while( my $rel_obj = $rel_rs->next ) {
+ $self->build_rr( $rs, $rel_obj );
+ }
+
+ }
+
+}
=head2 result_source
my $self = shift;
return $self->search(@_)->count if @_ && defined $_[0];
unless (defined $self->{count}) {
+ return scalar @{ $self->get_cache }
+ if @{ $self->get_cache };
my $group_by;
my $select = { 'count' => '*' };
- if( $group_by = delete $self->{attrs}{group_by} ) {
+ my $attrs = { %{ $self->{attrs} } };
+ if( $group_by = delete $attrs->{group_by} ) {
+ delete $attrs->{having};
my @distinct = (ref $group_by ? @$group_by : ($group_by));
# todo: try CONCAT for multi-column pk
my @pk = $self->result_source->primary_columns;
if( scalar(@pk) == 1 ) {
my $pk = shift(@pk);
- my $alias = $self->{attrs}{alias};
+ my $alias = $attrs->{alias};
my $re = qr/^($alias\.)?$pk$/;
foreach my $column ( @distinct) {
if( $column =~ $re ) {
#use Data::Dumper; die Dumper $select;
}
- my $attrs = { %{ $self->{attrs} },
- select => $select,
- as => [ 'count' ] };
+ $attrs->{select} = $select;
+ $attrs->{as} = [ 'count' ];
# offset, order by and page are not needed to count. record_filter is cdbi
delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/;
($self->{count}) = (ref $self)->new($self->result_source, $attrs)->cursor->next;
- $self->{attrs}{group_by} = $group_by;
}
return 0 unless $self->{count};
my $count = $self->{count};
sub all {
my ($self) = @_;
+ return @{ $self->get_cache }
+ if @{ $self->get_cache };
+ if( $self->{attrs}->{cache} ) {
+ my @obj = map { $self->_construct_object(@$_); }
+ $self->cursor->all;
+ $self->set_cache( \@obj );
+ return @obj;
+ }
return map { $self->_construct_object(@$_); }
$self->cursor->all;
}
sub reset {
my ($self) = @_;
+ $self->{all_cache_position} = 0;
$self->cursor->reset;
return $self;
}
return $row;
}
+=head2 get_cache
+
+Gets the contents of the cache for the resultset.
+
+=cut
+
+sub get_cache {
+ my $self = shift;
+ return $self->{all_cache} || [];
+}
+
+=head2 set_cache
+
+Sets the contents of the cache for the resultset. Expects an arrayref of objects of the same class as those produced by the resultset.
+
+=cut
+
+sub set_cache {
+ my ( $self, $data ) = @_;
+ $self->throw_exception("set_cache requires an arrayref")
+ if ref $data ne 'ARRAY';
+ my $result_class = $self->result_source->result_class;
+ foreach( @$data ) {
+ $self->throw_exception("cannot cache object of type '$_', expected '$result_class'")
+ if ref $_ ne $result_class;
+ }
+ $self->{all_cache} = $data;
+}
+
+=head2 clear_cache
+
+Clears the cache for the resultset.
+
+=cut
+
+sub clear_cache {
+ my $self = shift;
+ $self->set_cache([]);
+}
+
+=head2 related_resultset
+
+Returns a related resultset for the supplied relationship name.
+
+ $rs = $rs->related_resultset('foo');
+
+=cut
+
+sub related_resultset {
+ my ( $self, $rel, @rest ) = @_;
+ $self->{related_resultsets} ||= {};
+ my $resultsets = $self->{related_resultsets};
+ if( !exists $resultsets->{$rel} ) {
+ #warn "fetching related resultset for rel '$rel'";
+ my $rel_obj = $self->result_source->relationship_info($rel);
+ $self->throw_exception(
+ "search_related: result source '" . $self->result_source->name .
+ "' has no such relationship ${rel}")
+ unless $rel_obj; #die Dumper $self->{attrs};
+ my $rs;
+ if( $self->{attrs}->{cache} ) {
+ $rs = $self->search(undef);
+ }
+ else {
+ $rs = $self->search(undef, { join => $rel });
+ }
+ #use Data::Dumper; die Dumper $rs->{attrs};#$rs = $self->search( undef );
+ #use Data::Dumper; warn Dumper $self->{attrs}, Dumper $rs->{attrs};
+ my $alias = (defined $rs->{attrs}{seen_join}{$rel}
+ && $rs->{attrs}{seen_join}{$rel} > 1
+ ? join('_', $rel, $rs->{attrs}{seen_join}{$rel})
+ : $rel);
+ $resultsets->{$rel} =
+ $self->result_source->schema->resultset($rel_obj->{class}
+ )->search( undef,
+ { %{$rs->{attrs}},
+ alias => $alias,
+ select => undef(),
+ as => undef() }
+ )->search(@rest);
+ }
+ return $resultsets->{$rel};
+}
+
=head2 throw_exception
See Schema's throw_exception
use Carp::Clan qw/^DBIx::Class/;
use Storable;
+use Scalar::Util qw/weaken/;
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/AccessorGroup/);
sub new {
my ($class, $attrs) = @_;
$class = ref $class if ref $class;
- my $new = bless({ %{$attrs || {}} }, $class);
+ my $new = bless({ %{$attrs || {}}, _resultset => undef }, $class);
$new->{resultset_class} ||= 'DBIx::Class::ResultSet';
$new->{resultset_attributes} = { %{$new->{resultset_attributes} || {}} };
$new->{_ordered_columns} = [ @{$new->{_ordered_columns}||[]}];
#warn "$self $k $for $v";
$ret{$k} = $for->get_column($v);
#warn %ret;
+ } elsif (ref $as) { # reverse object
+ $ret{$v} = $as->get_column($k);
} else {
$ret{"${as}.${k}"} = "${for}.${v}";
}
sub resultset {
my $self = shift;
- return $self->resultset_class->new($self, $self->{resultset_attributes});
+ return $self->{_resultset} if ref $self->{_resultset} eq $self->resultset_class;
+ return $self->{_resultset} = do {
+ my $rs = $self->resultset_class->new($self, $self->{resultset_attributes});
+ weaken $rs->result_source;
+ $rs;
+ };
}
=head2 throw_exception
$source->storage->insert($source->from, { $self->get_columns });
$self->in_storage(1);
$self->{_dirty_columns} = {};
+ $self->{related_resultsets} = {};
return $self;
}
$self->throw_exception("Can't update ${self}: updated more than one row");
}
$self->{_dirty_columns} = {};
+ $self->{related_resultsets} = {};
return $self;
}
sub copy {
my ($self, $changes) = @_;
- my $new = bless({ _column_data => { %{$self->{_column_data}}} }, ref $self);
- $new->set_column($_ => $changes->{$_}) for keys %$changes;
- return $new->insert;
+ $changes ||= {};
+ my $col_data = { %{$self->{_column_data}} };
+ foreach my $col (keys %$col_data) {
+ delete $col_data->{$col}
+ if $self->result_source->column_info($col)->{is_auto_increment};
+ }
+ my $new = bless({ _column_data => $col_data }, ref $self);
+ $new->set_columns($changes);
+ $new->insert;
+ foreach my $rel ($self->result_source->relationships) {
+ my $rel_info = $self->result_source->relationship_info($rel);
+ if ($rel_info->{attrs}{cascade_copy}) {
+ my $resolved = $self->result_source->resolve_condition(
+ $rel_info->{cond}, $rel, $new);
+ foreach my $related ($self->search_related($rel)) {
+ $related->copy($resolved);
+ }
+ }
+ }
+ return $new;
}
=head2 store_column
},
ref $class || $class);
my $schema;
- PRE: foreach my $pre (keys %{$prefetch||{}}) {
+ foreach my $pre (keys %{$prefetch||{}}) {
+ my $pre_val = $prefetch->{$pre};
my $pre_source = $source->related_source($pre);
- $class->throw_exception("Can't prefetch non-existant relationship ${pre}") unless $pre_source;
+ $class->throw_exception("Can't prefetch non-existent relationship ${pre}") unless $pre_source;
my $fetched;
unless ($pre_source->primary_columns == grep { exists $prefetch->{$pre}[0]{$_}
and !defined $prefetch->{$pre}[0]{$_} } $pre_source->primary_columns)
{
$fetched = $pre_source->result_class->inflate_result(
- $pre_source, @{$prefetch->{$pre}});
+ $pre_source, @{$prefetch->{$pre}});
}
my $accessor = $source->relationship_info($pre)->{attrs}{accessor};
$class->throw_exception("No accessor for prefetched $pre")
- unless defined $accessor;
+ unless defined $accessor;
if ($accessor eq 'single') {
$new->{_relationship_data}{$pre} = $fetched;
} elsif ($accessor eq 'filter') {
- $new->{_inflated_column}{$pre} = $fetched;
+ $new->{_inflated_column}{$pre} = $fetched;
+ } elsif ($accessor eq 'multi') {
+
} else {
- $class->throw_exception("Don't know how to store prefetched $pre");
+ $class->throw_exception("Prefetch not supported with accessor '$accessor'");
}
}
return $new;
=head2 txn_do
-=head3 Arguments: <$coderef>, [@coderef_args]
+=head3 Arguments: <coderef>, [@coderef_args]
-Executes C<$coderef> with (optional) arguments C<@coderef_args>
-transactionally, returning its result (if any). If an exception is
-caught, a rollback is issued and the exception is rethrown. If the
-rollback fails, (i.e. throws an exception) an exception is thrown that
-includes a "Rollback failed" message.
+Executes <coderef> with (optional) arguments <@coderef_args> transactionally,
+returning its result (if any). If an exception is caught, a rollback is issued
+and the exception is rethrown. If the rollback fails, (i.e. throws an
+exception) an exception is thrown that includes a "Rollback failed" message.
For example,
}
}
-Nested transactions work as expected (i.e. only the outermost
+Nested transactions should work as expected (i.e. only the outermost
transaction will issue a txn_commit on the Schema's storage)
=cut
croak @_;
}
+=head2 deploy
+
+Attempts to deploy the schema to the current storage
+
+=cut
+
+sub deploy {
+ my ($self) = shift;
+ $self->throw_exception("Can't deploy without storage") unless $self->storage;
+ $self->storage->deploy($self);
+}
+
1;
=head1 AUTHORS
-package DBIx::Class::Serialize;
+package DBIx::Class::Serialize::Storable;
use strict;
-use Storable qw/freeze thaw/;
+use Storable;
sub STORABLE_freeze {
my ($self,$cloning) = @_;
- #return if $cloning;
my $to_serialize = { %$self };
delete $to_serialize->{result_source};
- return (freeze($to_serialize));
+ return (Storable::freeze($to_serialize));
}
sub STORABLE_thaw {
my ($self,$cloning,$serialized) = @_;
- %$self = %{ thaw($serialized) };
- $self->result_source($self->result_source_instance);
+ %$self = %{ Storable::thaw($serialized) };
+ $self->result_source($self->result_source_instance) if $self->can('result_source_instance');
}
1;
=head1 NAME
- DBIx::Class::Serialize - hooks for Storable freeze/thaw (EXPERIMENTAL)
+ DBIx::Class::Serialize::Storable - hooks for Storable freeze/thaw (EXPERIMENTAL)
=head1 SYNOPSIS
# in a table class definition
- __PACKAGE__->load_components(qw/Serialize/);
+ __PACKAGE__->load_components(qw/Serialize::Storable/);
# meanwhile, in a nearby piece of code
my $obj = $schema->resultset('Foo')->find(12);
sub select {
my ($self, $table, $fields, $where, $order, @rest) = @_;
@rest = (-1) unless defined $rest[0];
- $self->SUPER::select($table, $self->_recurse_fields($fields),
- $where, $order, @rest);
+ local $self->{having_bind} = [];
+ my ($sql, @ret) = $self->SUPER::select($table,
+ $self->_recurse_fields($fields), $where, $order, @rest);
+ return wantarray ? ($sql, @ret, @{$self->{having_bind}}) : $sql;
}
sub _emulate_limit {
sub _order_by {
my $self = shift;
my $ret = '';
+ my @extra;
if (ref $_[0] eq 'HASH') {
if (defined $_[0]->{group_by}) {
$ret = $self->_sqlcase(' group by ')
.$self->_recurse_fields($_[0]->{group_by});
}
+ if (defined $_[0]->{having}) {
+ my $frag;
+ ($frag, @extra) = $self->_recurse_where($_[0]->{having});
+ push(@{$self->{having_bind}}, @extra);
+ $ret .= $self->_sqlcase(' having ').$frag;
+ }
if (defined $_[0]->{order_by}) {
$ret .= $self->SUPER::_order_by($_[0]->{order_by});
}
sub _quote {
my ($self, $label) = @_;
return '' unless defined $label;
- return "*" if $label eq '*';
return $label unless $self->{quote_char};
- if(ref $self->{quote_char} eq "ARRAY"){
- return $self->{quote_char}->[0] . $label . $self->{quote_char}->[1]
- if !defined $self->{name_sep};
- my $sep = $self->{name_sep};
- return join($self->{name_sep},
- map { $self->{quote_char}->[0] . $_ . $self->{quote_char}->[1] }
- split(/\Q$sep\E/,$label));
- }
return $self->SUPER::_quote($label);
}
$new->transaction_depth(0);
if (defined($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}) &&
($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} =~ /=(.+)$/)) {
- $new->debugfh(IO::File->new($1, 'w')||croak "Cannot open trace file $1");
+ $new->debugfh(IO::File->new($1, 'w')) || $new->throw_exception("Cannot open trace file $1");
} else {
$new->debugfh(IO::File->new('>&STDERR'));
}
return $new;
}
+sub throw_exception {
+ my ($self, $msg) = @_;
+ croak($msg);
+}
+
=head1 NAME
DBIx::Class::Storage::DBI - DBI storage handler
my ($self) = @_;
my @info = @{$self->connect_info || []};
$self->_dbh($self->_connect(@info));
-
+ my $driver = $self->_dbh->{Driver}->{Name};
+ eval "require DBIx::Class::Storage::DBI::${driver}";
+ unless ($@) {
+ bless $self, "DBIx::Class::Storage::DBI::${driver}";
+ }
# if on-connect sql statements are given execute them
foreach my $sql_statement (@{$self->on_connect_do || []}) {
$self->_dbh->do($sql_statement);
return $dbh;
}
- DBI->connect(@info);
+ my $dbh = DBI->connect(@info);
+ $self->throw_exception("DBI Connection failed: $DBI::errstr")
+ unless $dbh;
+ $dbh;
}
=head2 txn_begin
$self->debugfh->print("$sql: " . join(', ', @debug_bind) . "\n");
}
my $sth = $self->sth($sql,$op);
- croak "no sth generated via sql: $sql" unless $sth;
+ $self->throw_exception("no sth generated via sql: $sql") unless $sth;
@bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args
my $rv;
if ($sth) {
$rv = $sth->execute(@bind);
} else {
- croak "'$sql' did not generate a statement.";
+ $self->throw_exception("'$sql' did not generate a statement.");
}
return (wantarray ? ($rv, $sth, @bind) : $rv);
}
sub insert {
my ($self, $ident, $to_insert) = @_;
- croak( "Couldn't insert ".join(', ', map "$_ => $to_insert->{$_}", keys %$to_insert)." into ${ident}" )
+ $self->throw_exception( "Couldn't insert ".join(', ', map "$_ => $to_insert->{$_}", keys %$to_insert)." into ${ident}" )
unless ($self->_execute('insert' => [], $ident, $to_insert));
return $to_insert;
}
if (ref $condition eq 'SCALAR') {
$order = $1 if $$condition =~ s/ORDER BY (.*)$//i;
}
- if (exists $attrs->{group_by}) {
+ if (exists $attrs->{group_by} || $attrs->{having}) {
$order = { group_by => $attrs->{group_by},
+ having => $attrs->{having},
($order ? (order_by => $order) : ()) };
}
my @args = ('select', $attrs->{bind}, $ident, $select, $condition, $order);
$column_info{is_nullable} = $info->{NULLABLE};
$result{$info->{COLUMN_NAME}} = \%column_info;
}
- }else{
+ } else {
my $sth = $self->dbh->prepare("SELECT * FROM $table WHERE 1=0");
$sth->execute;
my @columns = @{$sth->{NAME}};
return \%result;
}
+sub last_insert_id {
+ my ($self, $row) = @_;
+
+ return $self->dbh->func('last_insert_rowid');
+
+}
+
+sub sqlt_type {
+ my ($self) = @_;
+ my $dsn = $self->connect_info->[0];
+ $dsn =~ /^dbi:(.*?)\d*:/;
+ return $1;
+}
+
+sub deployment_statements {
+ my ($self, $schema, $type) = @_;
+ $type ||= $self->sqlt_type;
+ eval "use SQL::Translator";
+ $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@;
+ eval "use SQL::Translator::Parser::DBIx::Class;";
+ $self->throw_exception($@) if $@;
+ eval "use SQL::Translator::Producer::${type};";
+ $self->throw_exception($@) if $@;
+ my $tr = SQL::Translator->new();
+ SQL::Translator::Parser::DBIx::Class::parse( $tr, $schema );
+ return "SQL::Translator::Producer::${type}"->can('produce')->($tr);
+}
+
+sub deploy {
+ my ($self, $schema, $type) = @_;
+ foreach(split(";\n", $self->deployment_statements($schema, $type))) {
+ $self->dbh->do($_) or warn "SQL was:\n $_";
+ }
+}
+
sub DESTROY { shift->disconnect }
1;
--- /dev/null
+package DBIx::Class::Storage::DBI::DB2;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+# __PACKAGE__->load_components(qw/PK::Auto/);
+
+sub last_insert_id
+{
+ my ($self) = @_;
+
+ my $dbh = $self->_dbh;
+ my $sth = $dbh->prepare_cached("VALUES(IDENTITY_VAL_LOCAL())", {}, 3);
+ $sth->execute();
+
+ my @res = $sth->fetchrow_array();
+
+ return @res ? $res[0] : undef;
+
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::DB2 - Automatic primary key class for DB2
+
+=head1 SYNOPSIS
+
+ # In your table classes
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for DB2.
+
+=head1 AUTHORS
+
+Jess Robinson
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::MSSQL;\r
+\r
+use strict;\r
+use warnings;\r
+\r
+use base qw/DBIx::Class::Storage::DBI/;\r
+\r
+# __PACKAGE__->load_components(qw/PK::Auto/);\r
+\r
+sub last_insert_id {\r
+ my( $id ) = $_[0]->_dbh->selectrow_array('SELECT @@IDENTITY' );\r
+ return $id;\r
+}\r
+\r
+1;\r
+\r
+=head1 NAME \r
+\r
+DBIx::Class::Storage::DBI::MSSQL - Automatic primary key class for MSSQL\r
+\r
+=head1 SYNOPSIS\r
+\r
+ # In your table classes\r
+ __PACKAGE__->load_components(qw/PK::Auto Core/);\r
+ __PACKAGE__->set_primary_key('id');\r
+\r
+=head1 DESCRIPTION\r
+\r
+This class implements autoincrements for MSSQL.\r
+\r
+=head1 AUTHORS\r
+\r
+Brian Cassidy <bricas@cpan.org>\r
+\r
+=head1 LICENSE\r
+\r
+You may distribute this code under the same terms as Perl itself.\r
+\r
+=cut\r
--- /dev/null
+package DBIx::Class::Storage::DBI::Oracle;
+
+use strict;
+use warnings;
+
+use Carp qw/croak/;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+# __PACKAGE__->load_components(qw/PK::Auto/);
+
+sub last_insert_id {
+ my ($self,$source,$col) = @_;
+ my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
+ my $sql = "SELECT " . $seq . ".currval FROM DUAL";
+ my ($id) = $self->_dbh->selectrow_array($sql);
+ return $id;
+}
+
+sub get_autoinc_seq {
+ my ($self,$source,$col) = @_;
+
+ # look up the correct sequence automatically
+ my $dbh = $self->_dbh;
+ my $sql = q{
+ SELECT trigger_body FROM ALL_TRIGGERS t
+ WHERE t.table_name = ?
+ AND t.triggering_event = 'INSERT'
+ AND t.status = 'ENABLED'
+ };
+ # trigger_body is a LONG
+ $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024);
+ my $sth = $dbh->prepare($sql);
+ $sth->execute( uc($source->name) );
+ while (my ($insert_trigger) = $sth->fetchrow_array) {
+ return uc($1) if $insert_trigger =~ m!(\w+)\.nextval!i; # col name goes here???
+ }
+ croak "Unable to find a sequence INSERT trigger on table '" . $source->name . "'.";
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Oracle - Automatic primary key class for Oracle
+
+=head1 SYNOPSIS
+
+ # In your table classes
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+ __PACKAGE__->sequence('mysequence');
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for Oracle.
+
+=head1 AUTHORS
+
+Andy Grundman <andy@hybridized.org>
+
+Scott Connelly <scottsweep@yahoo.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::Pg;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+# __PACKAGE__->load_components(qw/PK::Auto/);
+
+sub last_insert_id {
+ my ($self,$source,$col) = @_;
+ my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
+ $self->_dbh->last_insert_id(undef,undef,undef,undef, {sequence => $seq});
+}
+
+sub get_autoinc_seq {
+ my ($self,$source,$col) = @_;
+
+ my @pri = $source->primary_columns;
+ my $dbh = $self->_dbh;
+ my ($schema,$table) = $source->name =~ /^(.+)\.(.+)$/ ? ($1,$2)
+ : (undef,$source->name);
+ while (my $col = shift @pri) {
+ my $info = $dbh->column_info(undef,$schema,$table,$col)->fetchrow_arrayref;
+ if (defined $info->[12] and $info->[12] =~
+ /^nextval\('"?([^"']+)"?'::(?:text|regclass)\)/)
+ {
+ return $1;
+ }
+ }
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Pg - Automatic primary key class for PostgreSQL
+
+=head1 SYNOPSIS
+
+ # In your table classes
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+ __PACKAGE__->sequence('mysequence');
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for PostgreSQL.
+
+=head1 AUTHORS
+
+Marcus Ramberg <m.ramberg@cpan.org>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::SQLite;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+sub last_insert_id {
+ return $_[0]->dbh->func('last_insert_rowid');
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::SQLite - Automatic primary key class for SQLite
+
+=head1 SYNOPSIS
+
+ # In your table classes
+  __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for SQLite.
+
+=head1 AUTHORS
+
+Matt S. Trout <mst@shadowcatsystems.co.uk>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
--- /dev/null
+package DBIx::Class::Storage::DBI::mysql;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+# __PACKAGE__->load_components(qw/PK::Auto/);
+
+sub last_insert_id {
+ return $_[0]->_dbh->{mysql_insertid};
+}
+
+sub sqlt_type {
+ return 'MySQL';
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::mysql - Automatic primary key class for MySQL
+
+=head1 SYNOPSIS
+
+ # In your table classes
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for MySQL.
+
+=head1 AUTHORS
+
+Matt S. Trout <mst@shadowcatsystems.co.uk>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
package DBIx::Class::UUIDColumns;
use base qw/DBIx::Class/;
-use Data::UUID;
-
__PACKAGE__->mk_classdata( 'uuid_auto_columns' => [] );
+__PACKAGE__->mk_classdata( 'uuid_maker' );
+__PACKAGE__->uuid_class( __PACKAGE__->_find_uuid_module );
+
+# be compatible with Class::DBI::UUID
+sub uuid_columns {
+ my $self = shift;
+ for (@_) {
+ $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_);
+ }
+ $self->uuid_auto_columns(\@_);
+}
+
+sub uuid_class {
+ my ($self, $class) = @_;
+
+ if ($class) {
+ $class = "DBIx::Class::UUIDMaker$class" if $class =~ /^::/;
+
+ if (!eval "require $class") {
+ $self->throw_exception("$class could not be loaded: $@");
+ } elsif (!$class->isa('DBIx::Class::UUIDMaker')) {
+ $self->throw_exception("$class is not a UUIDMaker subclass");
+ } else {
+ $self->uuid_maker($class->new);
+ };
+ };
+
+ return ref $self->uuid_maker;
+};
+
+sub insert {
+ my $self = shift;
+ for my $column (@{$self->uuid_auto_columns}) {
+ $self->store_column( $column, $self->get_uuid )
+ unless defined $self->get_column( $column );
+ }
+ $self->next::method(@_);
+}
+
+sub get_uuid {
+ return shift->uuid_maker->as_string;
+}
+
+sub _find_uuid_module {
+ if (eval{require Data::UUID}) {
+ return '::Data::UUID';
+ } elsif ($^O ne 'openbsd' && eval{require APR::UUID}) {
+ # APR::UUID on openbsd causes some as yet unfound nastyness for XS
+ return '::APR::UUID';
+ } elsif (eval{require UUID}) {
+ return '::UUID';
+ } elsif (eval{
+ # squelch the 'too late for INIT' warning in Win32::API::Type
+ local $^W = 0;
+ require Win32::Guidgen;
+ }) {
+ return '::Win32::Guidgen';
+ } elsif (eval{require Win32API::GUID}) {
+ return '::Win32API::GUID';
+ } else {
+ shift->throw_exception('no suitable uuid module could be found')
+ };
+};
+
+1;
+__END__
=head1 NAME
=head1 SYNOPSIS
- pacakge Artist;
+ package Artist;
__PACKAGE__->load_components(qw/UUIDColumns Core DB/);
__PACKAGE__->uuid_columns( 'artist_id' );
This L<DBIx::Class> component resembles the behaviour of
L<Class::DBI::UUID>, to make some columns implicitly created as uuid.
+When loaded, C<UUIDColumns> will search for a suitable uuid generation module
+from the following list of supported modules:
+
+ Data::UUID
+ APR::UUID*
+ UUID
+ Win32::Guidgen
+ Win32API::GUID
+
+If no supporting module can be found, an exception will be thrown.
+
+*APR::UUID will not be loaded under OpenBSD due to an as yet unidentified XS
+issue.
+
+If you would like to use a specific module, you can set C<uuid_class>:
+
+ __PACKAGE__->uuid_class('::Data::UUID');
+ __PACKAGE__->uuid_class('MyUUIDGenerator');
+
Note that the component needs to be loaded before Core.
=head1 METHODS
-=head2 uuid_columns
+=head2 uuid_columns(@columns)
-=cut
+Takes a list of columns to be filled with uuids during insert.
-# be compatible with Class::DBI::UUID
-sub uuid_columns {
- my $self = shift;
- for (@_) {
- $self->throw_exception("column $_ doesn't exist") unless $self->has_column($_);
- }
- $self->uuid_auto_columns(\@_);
-}
+ __PACKAGE__->uuid_columns('id');
-sub insert {
- my $self = shift;
- for my $column (@{$self->uuid_auto_columns}) {
- $self->store_column( $column, $self->get_uuid )
- unless defined $self->get_column( $column );
- }
- $self->next::method(@_);
-}
+=head2 uuid_class($classname)
-sub get_uuid {
- return Data::UUID->new->to_string(Data::UUID->new->create),
-}
+Takes the name of a UUIDMaker subclass to be used for uuid value generation.
+This can be a fully qualified class name, or a shortcut name starting with ::
+that matches one of the available DBIx::Class::UUIDMaker subclasses:
+
+ __PACKAGE__->uuid_class('CustomUUIDGenerator');
+  # loads CustomUUIDGenerator
+
+  __PACKAGE__->uuid_class('::Data::UUID');
+ # loads DBIx::Class::UUIDMaker::Data::UUID;
+
+Note that C<uuid_class> checks to see that the specified class isa
+DBIx::Class::UUIDMaker subclass and throws an exception if it isn't.
+
+=head2 uuid_maker
+
+Returns the current UUIDMaker instance for the given module.
+
+ my $uuid = __PACKAGE__->uuid_maker->as_string;
+
+=head1 SEE ALSO
+
+L<DBIx::Class::UUIDMaker>
=head1 AUTHORS
Chia-liang Kao <clkao@clkao.org>
+Chris Laco <claco@chrislaco.com>
=head1 LICENSE
You may distribute this code under the same terms as Perl itself.
-
-=cut
-
-1;
--- /dev/null
+package DBIx::Class::UUIDMaker;
+
+sub new {
+ return bless {}, shift;
+};
+
+sub as_string {
+ return undef;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker - UUID wrapper module
+
+=head1 SYNOPSIS
+
+ package CustomUUIDMaker;
+  use base qw/DBIx::Class::UUIDMaker/;
+
+ sub as_string {
+ my $uuid;
+    ...magic incantations...
+ return $uuid;
+ };
+
+=head1 DESCRIPTION
+
+DBIx::Class::UUIDMaker is a base class used by the various uuid generation
+subclasses.
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<DBIx::Class::UUIDMaker>,
+L<DBIx::Class::UUIDMaker::UUID>,
+L<DBIx::Class::UUIDMaker::APR::UUID>,
+L<DBIx::Class::UUIDMaker::Data::UUID>,
+L<DBIx::Class::UUIDMaker::Win32::Guidgen>,
+L<DBIx::Class::UUIDMaker::Win32API::GUID>,
+L<DBIx::Class::UUIDMaker::Data::Uniqid>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::APR::UUID;
+use base qw/DBIx::Class::UUIDMaker/;
+use APR::UUID ();
+
+sub as_string {
+ return APR::UUID->new->format;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::APR::UUID - Create uuids using APR::UUID
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::APR::UUID');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses APR::UUID to generate uuid
+strings in the following format:
+
+ 098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<APR::UUID>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::Data::UUID;
+use base qw/DBIx::Class::UUIDMaker/;
+use Data::UUID ();
+
+sub as_string {
+ return Data::UUID->new->to_string(Data::UUID->new->create);
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Data::UUID - Create uuids using Data::UUID
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::Data::UUID');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Data::UUID to generate uuid
+strings in the following format:
+
+ 098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<Data::UUID>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::Data::Uniqid;
+use base qw/DBIx::Class::UUIDMaker/;
+use Data::Uniqid ();
+
+sub as_string {
+ return Data::Uniqid->luniqid;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Data::Uniqid - Create uuids using Data::Uniqid
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::Data::Uniqid');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Data::Uniqid to generate uuid
+strings using Data::Uniqid::luniqid.
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<Data::Uniqid>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::UUID;
+use base qw/DBIx::Class::UUIDMaker/;
+use UUID ();
+
+sub as_string {
+ my ($uuid, $uuidstring);
+ UUID::generate($uuid);
+ UUID::unparse($uuid, $uuidstring);
+
+ return $uuidstring;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::UUID - Create uuids using UUID
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::UUID');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses UUID to generate uuid
+strings in the following format:
+
+ 098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<UUID>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::Win32::Guidgen;
+use base qw/DBIx::Class::UUIDMaker/;
+use Win32::Guidgen ();
+
+sub as_string {
+ my $uuid = Win32::Guidgen::create();
+ $uuid =~ s/(^\{|\}$)//g;
+
+ return $uuid;
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Win32::Guidgen - Create uuids using Win32::Guidgen
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::Win32::Guidgen');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Win32::Guidgen to generate uuid
+strings in the following format:
+
+ 098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<Win32::Guidgen>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
--- /dev/null
+package DBIx::Class::UUIDMaker::Win32API::GUID;
+use base qw/DBIx::Class::UUIDMaker/;
+use Win32API::GUID ();
+
+sub as_string {
+ return Win32API::GUID::CreateGuid();
+};
+
+1;
+__END__
+
+=head1 NAME
+
+DBIx::Class::UUIDMaker::Win32API::GUID - Create uuids using Win32API::GUID
+
+=head1 SYNOPSIS
+
+ package Artist;
+ __PACKAGE__->load_components(qw/UUIDColumns Core DB/);
+ __PACKAGE__->uuid_columns( 'artist_id' );
+ __PACKAGE__->uuid_class('::Win32API::GUID');
+
+=head1 DESCRIPTION
+
+This DBIx::Class::UUIDMaker subclass uses Win32API::GUID to generate uuid
+strings in the following format:
+
+ 098f2470-bae0-11cd-b579-08002b30bfeb
+
+=head1 METHODS
+
+=head2 as_string
+
+Returns the new uuid as a string.
+
+=head1 SEE ALSO
+
+L<Win32API::GUID>
+
+=head1 AUTHOR
+
+Chris Laco <claco@chrislaco.com>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
}
$table->primary_key($source->primary_columns);
-
my @rels = $source->relationships();
foreach my $rel (@rels)
{
my $rel_table = $source->related_source($rel)->name;
my $cond = (keys (%{$rel_info->{cond}}))[0];
my ($refkey) = $cond =~ /^\w+\.(\w+)$/;
+ my ($key) = $rel_info->{cond}->{$cond} =~ /^\w+\.(\w+)$/;
if($rel_table && $refkey)
{
$table->add_constraint(
type => 'foreign_key',
- name => "fk_${rel}_id",
- fields => $rel,
+ name => "fk_${key}",
+ fields => $key,
reference_fields => $refkey,
reference_table => $rel_table,
- );
+ );
}
}
}
use warnings;
use lib qw(lib t/lib);
-use UNIVERSAL::require;
+use DBICTest;
+use DBICTest::HelperRels;
-my $from = 'SQL::Translator::Parser::DBIx::Class';
-my $to = 'SQL::Translator::Producer::SQLite';
-my $sqlt = 'SQL::Translator';
-my $schema = 'DBICTest::Schema';
+my $schema = DBICTest->initialise;
-$from->require;
-$to->require;
-$sqlt->require;
-$schema->require;
-
-my $tr = $sqlt->new;
-
-$from->can("parse")->($tr, $schema);
-print $to->can("produce")->($tr);
+print $schema->storage->deployment_statements($schema);
--- /dev/null
+#!/usr/bin/perl
+
+die "must be run from DBIx::Class root dir" unless -d 't/run';
+
+gen_tests($_) for qw/BasicRels HelperRels/;
+
+sub gen_tests {
+ my $variant = shift;
+ my $dir = lc $variant;
+ system("rm -f t/$dir/*.t");
+
+ foreach my $test (map { m[^t/run/(.+)\.tl$]; $1 } split(/\n/, `ls t/run/*.tl`)) {
+ open(my $fh, '>', "t/$dir/${test}.t") or die $!;
+ print $fh <<"EOF";
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::$variant;
+
+require "t/run/${test}.tl";
+run_tests(DBICTest->schema);
+EOF
+ close $fh;
+ }
+}
\ No newline at end of file
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/145db2.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/20unique.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/21transactions.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/22cascade_copy.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/23cache.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::BasicRels;
+
+require "t/run/24serialize.tl";
+run_tests(DBICTest->schema);
Actor->iterator_class('Class::DBI::My::Iterator');
+delete $film->{related_resultsets};
+
{
my @acts = $film->actors->slice(1, 2);
is @acts, 2, "Slice gives 2 results";
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::HelperRels;
+
+require "t/run/145db2.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::HelperRels;
+
+require "t/run/22cascade_copy.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::HelperRels;
+
+require "t/run/23cache.tl";
+run_tests(DBICTest->schema);
--- /dev/null
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::HelperRels;
+
+require "t/run/24serialize.tl";
+run_tests(DBICTest->schema);
+package DBICTest;
+
+use strict;
+use warnings;
+use DBICTest::Schema;
+
+sub initialise {
+
+ my $db_file = "t/var/DBIxClass.db";
+
+ unlink($db_file) if -e $db_file;
+ unlink($db_file . "-journal") if -e $db_file . "-journal";
+ mkdir("t/var") unless -d "t/var";
+
+ my $dsn = "dbi:SQLite:${db_file}";
+
+ return DBICTest::Schema->compose_connection('DBICTest' => $dsn);
+}
+
1;
use base 'DBIx::Class::Core';
+__PACKAGE__->load_components('PK::Auto');
+
DBICTest::Schema::Artist->table('artist');
DBICTest::Schema::Artist->add_columns(
'artistid' => {
DBICTest::Schema::Artist->add_relationship(
cds => 'DBICTest::Schema::CD',
{ 'foreign.artist' => 'self.artistid' },
- { order_by => 'year', join_type => 'LEFT', cascade_delete => 1 }
+ { order_by => 'year', join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' }
);
DBICTest::Schema::Artist->add_relationship(
twokeys => 'DBICTest::Schema::TwoKeys',
- { 'foreign.artist' => 'self.artistid' }
+ { 'foreign.artist' => 'self.artistid' },
+ { cascade_copy => 1 }
);
DBICTest::Schema::Artist->add_relationship(
onekeys => 'DBICTest::Schema::OneKey',
DBICTest::Schema::CD->add_relationship(
tags => 'DBICTest::Schema::Tag',
{ 'foreign.cd' => 'self.cdid' },
- { join_type => 'LEFT', cascade_delete => 1 }
+ { join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' }
);
#DBICTest::Schema::CD->might_have(liner_notes => 'DBICTest::Schema::LinerNotes' => qw/notes/);
DBICTest::Schema::CD->add_relationship(
use base 'DBIx::Class::Core';
+__PACKAGE__->load_components('PK::Auto');
+
DBICTest::Schema::CD->table('cd');
DBICTest::Schema::CD->add_columns(
'cdid' => {
);
DBICTest::Schema::Artist->has_many(
'artist_undirected_maps', 'DBICTest::Schema::ArtistUndirectedMap',
- [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}]
+ [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}],
+ { cascade_copy => 0 } # this would *so* not make sense
);
DBICTest::Schema::ArtistUndirectedMap->belongs_to(
'artist1', 'DBICTest::Schema::Artist', 'id1');
use base 'DBIx::Class::Core';
+__PACKAGE__->load_components('PK::Auto');
+
DBICTest::Schema::OneKey->table('onekey');
DBICTest::Schema::OneKey->add_columns(
'id' => {
use base qw/DBIx::Class::Core/;
+__PACKAGE__->load_components('PK::Auto');
+
DBICTest::Schema::Tag->table('tags');
DBICTest::Schema::Tag->add_columns(
'tagid' => {
- data_type => 'varchar',
+ data_type => 'integer',
is_auto_increment => 1,
},
'cd' => {
use strict;
use warnings;
-use DBICTest::Schema;
+use DBICTest;
-my $db_file = "t/var/DBIxClass.db";
-
-unlink($db_file) if -e $db_file;
-unlink($db_file . "-journal") if -e $db_file . "-journal";
-mkdir("t/var") unless -d "t/var";
-
-my $dsn = "dbi:SQLite:${db_file}";
-
-my $schema = DBICTest::Schema->compose_connection('DBICTest' => $dsn);
+my $schema = DBICTest->initialise;
$schema->storage->on_connect_do([ "PRAGMA synchronous = OFF" ]);
my $dbh = $schema->storage->dbh;
-open IN, "t/lib/sqlite.sql";
+if ($ENV{"DBICTEST_SQLT_DEPLOY"}) {
+ $schema->deploy;
+} else {
+ open IN, "t/lib/sqlite.sql";
-my $sql;
+ my $sql;
-{ local $/ = undef; $sql = <IN>; }
+ { local $/ = undef; $sql = <IN>; }
-close IN;
+ close IN;
-$dbh->do($_) for split(/\n\n/, $sql);
+ $dbh->do($_) for split(/\n\n/, $sql);
+}
$schema->storage->dbh->do("PRAGMA synchronous = OFF");
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Mon Feb 6 01:07:16 2006
+-- Created on Fri Feb 24 15:13:57 2006
--
BEGIN TRANSACTION;
);
--
--- Table: producer
---
-CREATE TABLE producer (
- producerid INTEGER PRIMARY KEY NOT NULL,
- name varchar NOT NULL
-);
-
---
-- Table: onekey
--
CREATE TABLE onekey (
);
--
+-- Table: producer
+--
+CREATE TABLE producer (
+ producerid INTEGER PRIMARY KEY NOT NULL,
+ name varchar NOT NULL
+);
+
+--
-- Table: treelike
--
CREATE TABLE treelike (
-- Table: tags
--
CREATE TABLE tags (
- tagid varchar NOT NULL,
+ tagid INTEGER PRIMARY KEY NOT NULL,
cd integer NOT NULL,
- tag varchar NOT NULL,
- PRIMARY KEY (tagid)
+ tag varchar NOT NULL
);
--
plan tests => 2;
$schema->class("Artist")->load_components(qw/PK::Auto::SQLite/);
+ # Should just be PK::Auto but this ensures the compat shim works
# add an artist without primary key to test Auto
my $artist = $schema->resultset("Artist")->create( { name => 'Auto' } );
#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', '');
-MySQLTest::Artist->load_components('PK::Auto::MySQL');
+MySQLTest::Artist->load_components('PK::Auto');
# test primary key handling
my $new = MySQLTest::Artist->create({ name => 'foo' });
$dbh->do("CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(255), charfield CHAR(10));");
-PgTest::Artist->load_components('PK::Auto::Pg');
+PgTest::Artist->load_components('PK::Auto');
my $new = PgTest::Artist->create({ name => 'foo' });
END;
});
-OraTest::Artist->load_components('PK::Auto::Oracle');
+OraTest::Artist->load_components('PK::Auto');
OraTest::CD->load_components('PK::Auto::Oracle');
OraTest::Track->load_components('PK::Auto::Oracle');
--- /dev/null
+sub run_tests {
+my $schema = shift;
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_${_}" } qw/DSN USER PASS/};
+
+#warn "$dsn $user $pass";
+
+plan skip_all => 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test'
+ unless ($dsn && $user);
+
+plan tests => 5;
+
+DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass);
+
+my $dbh = DB2Test->schema->storage->dbh;
+
+$dbh->do("DROP TABLE artist;");
+
+$dbh->do("CREATE TABLE artist (artistid INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), name VARCHAR(255), charfield CHAR(10));");
+
+#'dbi:mysql:host=localhost;database=dbic_test', 'dbic_test', '');
+
+DB2Test::Artist->load_components('PK::Auto');
+
+# test primary key handling
+my $new = DB2Test::Artist->create({ name => 'foo' });
+ok($new->artistid, "Auto-PK worked");
+
+# test LIMIT support
+for (1..6) {
+ DB2Test::Artist->create({ name => 'Artist ' . $_ });
+}
+my $it = DB2Test::Artist->search( {},
+ { rows => 3,
+ order_by => 'artistid'
+ }
+);
+is( $it->count, 3, "LIMIT count ok" );
+is( $it->next->name, "Artist 2", "iterator->next ok" );
+$it->next;
+$it->next;
+is( $it->next, undef, "next past end of resultset ok" );
+
+my $test_type_info = {
+ 'artistid' => {
+ 'data_type' => 'INTEGER',
+ 'is_nullable' => 0,
+ 'size' => 11
+ },
+ 'name' => {
+ 'data_type' => 'VARCHAR',
+ 'is_nullable' => 1,
+ 'size' => 255
+ },
+ 'charfield' => {
+ 'data_type' => 'VARCHAR',
+ 'is_nullable' => 1,
+ 'size' => 10
+ },
+};
+
+
+my $type_info = DB2Test->schema->storage->columns_info_for('artist');
+is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
+
+
+
+# clean up our mess
+$dbh->do("DROP TABLE artist");
+
+}
+
+1;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 41 );
+ : ( tests => 42 );
}
# figure out if we've got a version of sqlite that is older than 3.2.6, in
my $cd = $schema->resultset('CD')->find(1,
{
cols => [qw/title artist.name/],
- join => 'artist'
+ join => { 'artist' => {} }
}
);
ok(eval { $cd->artist->name eq 'Caterwauler McCrae' }, 'single related column prefetched');
cmp_ok( $rs->count, '==', 3, "count() ok after group_by on related column" );
}
-cmp_ok( scalar $rs->all, '==', 3, "all() returns same count as count() after group_by on related column" );
+$rs = $schema->resultset("Artist")->search(
+ {},
+ { join => [qw/ cds /], group_by => [qw/ me.name /], having =>{ 'MAX(cds.cdid)'=> \'< 5' } }
+);
+
+cmp_ok( $rs->all, '==', 2, "results ok after group_by on related column with a having" );
+
+$rs = $rs->search( undef, { having =>{ 'count(*)'=> \'> 2' }});
+
+cmp_ok( $rs->all, '==', 1, "count() ok after group_by on related column with a having" );
$rs = $schema->resultset("Artist")->search(
{ 'cds.title' => 'Spoonful of bees',
--- /dev/null
+use strict;
+use warnings;
+
+sub run_tests {
+my $schema = shift;
+
+plan tests => 4;
+my $artist = $schema->resultset('Artist')->find(1);
+my $artist_cds = $artist->search_related('cds');
+my $cover_band = $artist->copy;
+my $cover_cds = $cover_band->search_related('cds');
+cmp_ok($cover_band->id, '!=', $artist->id, 'ok got new column id...');
+is($cover_cds->count, $artist_cds->count, 'duplicated rows count ok');
+
+#check multi-keyed
+cmp_ok($cover_band->search_related('twokeys')->count, '>', 0, 'duplicated multiPK ok');
+
+#and check copying a few relations away
+cmp_ok($cover_cds->search_related('tags')->count, '==',
+ $artist_cds->search_related('tags')->count , 'duplicated count ok');
+
+}
+1;
--- /dev/null
+sub run_tests {
+my $schema = shift;
+
+eval "use DBD::SQLite";
+plan skip_all => 'needs DBD::SQLite for testing' if $@;
+plan tests => 12;
+
+my $rs = $schema->resultset("Artist")->search(
+ { artistid => 1 }
+);
+
+my $artist = $rs->first;
+
+is( scalar @{ $rs->get_cache }, 0, 'cache is not populated without cache attribute' );
+
+$rs = $schema->resultset("Artist")->search(
+ { 'artistid' => 1 },
+ {
+ join => [ qw/ cds /],
+ prefetch => [qw/ cds /],
+ cache => 1,
+ }
+);
+
+# NOTE(review): removed leftover debug setup (Data::Dumper was loaded but never used)
+
+# start test for prefetch SELECT count
+unlink 't/var/dbic.trace' if -e 't/var/dbic.trace';
+DBI->trace(1, 't/var/dbic.trace');
+
+$artist = $rs->first;
+$rs->reset();
+
+# make sure artist contains a related resultset for cds
+is( ref $artist->{related_resultsets}->{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' );
+
+# check if $artist->cds->get_cache is populated
+is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records');
+
+# ensure that $artist->cds returns correct number of objects
+is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' );
+
+# ensure that $artist->cds->count returns correct value
+is( $artist->cds->count, 3, 'artist->cds->count returns correct value' );
+
+# ensure that $artist->count_related('cds') returns correct value
+is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' );
+
+# count the SELECTs
+DBI->trace(0, undef);
+my $selects = 0;
+my $trace = IO::File->new('t/var/dbic.trace', '<')
+ or die "Unable to read trace file";
+while (<$trace>) {
+ $selects++ if /SELECT/;
+}
+$trace->close;
+unlink 't/var/dbic.trace';
+is($selects, 2, 'only one SQL statement for each cached table');
+
+# make sure related_resultset is deleted after object is updated
+$artist->set_column('name', 'New Name');
+$artist->update();
+
+is( scalar keys %{$artist->{related_resultsets}}, 0, 'related resultsets deleted after update' );
+
+# todo: make sure caching works with nested prefetch e.g. $artist->cds->tracks
+$rs = $schema->resultset("Artist")->search(
+ { artistid => 1 },
+ {
+ join => { cds => 'tags' },
+ prefetch => {
+ cds => 'tags'
+ },
+ cache => 1
+ }
+);
+
+# SELECT count for nested has_many prefetch
+unlink 't/var/dbic.trace' if -e 't/var/dbic.trace';
+DBI->trace(1, 't/var/dbic.trace');
+
+$artist = $rs->first;
+
+# count the SELECTs
+DBI->trace(0, undef);
+$selects = 0;
+$trace = IO::File->new('t/var/dbic.trace', '<')
+ or die "Unable to read trace file";
+while (<$trace>) {
+ $selects++ if /SELECT/;
+}
+$trace->close;
+unlink 't/var/dbic.trace';
+is($selects, 3, 'one SQL statement for each cached table with nested prefetch');
+
+my @objs;
+$artist = $rs->find(1);
+
+unlink 't/var/dbic.trace' if -e 't/var/dbic.trace';
+DBI->trace(1, 't/var/dbic.trace');
+
+my $cds = $artist->cds;
+my $tags = $cds->next->tags;
+while( my $tag = $tags->next ) {
+ push @objs, $tag->tagid; #warn "tag:", $tag->ID;
+}
+
+is_deeply( \@objs, [ 1 ], 'first cd has correct tags' );
+
+$tags = $cds->next->tags;
+@objs = ();
+while( my $tag = $tags->next ) {
+ push @objs, $tag->id; #warn "tag: ", $tag->ID;
+}
+
+is_deeply( \@objs, [ 2, 5, 8 ], 'second cd has correct tags' );
+
+# count the SELECTs
+DBI->trace(0, undef);
+$selects = 0;
+$trace = IO::File->new('t/var/dbic.trace', '<')
+ or die "Unable to read trace file";
+while (<$trace>) {
+ $selects++ if /SELECT/;
+}
+$trace->close;
+unlink 't/var/dbic.trace';
+
+is( $selects, 0, 'no additional SQL statements while checking nested data' );
+
+}
+
+1;
--- /dev/null
+use Storable;
+
+sub run_tests {
+my $schema = shift;
+
+plan tests => 1;
+
+my $artist = $schema->resultset('Artist')->find(1);
+my $copy = eval { Storable::dclone($artist) };
+is_deeply($copy, $artist, 'serialize row object works');
+
+}
+
+1;