Revision history for DBIx::Class
+0.06999_01 2006-05-28 17:19:30
+ - add automatic naming of unique constraints
+ - marked DB.pm as deprecated and noted it will be removed by 1.0
+ - add ResultSetColumn
+ - refactor ResultSet code to resolve attrs as late as poss
+ - merge prefetch attrs into join attrs
+ - add +select and +as attributes to ResultSet
+ - added InflateColumn::DateTime component
- refactor debugging to allow for profiling using Storage::Statistics
- removed Data::UUID from deps, made other optionals required
- modified SQLT parser to skip dupe table names
loaded
- CDBICompat: override find_or_create to fix column casing when
ColumnCase is loaded
+ - reorganized and simplified tests
+ - added Ordered
-0.06003
+0.06003 2006-05-19 15:37:30
- make find_or_create_related check defined() instead of truth
- don't unnecessarily fetch rels for cascade_update
- don't set_columns explicitly in update_or_create; instead use
# Save this 'cause CPAN will chdir all over the place.
my $cwd = Cwd::cwd();
- my $makefile = File::Spec->rel2abs($0);
- CPAN::Shell->install('Module::Build::Compat')
- or die " *** Cannot install without Module::Build. Exiting ...\n";
+ CPAN::Shell->install('Module::Build::Compat');
+ CPAN::Shell->expand("Module", "Module::Build::Compat")->uptodate
+ or die "Couldn't install Module::Build, giving up.\n";
chdir $cwd or die "Cannot chdir() back to $cwd: $!";
}
eval "use Module::Build::Compat 0.02; 1" or die $@;
- use lib '_build/lib';
+
Module::Build::Compat->run_build_pl(args => \@ARGV);
require Module::Build;
Module::Build::Compat->write_makefile(build_class => 'Module::Build');
SQLT modules so an app can do its own deploy without SQLT on the target
system
+2006-05-25 by mst (TODOed by bluefeet)
+ Add the search attributes "limit" and "rows_per_page".
+ limit: work as expected just like offset does
+ rows_per_page: only be used if you used the page attr or called $rs->page
+ rows: modify to be an alias that gets used to populate either as appropriate,
+ if you haven't specified one of the others
# i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
# brain damage and presumably various other packaging systems too
-$VERSION = '0.06002';
+$VERSION = '0.06999_01';
sub MODIFY_CODE_ATTRIBUTES {
my ($class,$code,@attrs) = @_;
blblack: Brandon Black
+bluefeet: Aran Deltac <bluefeet@cpan.org>
+
LTJake: Brian Cassidy <bricas@cpan.org>
claco: Christopher H. Laco
jguenther: Justin Guenther <guentherj@agr.gc.ca>
+captainL: Luke Saunders <luke.saunders@gmail.com>
+
draven: Marcus Ramberg <mramberg@cpan.org>
nigel: Nigel Metheringham <nigelm@cpan.org>
sszabo: Stephan Szabo <sszabo@bigpanda.com>
-captainL: Luke Saunders <luke.saunders@gmail.com>
-
Todd Lipcon
wdh: Will Hawes
=head1 NAME
-DBIx::Class::DB - Non-recommended classdata schema component
+DBIx::Class::DB - (DEPRECATED) classdata schema component
=head1 SYNOPSIS
This class is designed to support the Class::DBI connection-as-classdata style
for DBIx::Class. You are *strongly* recommended to use a DBIx::Class::Schema
-instead; DBIx::Class::DB will continue to be supported but new development
-will be focused on Schema-based DBIx::Class setups.
+instead; DBIx::Class::DB will not undergo new development and will be moved
+to being a CDBICompat-only component before 1.0.
=head1 METHODS
--- /dev/null
+package DBIx::Class::InflateColumn::DateTime;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class/;
+
+=head1 NAME
+
+DBIx::Class::InflateColumn::DateTime - Auto-create DateTime objects from datetime columns.
+
+=head1 SYNOPSIS
+
+Load this component and then declare one or more
+columns to be of the datetime datatype.
+
+ package Event;
+ __PACKAGE__->load_components(qw/InflateColumn::DateTime/);
+ __PACKAGE__->add_columns(
+ starts_when => { data_type => 'datetime' }
+ );
+
+Then you can treat the specified column as a L<DateTime> object.
+
+ print "This event starts the month of ".
+ $event->starts_when->month_name();
+
+=head1 DESCRIPTION
+
+This module figures out the type of DateTime::Format::* class to
+inflate/deflate with based on the type of DBIx::Class::Storage::DBI::*
+that you are using. If you switch from one database to a different
+one your code will continue to work without modification.
+
+=cut
+
+__PACKAGE__->load_components(qw/InflateColumn/);
+
+__PACKAGE__->mk_group_accessors('simple' => '__datetime_parser');
+
+sub register_column {
+ my ($self, $column, $info, @rest) = @_;
+ $self->next::method($column, $info, @rest);
+ if ($info->{data_type} =~ /^datetime$/i) {
+ $self->inflate_column(
+ $column =>
+ {
+ inflate => sub {
+ my ($value, $obj) = @_;
+ $obj->_datetime_parser->parse_datetime($value);
+ },
+ deflate => sub {
+ my ($value, $obj) = @_;
+ $obj->_datetime_parser->format_datetime($value);
+ },
+ }
+ );
+ }
+}
+
+sub _datetime_parser {
+ my $self = shift;
+ if (my $parser = $self->__datetime_parser) {
+ return $parser;
+ }
+ my $parser = $self->result_source->storage->datetime_parser(@_);
+ return $self->__datetime_parser($parser);
+}
+
+1;
+__END__
+
+=head1 SEE ALSO
+
+=over 4
+
+=item More information about the add_columns method, and column metadata,
+ can be found in the documentation for L<DBIx::Class::ResultSource>.
+
+=back
+
+=head1 AUTHOR
+
+Matt S. Trout <mst@shadowcatsystems.co.uk>
+
+=head1 CONTRIBUTORS
+
+Aran Deltac <bluefeet@cpan.org>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
so no additional SQL statements are executed. You now have a much more
efficient query.
-Note that as of L<DBIx::Class> 0.04, C<prefetch> cannot be used with
-C<has_many> relationships. You will get an error along the lines of "No
-accessor for prefetched ..." if you try.
+Note that as of L<DBIx::Class> 0.05999_01, C<prefetch> I<can> be used with
+C<has_many> relationships.
Also note that C<prefetch> should only be used when you know you will
definitely use data from a related table. Pre-fetching related tables when you
my $genus = $schema->resultset('Genus')->find(12);
+ my $coderef2 = sub {
+ $genus->extinct(1);
+ $genus->update;
+ };
+
my $coderef1 = sub {
- my ($schema, $genus, $code) = @_;
$genus->add_to_species({ name => 'troglodyte' });
$genus->wings(2);
$genus->update;
- $schema->txn_do($code, $genus); # Can have a nested transaction
+ $schema->txn_do($coderef2); # Can have a nested transaction
return $genus->species;
};
- my $coderef2 = sub {
- my ($genus) = @_;
- $genus->extinct(1);
- $genus->update;
- };
-
my $rs;
eval {
- $rs = $schema->txn_do($coderef1, $schema, $genus, $coderef2);
+ $rs = $schema->txn_do($coderef1);
};
if ($@) { # Transaction failed
'0+' => \&count,
'bool' => sub { 1; },
fallback => 1;
+use Carp::Clan qw/^DBIx::Class/;
use Data::Page;
use Storable;
+use Data::Dumper;
use Scalar::Util qw/weaken/;
use DBIx::Class::ResultSetColumn;
my ($source, $attrs) = @_;
weaken $source;
- $attrs = Storable::dclone($attrs || {}); # { %{ $attrs || {} } };
- #use Data::Dumper; warn Dumper($attrs);
- my $alias = ($attrs->{alias} ||= 'me');
-
- $attrs->{columns} ||= delete $attrs->{cols} if $attrs->{cols};
- delete $attrs->{as} if $attrs->{columns};
- $attrs->{columns} ||= [ $source->columns ] unless $attrs->{select};
- $attrs->{select} = [
- map { m/\./ ? $_ : "${alias}.$_" } @{delete $attrs->{columns}}
- ] if $attrs->{columns};
- $attrs->{as} ||= [
- map { m/^\Q$alias.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
- ];
- if (my $include = delete $attrs->{include_columns}) {
- push(@{$attrs->{select}}, @$include);
- push(@{$attrs->{as}}, map { m/([^.]+)$/; $1; } @$include);
- }
- #use Data::Dumper; warn Dumper(@{$attrs}{qw/select as/});
-
- $attrs->{from} ||= [ { $alias => $source->from } ];
- $attrs->{seen_join} ||= {};
- my %seen;
- if (my $join = delete $attrs->{join}) {
- foreach my $j (ref $join eq 'ARRAY' ? @$join : ($join)) {
- if (ref $j eq 'HASH') {
- $seen{$_} = 1 foreach keys %$j;
- } else {
- $seen{$j} = 1;
- }
- }
- push(@{$attrs->{from}}, $source->resolve_join(
- $join, $attrs->{alias}, $attrs->{seen_join})
- );
- }
-
- $attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct};
- $attrs->{order_by} = [ $attrs->{order_by} ] if
- $attrs->{order_by} and !ref($attrs->{order_by});
- $attrs->{order_by} ||= [];
-
- my $collapse = $attrs->{collapse} || {};
- if (my $prefetch = delete $attrs->{prefetch}) {
- my @pre_order;
- foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
- if ( ref $p eq 'HASH' ) {
- foreach my $key (keys %$p) {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$key};
- }
- } else {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$p};
- }
- my @prefetch = $source->resolve_prefetch(
- $p, $attrs->{alias}, {}, \@pre_order, $collapse);
- push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
- push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
- }
- push(@{$attrs->{order_by}}, @pre_order);
- }
- $attrs->{collapse} = $collapse;
-# use Data::Dumper; warn Dumper($collapse) if keys %{$collapse};
if ($attrs->{page}) {
$attrs->{rows} ||= 10;
$attrs->{offset} += ($attrs->{rows} * ($attrs->{page} - 1));
}
+ $attrs->{alias} ||= 'me';
+
bless {
result_source => $source,
result_class => $attrs->{result_class} || $source->result_class,
cond => $attrs->{where},
- from => $attrs->{from},
- collapse => $collapse,
+# from => $attrs->{from},
+# collapse => $collapse,
count => undef,
page => delete $attrs->{page},
pager => undef,
sub search_rs {
my $self = shift;
- my $attrs = { %{$self->{attrs}} };
- my $having = delete $attrs->{having};
- $attrs = { %$attrs, %{ pop(@_) } } if @_ > 1 and ref $_[$#_] eq 'HASH';
+ my $our_attrs = { %{$self->{attrs}} };
+ my $having = delete $our_attrs->{having};
+ my $attrs = {};
+ $attrs = pop(@_) if @_ > 1 and ref $_[$#_] eq 'HASH';
+
+ # merge new attrs into old
+ foreach my $key (qw/join prefetch/) {
+ next unless (exists $attrs->{$key});
+ if (exists $our_attrs->{$key}) {
+ $our_attrs->{$key} = $self->_merge_attr($our_attrs->{$key}, $attrs->{$key});
+ } else {
+ $our_attrs->{$key} = $attrs->{$key};
+ }
+ delete $attrs->{$key};
+ }
+
+ if (exists $our_attrs->{prefetch}) {
+ $our_attrs->{join} = $self->_merge_attr($our_attrs->{join}, $our_attrs->{prefetch}, 1);
+ }
+
+ my $new_attrs = { %{$our_attrs}, %{$attrs} };
+ # merge new where and having into old
my $where = (@_
? ((@_ == 1 || ref $_[0] eq "HASH")
? shift
: {@_}))
: undef());
if (defined $where) {
- $attrs->{where} = (defined $attrs->{where}
+ $new_attrs->{where} = (defined $new_attrs->{where}
? { '-and' =>
[ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $where, $attrs->{where} ] }
+ $where, $new_attrs->{where} ] }
: $where);
}
if (defined $having) {
- $attrs->{having} = (defined $attrs->{having}
+ $new_attrs->{having} = (defined $new_attrs->{having}
? { '-and' =>
[ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $having, $attrs->{having} ] }
+ $having, $new_attrs->{having} ] }
: $having);
}
- my $rs = (ref $self)->new($self->result_source, $attrs);
+ my $rs = (ref $self)->new($self->result_source, $new_attrs);
+ $rs->{_parent_rs} = $self->{_parent_rs} if ($self->{_parent_rs}); #XXX - hack to pass through parent of related resultsets
unless (@_) { # no search, effectively just a clone
my $rows = $self->get_cache;
You can also find a row by a specific unique constraint using the C<key>
attribute. For example:
- my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', { key => 'artist_title' });
+ my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', { key => 'cd_artist_title' });
Additionally, you can specify the columns explicitly by name:
artist => 'Massive Attack',
title => 'Mezzanine',
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
-If no C<key> is specified and you explicitly name columns, it searches on all
-unique constraints defined on the source, including the primary key.
-
If the C<key> is specified as C<primary>, it searches only on the primary key.
+If no C<key> is specified, it searches on all unique constraints defined on the
+source, including the primary key.
+
See also L</find_or_create> and L</update_or_create>. For information on how to
declare unique constraints, see
L<DBIx::Class::ResultSource/add_unique_constraint>.
my $self = shift;
my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {});
- # Parse out a hash from input
+ # Default to the primary key, but allow a specific key
my @cols = exists $attrs->{key}
? $self->result_source->unique_constraint_columns($attrs->{key})
: $self->result_source->primary_columns;
+ $self->throw_exception(
+ "Can't find unless a primary key or unique constraint is defined"
+ ) unless @cols;
- my $hash;
+ # Parse out a hashref from input
+ my $input_query;
if (ref $_[0] eq 'HASH') {
- $hash = { %{$_[0]} };
+ $input_query = { %{$_[0]} };
}
elsif (@_ == @cols) {
- $hash = {};
- @{$hash}{@cols} = @_;
+ $input_query = {};
+ @{$input_query}{@cols} = @_;
}
- elsif (@_) {
- # For backwards compatibility
- $hash = {@_};
+ else {
+ # Compatibility: Allow e.g. find(id => $value)
+ carp "Find by key => value deprecated; please use a hashref instead";
+ $input_query = {@_};
+ }
+
+ my @unique_queries = $self->_unique_queries($input_query, $attrs);
+# use Data::Dumper; warn Dumper $self->result_source->name, $input_query, \@unique_queries, $self->{attrs}->{where};
+
+ # Handle cases where the ResultSet defines the query, or where the user is
+ # abusing find
+ my $query = @unique_queries ? \@unique_queries : $input_query;
+
+ # Run the query
+ if (keys %$attrs) {
+ my $rs = $self->search($query, $attrs);
+ $rs->_resolve;
+ return keys %{$rs->{_attrs}->{collapse}} ? $rs->next : $rs->single;
}
else {
- $self->throw_exception(
- "Arguments to find must be a hashref or match the number of columns in the "
- . (exists $attrs->{key} ? "$attrs->{key} unique constraint" : "primary key")
- );
+ $self->_resolve;
+ return (keys %{$self->{_attrs}->{collapse}})
+ ? $self->search($query)->next
+ : $self->single($query);
}
+}
+
+# _unique_queries
+#
+# Build a list of queries which satisfy unique constraints.
+
+sub _unique_queries {
+ my ($self, $query, $attrs) = @_;
- # Check the hash we just parsed against our source's unique constraints
my @constraint_names = exists $attrs->{key}
? ($attrs->{key})
: $self->result_source->unique_constraint_names;
- $self->throw_exception(
- "Can't find unless a primary key or unique constraint is defined"
- ) unless @constraint_names;
my @unique_queries;
foreach my $name (@constraint_names) {
my @unique_cols = $self->result_source->unique_constraint_columns($name);
- my $unique_query = $self->_build_unique_query($hash, \@unique_cols);
+ my $unique_query = $self->_build_unique_query($query, \@unique_cols);
+
+ next unless scalar keys %$unique_query;
# Add the ResultSet's alias
foreach my $key (grep { ! m/\./ } keys %$unique_query) {
- $unique_query->{"$self->{attrs}{alias}.$key"} = delete $unique_query->{$key};
+ $unique_query->{"$self->{attrs}->{alias}.$key"} = delete $unique_query->{$key};
}
- push @unique_queries, $unique_query if %$unique_query;
+ push @unique_queries, $unique_query;
}
- # Handle cases where the ResultSet already defines the query
- my $query = @unique_queries ? \@unique_queries : undef;
-
- # Run the query
- if (keys %$attrs) {
- my $rs = $self->search($query, $attrs);
- return keys %{$rs->{collapse}} ? $rs->next : $rs->single;
- }
- else {
- return keys %{$self->{collapse}}
- ? $self->search($query)->next
- : $self->single($query);
- }
+ return @unique_queries;
}
# _build_unique_query
sub cursor {
my ($self) = @_;
- my $attrs = { %{$self->{attrs}} };
+
+ $self->_resolve;
+ my $attrs = { %{$self->{_attrs}} };
return $self->{cursor}
- ||= $self->result_source->storage->select($self->{from}, $attrs->{select},
+ ||= $self->result_source->storage->select($attrs->{from}, $attrs->{select},
$attrs->{where},$attrs);
}
sub single {
my ($self, $where) = @_;
- my $attrs = { %{$self->{attrs}} };
+ $self->_resolve;
+ my $attrs = { %{$self->{_attrs}} };
if ($where) {
if (defined $attrs->{where}) {
$attrs->{where} = {
$attrs->{where} = $where;
}
}
+
+ unless ($self->_is_unique_query($attrs->{where})) {
+ carp "Query not guarnteed to return a single row"
+ . "; please declare your unique constraints or use search instead";
+ }
+
my @data = $self->result_source->storage->select_single(
- $self->{from}, $attrs->{select},
+ $attrs->{from}, $attrs->{select},
$attrs->{where},$attrs);
return (@data ? $self->_construct_object(@data) : ());
}
+# _is_unique_query
+#
+# Try to determine if the specified query is guaranteed to be unique, based on
+# the declared unique constraints.
+
+sub _is_unique_query {
+ my ($self, $query) = @_;
+
+ my $collapsed = $self->_collapse_query($query);
+# use Data::Dumper; warn Dumper $query, $collapsed;
+
+ foreach my $name ($self->result_source->unique_constraint_names) {
+ my @unique_cols = map { "$self->{attrs}->{alias}.$_" }
+ $self->result_source->unique_constraint_columns($name);
+
+ # Count the values for each unique column
+ my %seen = map { $_ => 0 } @unique_cols;
+
+ foreach my $key (keys %$collapsed) {
+ my $aliased = $key;
+ $aliased = "$self->{attrs}->{alias}.$key" unless $key =~ /\./;
+
+ next unless exists $seen{$aliased}; # Additional constraints are okay
+ $seen{$aliased} = scalar @{ $collapsed->{$key} };
+ }
+
+ # If we get 0 or more than 1 value for a column, it's not necessarily unique
+ return 1 unless grep { $_ != 1 } values %seen;
+ }
+
+ return 0;
+}
+
+# _collapse_query
+#
+# Recursively collapse the query, accumulating values for each column.
+
+sub _collapse_query {
+ my ($self, $query, $collapsed) = @_;
+
+ $collapsed ||= {};
+
+ if (ref $query eq 'ARRAY') {
+ foreach my $subquery (@$query) {
+ next unless ref $subquery; # -or
+# warn "ARRAY: " . Dumper $subquery;
+ $collapsed = $self->_collapse_query($subquery, $collapsed);
+ }
+ }
+ elsif (ref $query eq 'HASH') {
+ if (keys %$query and (keys %$query)[0] eq '-and') {
+ foreach my $subquery (@{$query->{-and}}) {
+# warn "HASH: " . Dumper $subquery;
+ $collapsed = $self->_collapse_query($subquery, $collapsed);
+ }
+ }
+ else {
+# warn "LEAF: " . Dumper $query;
+ foreach my $key (keys %$query) {
+ push @{$collapsed->{$key}}, $query->{$key};
+ }
+ }
+ }
+
+ return $collapsed;
+}
+
=head2 get_column
=over 4
@{delete $self->{stashed_row}} :
$self->cursor->next
);
-# warn Dumper(\@row); use Data::Dumper;
return unless (@row);
return $self->_construct_object(@row);
}
+sub _resolve {
+ my $self = shift;
+
+ return if(exists $self->{_attrs}); #return if _resolve has already been called
+
+ my $attrs = $self->{attrs};
+ my $source = ($self->{_parent_rs}) ? $self->{_parent_rs} : $self->{result_source};
+
+ # XXX - lose storable dclone
+ my $record_filter = delete $attrs->{record_filter} if (defined $attrs->{record_filter});
+ $attrs = Storable::dclone($attrs || {}); # { %{ $attrs || {} } };
+ $attrs->{record_filter} = $record_filter if ($record_filter);
+ $self->{attrs}->{record_filter} = $record_filter if ($record_filter);
+
+ my $alias = $attrs->{alias};
+
+ $attrs->{columns} ||= delete $attrs->{cols} if $attrs->{cols};
+ delete $attrs->{as} if $attrs->{columns};
+ $attrs->{columns} ||= [ $self->{result_source}->columns ] unless $attrs->{select};
+ my $select_alias = ($self->{_parent_rs}) ? $self->{attrs}->{_live_join} : $alias;
+ $attrs->{select} = [
+ map { m/\./ ? $_ : "${select_alias}.$_" } @{delete $attrs->{columns}}
+ ] if $attrs->{columns};
+ $attrs->{as} ||= [
+ map { m/^\Q$alias.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
+ ];
+ if (my $include = delete $attrs->{include_columns}) {
+ push(@{$attrs->{select}}, @$include);
+ push(@{$attrs->{as}}, map { m/([^.]+)$/; $1; } @$include);
+ }
+
+ $attrs->{from} ||= [ { $alias => $source->from } ];
+ $attrs->{seen_join} ||= {};
+ my %seen;
+ if (my $join = delete $attrs->{join}) {
+ foreach my $j (ref $join eq 'ARRAY' ? @$join : ($join)) {
+ if (ref $j eq 'HASH') {
+ $seen{$_} = 1 foreach keys %$j;
+ } else {
+ $seen{$j} = 1;
+ }
+ }
+
+ push(@{$attrs->{from}}, $source->resolve_join($join, $attrs->{alias}, $attrs->{seen_join}));
+ }
+ $attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct};
+ $attrs->{order_by} = [ $attrs->{order_by} ] if
+ $attrs->{order_by} and !ref($attrs->{order_by});
+ $attrs->{order_by} ||= [];
+
+ if(my $seladds = delete($attrs->{'+select'})) {
+ my @seladds = (ref($seladds) eq 'ARRAY' ? @$seladds : ($seladds));
+ $attrs->{select} = [
+ @{ $attrs->{select} },
+ map { (m/\./ || ref($_)) ? $_ : "${alias}.$_" } $seladds
+ ];
+ }
+ if(my $asadds = delete($attrs->{'+as'})) {
+ my @asadds = (ref($asadds) eq 'ARRAY' ? @$asadds : ($asadds));
+ $attrs->{as} = [ @{ $attrs->{as} }, @asadds ];
+ }
+
+ my $collapse = $attrs->{collapse} || {};
+ if (my $prefetch = delete $attrs->{prefetch}) {
+ my @pre_order;
+ foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
+ if ( ref $p eq 'HASH' ) {
+ foreach my $key (keys %$p) {
+ push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
+ unless $seen{$key};
+ }
+ } else {
+ push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
+ unless $seen{$p};
+ }
+ my @prefetch = $source->resolve_prefetch(
+ $p, $attrs->{alias}, {}, \@pre_order, $collapse);
+ push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
+ push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
+ }
+ push(@{$attrs->{order_by}}, @pre_order);
+ }
+ $attrs->{collapse} = $collapse;
+ $self->{_attrs} = $attrs;
+}
+
+sub _merge_attr {
+ my ($self, $a, $b, $is_prefetch) = @_;
+
+ return $b unless $a;
+ if (ref $b eq 'HASH' && ref $a eq 'HASH') {
+ foreach my $key (keys %{$b}) {
+ if (exists $a->{$key}) {
+ $a->{$key} = $self->_merge_attr($a->{$key}, $b->{$key}, $is_prefetch);
+ } else {
+ $a->{$key} = delete $b->{$key};
+ }
+ }
+ return $a;
+ } else {
+ $a = [$a] unless (ref $a eq 'ARRAY');
+ $b = [$b] unless (ref $b eq 'ARRAY');
+
+ my $hash = {};
+ my $array = [];
+ foreach ($a, $b) {
+ foreach my $element (@{$_}) {
+ if (ref $element eq 'HASH') {
+ $hash = $self->_merge_attr($hash, $element, $is_prefetch);
+ } elsif (ref $element eq 'ARRAY') {
+ $array = [@{$array}, @{$element}];
+ } else {
+ if (($b == $_) && $is_prefetch) {
+ $self->_merge_array($array, $element, $is_prefetch);
+ } else {
+ push(@{$array}, $element);
+ }
+ }
+ }
+ }
+
+ if ((keys %{$hash}) && (scalar(@{$array} > 0))) {
+ return [$hash, @{$array}];
+ } else {
+ return (keys %{$hash}) ? $hash : $array;
+ }
+ }
+}
+
+sub _merge_array {
+ my ($self, $a, $b) = @_;
+
+ $b = [$b] unless (ref $b eq 'ARRAY');
+ # add elements from @{$b} to @{$a} which aren't already in @{$a}
+ foreach my $b_element (@{$b}) {
+ push(@{$a}, $b_element) unless grep {$b_element eq $_} @{$a};
+ }
+}
+
sub _construct_object {
my ($self, @row) = @_;
- my @as = @{ $self->{attrs}{as} };
-
+ my @as = @{ $self->{_attrs}{as} };
+
my $info = $self->_collapse_result(\@as, \@row);
-
my $new = $self->result_class->inflate_result($self->result_source, @$info);
-
- $new = $self->{attrs}{record_filter}->($new)
- if exists $self->{attrs}{record_filter};
+ $new = $self->{_attrs}{record_filter}->($new)
+ if exists $self->{_attrs}{record_filter};
return $new;
}
sub _collapse_result {
my ($self, $as, $row, $prefix) = @_;
+ my $live_join = $self->{attrs}->{_live_join} ||="";
my %const;
my @copy = @$row;
my $info = [ {}, {} ];
foreach my $key (keys %const) {
- if (length $key) {
+ if (length $key && $key ne $live_join) {
my $target = $info;
my @parts = split(/\./, $key);
foreach my $p (@parts) {
if (defined $prefix) {
@collapse = map {
m/^\Q${prefix}.\E(.+)$/ ? ($1) : ()
- } keys %{$self->{collapse}}
+ } keys %{$self->{_attrs}->{collapse}}
} else {
- @collapse = keys %{$self->{collapse}};
+ @collapse = keys %{$self->{_attrs}->{collapse}};
};
if (@collapse) {
$target = $target->[1]->{$p} ||= [];
}
my $c_prefix = (defined($prefix) ? "${prefix}.${c}" : $c);
- my @co_key = @{$self->{collapse}{$c_prefix}};
+ my @co_key = @{$self->{_attrs}->{collapse}{$c_prefix}};
my %co_check = map { ($_, $target->[0]->{$_}); } @co_key;
my $tree = $self->_collapse_result($as, $row, $c_prefix);
my (@final, @raw);
$row = $self->{stashed_row} = \@raw;
$tree = $self->_collapse_result($as, $row, $c_prefix);
}
- @$target = (@final ? @final : [ {}, {} ]);
+ @$target = (@final ? @final : [ {}, {} ]);
# single empty result to indicate an empty prefetched has_many
}
-
return $info;
}
sub _count { # Separated out so pager can get the full count
my $self = shift;
my $select = { count => '*' };
- my $attrs = { %{ $self->{attrs} } };
+
+ $self->_resolve;
+ my $attrs = { %{ $self->{_attrs} } };
if (my $group_by = delete $attrs->{group_by}) {
delete $attrs->{having};
my @distinct = (ref $group_by ? @$group_by : ($group_by));
}
$select = { count => { distinct => \@distinct } };
- #use Data::Dumper; die Dumper $select;
}
$attrs->{select} = $select;
# offset, order by and page are not needed to count. record_filter is cdbi
delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/;
-
my ($count) = (ref $self)->new($self->result_source, $attrs)->cursor->next;
return $count;
}
my @obj;
- if (keys %{$self->{collapse}}) {
+ # TODO: don't call resolve here
+ $self->_resolve;
+ if (keys %{$self->{_attrs}->{collapse}}) {
+# if ($self->{attrs}->{prefetch}) {
# Using $self->cursor->all is really just an optimisation.
# If we're collapsing has_many prefetches it probably makes
# very little difference, and this is cleaner than hacking
# _construct_object to survive the approach
- $self->cursor->reset;
my @row = $self->cursor->next;
while (@row) {
push(@obj, $self->_construct_object(@row));
sub reset {
my ($self) = @_;
+ delete $self->{_attrs} if (exists $self->{_attrs});
+
$self->{all_cache_position} = 0;
$self->cursor->reset;
return $self;
$cond->{-and} = [];
my @cond = @{$self->{cond}{-and}};
- for (my $i = 0; $i < @cond - 1; $i++) {
+ for (my $i = 0; $i <= @cond - 1; $i++) {
my $entry = $cond[$i];
my %hash;
}
else {
$entry =~ /([^.]+)$/;
- $hash{$entry} = $cond[++$i];
+ $hash{$1} = $cond[++$i];
}
push @{$cond->{-and}}, \%hash;
$class->find_or_create({ key => $val, ... });
-Searches for a record matching the search condition; if it doesn't find one,
-creates one and returns that instead.
+Tries to find a record based on its primary key or unique constraint; if none
+is found, creates one and returns that instead.
my $cd = $schema->resultset('CD')->find_or_create({
cdid => 5,
artist => 'Massive Attack',
title => 'Mezzanine',
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
See also L</find> and L</update_or_create>. For information on how to declare
title => 'Mezzanine',
year => 1998,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
If no C<key> is specified, it searches on all unique constraints defined on the
sub update_or_create {
my $self = shift;
my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {});
- my $hash = ref $_[0] eq 'HASH' ? shift : {@_};
+ my $cond = ref $_[0] eq 'HASH' ? shift : {@_};
- my $row = $self->find($hash, $attrs);
+ my $row = $self->find($cond);
if (defined $row) {
- $row->update($hash);
+ $row->update($cond);
return $row;
}
- return $self->create($hash);
+ return $self->create($cond);
}
=head2 get_cache
sub set_cache {
my ( $self, $data ) = @_;
$self->throw_exception("set_cache requires an arrayref")
- if defined($data) && (ref $data ne 'ARRAY');
+ if defined($data) && (ref $data ne 'ARRAY');
$self->{all_cache} = $data;
}
sub related_resultset {
my ( $self, $rel ) = @_;
+
$self->{related_resultsets} ||= {};
return $self->{related_resultsets}{$rel} ||= do {
- #warn "fetching related resultset for rel '$rel'";
+ #warn "fetching related resultset for rel '$rel' " . $self->result_source->{name};
my $rel_obj = $self->result_source->relationship_info($rel);
$self->throw_exception(
"search_related: result source '" . $self->result_source->name .
"' has no such relationship ${rel}")
unless $rel_obj; #die Dumper $self->{attrs};
- my $rs = $self->search(undef, { join => $rel });
- my $alias = defined $rs->{attrs}{seen_join}{$rel}
- && $rs->{attrs}{seen_join}{$rel} > 1
- ? join('_', $rel, $rs->{attrs}{seen_join}{$rel})
- : $rel;
-
- $self->result_source->schema->resultset($rel_obj->{class}
+ my $rs = $self->result_source->schema->resultset($rel_obj->{class}
)->search( undef,
- { %{$rs->{attrs}},
- alias => $alias,
+ { %{$self->{attrs}},
select => undef,
- as => undef }
+ as => undef,
+ join => $rel,
+ _live_join => $rel }
);
+
+ # keep reference of the original resultset
+ $rs->{_parent_rs} = $self->result_source;
+ return $rs;
};
}
through directly to SQL, so you can give e.g. C<year DESC> for a
descending order on the column `year'.
+Please note that if you have quoting enabled (see
+L<DBIx::Class::Storage/quote_char>) you will need to do C<\'year DESC' > to
+specify an order. (The scalar ref causes it to be passed as raw sql to the DB,
+so you will need to manually quote things as appropriate.)
+
=head2 columns
=over 4
attribute, the column names returned are storage-dependent. E.g. MySQL would
return a column named C<count(employeeid)> in the above example.
+=head2 +select
+
+=over 4
+
+Indicates additional columns to be selected from storage. Works the same as
+L<select> but adds columns to the selection.
+
+=back
+
+=head2 +as
+
+=over 4
+
+Indicates additional column names for those added via L<+select>.
+
+=back
+
=head2 as
=over 4
Makes the resultset paged and specifies the page to retrieve. Effectively
identical to creating a non-pages resultset and then calling ->page($page)
-on it.
+on it.
+
+If the L<rows> attribute is not specified it defaults to 10 rows per page.
=head2 rows
Specifes the maximum number of rows for direct retrieval or the number of
rows per page if the page attribute or method is used.
+=head2 offset
+
+=over 4
+
+=item Value: $offset
+
+=back
+
+Specifies the (zero-based) row number for the first row to be returned, or of
+the first row of the first page if paging is used.
+
=head2 group_by
=over 4
my $cond = shift;
my $attrs = shift || {};
$attrs->{order_by} = 'year DESC';
- $self->next::method($cond, $attrs);
+ $self->search($cond, $attrs);
}
$rs = $schema->resultset('CD')->search_by_year_desc({ artist => 'Tool' });
constraint_name => [ qw/column1 column2/ ],
);
+Alternatively, you can specify only the columns:
+
+ __PACKAGE__->add_unique_constraint([ qw/column1 column2/ ]);
+
+This will result in a unique constraint named C<table_column1_column2>, where
+C<table> is replaced with the table name.
+
Unique constraints are used, for example, when you call
L<DBIx::Class::ResultSet/find>. Only columns in the constraint are searched.
=cut
sub add_unique_constraint {
- my ($self, $name, $cols) = @_;
+ my $self = shift;
+ my $cols = pop @_;
+ my $name = shift;
+
+ $name ||= $self->name_unique_constraint($cols);
foreach my $col (@$cols) {
$self->throw_exception("No such column $col on table " . $self->name)
$self->_unique_constraints(\%unique_constraints);
}
+=head2 name_unique_constraint
+
+Return a name for a unique constraint containing the specified columns. These
+names consist of the table name and each column name, separated by underscores.
+
+For example, a constraint on a table named C<cd> containing the columns
+C<artist> and C<title> would result in a constraint name of C<cd_artist_title>.
+
+=cut
+
+sub name_unique_constraint {
+ my ($self, $cols) = @_;
+
+ return join '_', $self->name, @$cols;
+}
+
=head2 unique_constraints
Read-only accessor which returns the list of unique constraints on this source.
For example,
my $author_rs = $schema->resultset('Author')->find(1);
+ my @titles = qw/Night Day It/;
my $coderef = sub {
- my ($author, @titles) = @_;
-
# If any one of these fails, the entire transaction fails
- $author->create_related('books', {
+ $author_rs->create_related('books', {
title => $_
}) foreach (@titles);
my $rs;
eval {
- $rs = $schema->txn_do($coderef, $author_rs, qw/Night Day It/);
+ $rs = $schema->txn_do($coderef);
};
- if ($@) {
- my $error = $@;
- if ($error =~ /Rollback failed/) {
- die "something terrible has happened!";
- } else {
- deal_with_failed_transaction();
- }
+ if ($@) { # Transaction failed
+ die "something terrible has happened!" #
+ if ($@ =~ /Rollback failed/); # Rollback failed
+
+ deal_with_failed_transaction();
}
In a nested transaction (calling txn_do() from within a txn_do() coderef) only
Executes the sql statements given as a listref on every db connect.
+=head2 quote_char
+
+Specifies what characters to use to quote table and column names. If
+you use this you will want to specify L<name_sep> as well.
+
+quote_char expects either a single character, in which case it is placed
+on either side of the table/column, or an array of length 2 in which case the
+table/column name is placed between the elements.
+
+For example under MySQL you'd use C<quote_char('`')>, and under SQL Server you'd
+use C<quote_char(qw/[ ]/)>.
+
+=head2 name_sep
+
+This only needs to be used in conjunction with L<quote_char>, and is used to
+specify the character that separates elements (schemas, tables, columns) from
+each other. In most cases this is simply a C<.>.
+
=head2 debug
Causes SQL trace information to be emitted on the C<debugobj> object.
sub txn_commit {
my $self = shift;
+ my $dbh = $self->dbh;
if ($self->{transaction_depth} == 0) {
- my $dbh = $self->dbh;
unless ($dbh->{AutoCommit}) {
$self->debugobj->txn_commit()
if ($self->debug);
if (--$self->{transaction_depth} == 0) {
$self->debugobj->txn_commit()
if ($self->debug);
- $self->dbh->commit;
+ $dbh->commit;
}
}
}
my $self = shift;
eval {
+ my $dbh = $self->dbh;
if ($self->{transaction_depth} == 0) {
- my $dbh = $self->dbh;
unless ($dbh->{AutoCommit}) {
$self->debugobj->txn_rollback()
if ($self->debug);
if (--$self->{transaction_depth} == 0) {
$self->debugobj->txn_rollback()
if ($self->debug);
- $self->dbh->rollback;
+ $dbh->rollback;
}
else {
die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
my $sth = eval { $self->sth($sql,$op) };
if (!$sth || $@) {
- $self->throw_exception('no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql");
+ $self->throw_exception(
+ 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql"
+ );
}
-
@bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args
my $rv;
if ($sth) {
}
}
+sub datetime_parser {
+ my $self = shift;
+ return $self->{datetime_parser} ||= $self->build_datetime_parser(@_);
+}
+
+sub datetime_parser_type { "DateTime::Format::MySQL"; }
+
+sub build_datetime_parser {
+ my $self = shift;
+ my $type = $self->datetime_parser_type(@_);
+ eval "use ${type}";
+ $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ return $type;
+}
+
sub DESTROY { shift->disconnect }
1;
}
+sub datetime_parser_type { "DateTime::Format::DB2"; }
+
1;
=head1 NAME
my( $id ) = $_[0]->_dbh->selectrow_array('SELECT @@IDENTITY' );
return $id;
}
+
+sub build_datetime_parser {
+ my $self = shift;
+ my $type = "DateTime::Format::Strptime";
+ eval "use ${type}";
+ $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ return $type->new( pattern => '%m/%d/%Y %H:%M:%S' );
+}
\r
1;
\r
--- /dev/null
+package DBIx::Class::Storage::DBI::ODBC400;
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+sub last_insert_id
+{
+ my ($self) = @_;
+
+ my $dbh = $self->_dbh;
+
+ # get the schema/table separator:
+ # '.' when SQL naming is active
+ # '/' when system naming is active
+ my $sep = $dbh->get_info(41);
+ my $sth = $dbh->prepare_cached(
+ "SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM${sep}SYSDUMMY1", {}, 3);
+ $sth->execute();
+
+ my @res = $sth->fetchrow_array();
+
+ return @res ? $res[0] : undef;
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::ODBC400 - Automatic primary key class for DB2/400
+over ODBC
+
+=head1 SYNOPSIS
+
+ # In your table classes
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->set_primary_key('id');
+
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for DB2/400 over ODBC.
+
+
+=head1 AUTHORS
+
+Marc Mims C<< <marc@questright.com> >>
+
+Based on DBIx::Class::Storage::DBI::DB2 by Jess Robinson.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
# __PACKAGE__->load_components(qw/PK::Auto/);
+# Warn about problematic versions of DBD::Pg
+warn "DBD::Pg 1.49 is strongly recommended"
+ if ($DBD::Pg::VERSION < 1.49);
+
sub last_insert_id {
my ($self,$source,$col) = @_;
my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
return 'PostgreSQL';
}
+sub datetime_parser_type { return "DateTime::Format::Pg"; }
+
1;
=head1 NAME
use strict;
+use warnings;
+
use Test::More;
use IO::File;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 7 );
+ : ( tests => 6 );
}
use lib qw(t/lib);
use_ok('DBICTest');
-
-use_ok('DBICTest::HelperRels');
+DBICTest->init_schema();
DBICTest->schema->storage->sql_maker->quote_char("'");
DBICTest->schema->storage->sql_maker->name_sep('.');
BEGIN {
eval "use DBD::SQLite";
plan $ENV{DATA_DUMPER_TEST}
- ? ( tests => 3 )
+ ? ( tests => 2 )
: ( skip_all => 'Set $ENV{DATA_DUMPER_TEST} to run this test' );
}
use_ok('DBICTest');
-use_ok('DBICTest::HelperRels');
-
-my $rs = DBICTest::CD->search(
- { 'artist.name' => 'We Are Goth',
- 'liner_notes.notes' => 'Kill Yourself!' },
- { join => [ qw/artist liner_notes/ ] });
+my $schema = DBICTest->init_schema();
+my $rs = $schema->resultset('CD')->search({
+ 'artist.name' => 'We Are Goth',
+ 'liner_notes.notes' => 'Kill Yourself!',
+}, {
+ join => [ qw/artist liner_notes/ ],
+});
Dumper($rs);
-$rs = DBICTest::CD->search(
- { 'artist.name' => 'We Are Goth',
- 'liner_notes.notes' => 'Kill Yourself!' },
- { join => [ qw/artist liner_notes/ ] });
+$rs = $schema->resultset('CD')->search({
+ 'artist.name' => 'We Are Goth',
+ 'liner_notes.notes' => 'Kill Yourself!',
+}, {
+ join => [ qw/artist liner_notes/ ],
+});
-cmp_ok( $rs + 0, '==', 1, "Single record in after death with dumper");
+cmp_ok( $rs->count(), '==', 1, "Single record in after death with dumper");
1;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 13 );
+ : ( tests => 12 );
}
use lib qw(t/lib);
use_ok('DBICTest');
-use_ok('DBICTest::HelperRels');
+DBICTest->init_schema();
my $cbworks = 0;
--- /dev/null
+use Test::More;
+use strict;
+use warnings;
+use lib qw(t/lib);
+use DBICTest;
+
+plan tests => 7;
+
+my $schema = DBICTest->init_schema();
+my $total_cds = $schema->resultset('CD')->count;
+cmp_ok($total_cds, '>', 0, 'need cd records');
+
+# test that delete_related w/o conditions deletes all related records only
+my $artist = $schema->resultset("Artist")->find(3);
+my $artist_cds = $artist->cds->count;
+cmp_ok($artist_cds, '<', $total_cds, 'need more cds than just related cds');
+
+ok($artist->delete_related('cds'));
+cmp_ok($schema->resultset('CD')->count, '==', ($total_cds - $artist_cds), 'too many cds were deleted');
+
+$total_cds -= $artist_cds;
+
+# test that delete_related w/conditions deletes just the matched related records only
+my $artist2 = $schema->resultset("Artist")->find(2);
+my $artist2_cds = $artist2->search_related('cds')->count;
+cmp_ok($artist2_cds, '<', $total_cds, 'need more cds than related cds');
+
+ok($artist2->delete_related('cds', {title => {like => '%'}}));
+cmp_ok($schema->resultset('CD')->count, '==', ($total_cds - $artist2_cds), 'too many cds were deleted');
+
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
-plan tests => 58;
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 60;
# figure out if we've got a version of sqlite that is older than 3.2.6, in
# which case COUNT(DISTINCT()) doesn't work
ok($art->update, 'Update run');
+my $record_jp = $schema->resultset("Artist")->search(undef, { join => 'cds' })->search(undef, { prefetch => 'cds' })->next;
+
+ok($record_jp, "prefetch on same rel okay");
+
+my $record_fn = $schema->resultset("Artist")->search(undef, { join => 'cds' })->search({'cds.cdid' => '1'}, {join => 'artist_undirected_maps'})->next;
+
+ok($record_fn, "funny join is okay");
+
@art = $schema->resultset("Artist")->search({ name => 'We Are In Rehab' });
cmp_ok(@art, '==', 1, "Changed artist returned by search");
$new->update;
-$new_again = $schema->resultset("Artist")->find(4);
+my $new_again = $schema->resultset("Artist")->find(4);
is($new_again->name, 'Man With A Spoon', 'Retrieved correctly');
is_deeply([$schema->source('CD')->columns], [qw/cdid artist title/]);
}
-}
-
-1;
-sub run_tests {
-my $schema = shift;
-
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
plan tests => 3;
# add some rows inside a transaction and commit it
};
is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
-plan tests => 4;
-$artist = DBICTest::Artist->find(1);
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 5;
+
+my $artist = DBICTest::Artist->find(1);
ok($artist->find_related('twokeys', {cd => 1}), "find multiple pks using relationships + args");
+
+ok($schema->resultset("FourKeys")->search({ foo => 1, bar => 2 })->find({ hello => 3, goodbye => 4 }), "search on partial key followed by a find");
ok($schema->resultset("FourKeys")->find(1,2,3,4), "find multiple pks without hash");
ok($schema->resultset("FourKeys")->find(5,4,3,6), "find multiple pks without hash");
is($schema->resultset("FourKeys")->find(1,2,3,4)->ID, 'DBICTest::FourKeys|fourkeys|bar=2|foo=1|goodbye=4|hello=3', 'unique object id ok for multiple pks');
-}
-
-1;
-sub run_tests {
-my $schema = shift;
-
use strict;
use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
plan tests => 32;
# has_a test
} );
$track->set_from_related( cd => $cd );
-if ($INC{'DBICTest/HelperRels.pm'}) { # expect inflated object
- is($track->disc->cdid, 4, 'set_from_related ok, including alternative accessor' );
-} else {
- is( $track->cd, 4, 'set_from_related ok' );
-}
+is($track->disc->cdid, 4, 'set_from_related ok, including alternative accessor' );
$track->set_from_related( cd => undef );
my $t_cd = ($schema->resultset("Track")->search( cd => 4, position => 99 ))[0]->cd;
-if ($INC{'DBICTest/HelperRels.pm'}) { # except inflated object
- is( $t_cd->cdid, 4, 'update_from_related ok' );
-} else {
- is( $t_cd, 4, 'update_from_related ok' );
-}
+is( $t_cd->cdid, 4, 'update_from_related ok' );
# find_or_create_related with an existing record
$cd = $artist->find_or_create_related( 'cds', { title => 'Big Flop' } );
cmp_ok($artist->cds->count, '==', 0, "Correct new #cds for artist");
cmp_ok($nartist->cds->count, '==', 2, "Correct new #cds for artist");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
plan tests => 12;
is( $it->next->title, "Generic Manufactured Singles", "software iterator->next ok" );
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
eval { require DateTime };
plan skip_all => "Need DateTime for inflation tests" if $@;
($cd) = $schema->resultset("CD")->search( year => $now->year );
is( $cd->year->year, $now->year, 'deflate ok' );
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
eval { require DateTime };
plan skip_all => "Need DateTime for inflation tests" if $@;
($cd) = $schema->resultset("CD")->search( year => $now->year );
is( $cd->year->year, $now->year, 'deflate ok' );
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
use Data::Dumper;
ok($inflated = $entry->serialized, 'arrayref inflation ok');
is_deeply($inflated, $complex2->{serialized}, 'inflated array matches original');
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
BEGIN {
eval "use DBD::SQLite";
$art->discard_changes;
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
plan tests => 2;
my $copied = $artist->copy({ name => 'Don\'t tell the RIAA', artistid => undef });
is($copied->name, 'Don\'t tell the RIAA', "Copied with PKs ok.");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
#warn "$dsn $user $pass";
-plan skip_all, 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
+plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
plan tests => 5;
# clean up our mess
$dbh->do("DROP TABLE artist");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+{
+ package DBICTest::Schema::Casecheck;
+
+ use strict;
+ use warnings;
+ use base 'DBIx::Class';
+
+ __PACKAGE__->load_components(qw/PK::Auto Core/);
+ __PACKAGE__->table('casecheck');
+ __PACKAGE__->add_columns(qw/id name NAME uc_name/);
+ __PACKAGE__->set_primary_key('id');
+
+}
+
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
#warn "$dsn $user $pass";
-plan skip_all, 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
- . ' (note: creates and drops tables named artist and casecheck!)' unless ($dsn && $user);
+plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
+ . ' (note: creates and drops tables named artist and casecheck!)' unless ($dsn && $user);
plan tests => 8;
+DBICTest::Schema->load_classes( 'Casecheck' );
DBICTest::Schema->compose_connection('PgTest' => $dsn, $user, $pass);
my $dbh = PgTest->schema->storage->dbh;
PgTest->schema->source("Artist")->name("testschema.artist");
$dbh->do("CREATE SCHEMA testschema;");
-
$dbh->do("CREATE TABLE testschema.artist (artistid serial PRIMARY KEY, name VARCHAR(100), charfield CHAR(10));");
ok ( $dbh->do('CREATE TABLE testschema.casecheck (id serial PRIMARY KEY, "name" VARCHAR(1), "NAME" VARCHAR(2), "UC_NAME" VARCHAR(3));'), 'Creation of casecheck table');
$dbh->do("DROP TABLE testschema.casecheck;");
$dbh->do("DROP SCHEMA testschema;");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
-plan skip_all, 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
+plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
'Warning: This test drops and creates tables called \'artist\', \'cd\' and \'track\''
unless ($dsn && $user && $pass);
my $tcount = OraTest::Track->search(
{},
{
- select => [{count => {distinct => ['position', 'title']}}],
- as => ['count']
+ select => [{count => {distinct => ['position', 'title']}}],
+ as => ['count']
}
);
$dbh->do("DROP TABLE cd");
$dbh->do("DROP TABLE track");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_${_}" } qw/DSN USER PASS/};
#warn "$dsn $user $pass";
-plan skip_all, 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test'
+plan skip_all => 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
plan tests => 6;
# clean up our mess
$dbh->do("DROP TABLE artist");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_400_${_}" } qw/DSN USER PASS/};
# Probably best to pass the DBQ option in the DSN to specify a specific
# libray. Something like:
# DBICTEST_DB2_400_DSN='dbi:ODBC:dsn=MyAS400;DBQ=MYLIB'
-plan skip_all, 'Set $ENV{DBICTEST_DB2_400_DSN}, _USER and _PASS to run this test'
+plan skip_all => 'Set $ENV{DBICTEST_DB2_400_DSN}, _USER and _PASS to run this test'
unless ($dsn && $user);
plan tests => 6;
# clean up our mess
$dbh->do("DROP TABLE artist");
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
#warn "$dsn $user $pass";
-plan skip_all, 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
+plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn);
plan tests => 4;
-$schema->compose_connection( 'MSSQLTest' => $dsn, $user, $pass );
+DBICTest::Schema->compose_connection( 'MSSQLTest' => $dsn, $user, $pass );
my $dbh = MSSQLTest->schema->storage->dbh;
$it->next;
is( $it->next, undef, "next past end of resultset ok" );
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
BEGIN {
eval "use DBD::SQLite";
);
is( $it->count, 1, "complex abstract count ok" );
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
use IO::File;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 44 );
+ : ( tests => 42 );
}
# figure out if we've got a version of sqlite that is older than 3.2.6, in
);
cmp_ok( scalar $rs->all, '==', scalar $rs->slice(0, $rs->count - 1), 'slice() with join has same count as all()' );
-eval { $rs->search(undef, { rows => 0, offset => 3 })->all; };
-
-ok($@, "rows => 0 errors: $@");
-
$rs = $schema->resultset("Artist")->search(
{ 'liner_notes.notes' => 'Kill Yourself!' },
{ join => { 'cds' => 'liner_notes' } });
cmp_ok($queries, '==', 1, 'Only one query run');
-# has_many resulting in an additional select if no records available despite prefetch
-my $track = $schema->resultset("Artist")->create( {
- artistid => 4,
- name => 'Artist without CDs',
-} );
-
-$queries = 0;
-$schema->storage->debug(1);
-
-my $artist_without_cds = $schema->resultset("Artist")->find(4, {
- join => [qw/ cds /],
- prefetch => [qw/ cds /],
-});
-my @no_cds = $artist_without_cds->cds;
-
-is($queries, 1, 'prefetch ran only 1 sql statement');
-
-$schema->storage->debug(0);
-
-} # end run_tests
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
eval "use DBD::SQLite";
plan skip_all => 'needs DBD::SQLite for testing' if $@;
{ join => [ qw/tags liner_notes/ ] } ),
'==', 2, "Mixed count ok");
-}
-
-1;
-sub run_tests {
-my $schema = shift;\r
-\r
-# this test will check to see if you can have 2 columns\r
-# in the same class pointing at the same other class\r
-#\r
-# example:\r
-#\r
-# +---------+ +--------------+\r
-# | SelfRef | | SelfRefAlias |\r
-# +---------+ 1-M +--------------+\r
-# | id |-------| self_ref | --+\r
-# | name | | alias | --+\r
-# +---------+ +--------------+ |\r
-# /|\ |\r
-# | |\r
-# +--------------------------------+\r
-#\r
-# see http://use.perl.org/~LTjake/journal/24876 for the\r
-# issue with CDBI\r
-\r
-plan tests => 4;\r
-\r
-my $item = $schema->resultset("SelfRef")->find( 1 );\r
-is( $item->name, 'First', 'proper start item' );\r
-\r
-my @aliases = $item->aliases;\r
-\r
-is( scalar @aliases, 1, 'proper number of aliases' );\r
-\r
-my $orig = $aliases[ 0 ]->self_ref;\r
-my $alias = $aliases[ 0 ]->alias;\r
-\r
-is( $orig->name, 'First', 'proper original' );\r
-is( $alias->name, 'Second', 'proper alias' );\r
-\r
-}\r
-\r
-1;\r
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+# this test will check to see if you can have 2 columns
+# in the same class pointing at the same other class
+#
+# example:
+#
+# +---------+ +--------------+
+# | SelfRef | | SelfRefAlias |
+# +---------+ 1-M +--------------+
+# | id |-------| self_ref | --+
+# | name | | alias | --+
+# +---------+ +--------------+ |
+# /|\ |
+# | |
+# +--------------------------------+
+#
+# see http://use.perl.org/~LTjake/journal/24876 for the
+# issue with CDBI
+
+plan tests => 4;
+
+my $item = $schema->resultset("SelfRef")->find( 1 );
+is( $item->name, 'First', 'proper start item' );
+
+my @aliases = $item->aliases;
+
+is( scalar @aliases, 1, 'proper number of aliases' );
+
+my $orig = $aliases[ 0 ]->self_ref;
+my $alias = $aliases[ 0 ]->alias;
+
+is( $orig->name, 'First', 'proper original' );
+is( $alias->name, 'Second', 'proper alias' );
+
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
eval 'use Data::UUID ; 1'
- or plan skip_all, 'Install Data::UUID run this test';
+ or plan skip_all => 'Install Data::UUID run this test';
plan tests => 1;
DBICTest::Schema::Artist->load_components('UUIDColumns');
my $artist = $schema->resultset("Artist")->create( { artistid => 100 } );
like $artist->name, qr/[\w-]{36}/, 'got something like uuid';
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
-plan tests => 34;
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 36;
+
+is_deeply([ sort $schema->source('CD')->unique_constraint_names ], [ qw/cd_artist_title primary/ ], 'CD source has an automatically named unique constraint');
+is_deeply([ sort $schema->source('Producer')->unique_constraint_names ], [ qw/primary prod_name/ ], 'Producer source has a named unique constraint');
my $artistid = 1;
my $title = 'UNIQUE Constraint';
artist => $artistid,
title => $title,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
is($cd2->get_column('artist'), $cd1->get_column('artist'), 'find by specific key: artist is correct');
is($cd2->title, $cd1->title, 'title is correct');
is($cd2->year, $cd1->year, 'year is correct');
-my $cd3 = $schema->resultset('CD')->find($artistid, $title, { key => 'artist_title' });
+my $cd3 = $schema->resultset('CD')->find($artistid, $title, { key => 'cd_artist_title' });
is($cd3->get_column('artist'), $cd1->get_column('artist'), 'find by specific key, ordered columns: artist is correct');
is($cd3->title, $cd1->title, 'title is correct');
title => $title,
year => 2007,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
ok(! $cd5->is_changed, 'update_or_create by specific key: row is clean');
title => $title,
year => 2010,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
is($cd7->cdid, $cd1->cdid, 'find_or_create by specific key: cdid is correct');
my $artist = $schema->resultset('Artist')->find($artistid);
my $cd8 = $artist->find_or_create_related('cds',
{
- artist => $artistid,
title => $title,
year => 2020,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
is($cd8->cdid, $cd1->cdid, 'find_or_create related by specific key: cdid is correct');
my $cd9 = $artist->update_or_create_related('cds',
{
- artist => $artistid,
title => $title,
year => 2021,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
ok(! $cd9->is_changed, 'update_or_create by specific key: row is clean');
is($cd9->title, $cd1->title, 'title is correct');
is($cd9->year, 2021, 'year is correct');
-}
-
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
plan tests => 39;
my $code = sub {
# Force txn_rollback() to throw an exception
no warnings 'redefine';
+ no strict 'refs';
local *{"DBIx::Class::Schema::txn_rollback"} = sub{die 'FAILED'};
eval {
})->first;
ok(!defined($cd), q{failed txn_do didn't add failed txn's cd});
}
-}
-1;
use strict;
-use warnings;
+use warnings;
-sub run_tests {
-my $schema = shift;
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
plan tests => 4;
my $artist = $schema->resultset('Artist')->find(1);
my $cover_band;
{
- no warnings 'redefine';
+ no warnings qw(redefine once);
local *DBICTest::Artist::result_source_instance = \&DBICTest::Schema::Artist::result_source_instance;
$cover_band = $artist->copy;
cmp_ok($cover_cds->search_related('tags')->count, '==',
$artist_cds->search_related('tags')->count , 'duplicated count ok');
-}
-1;
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
my $queries;
$schema->storage->debugcb( sub{ $queries++ } );
is( scalar @{$rs->get_cache}, 2, 'set_cache() is functional' );
-$cd = $schema->resultset('CD')->find(1);
+my $cd = $schema->resultset('CD')->find(1);
$rs->clear_cache;
$schema->storage->debug(0);
-}
-
-1;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
use Storable;
-sub run_tests {
-my $schema = shift;
+my $schema = DBICTest->init_schema();
plan tests => 1;
my $copy = eval { Storable::dclone($artist) };
is_deeply($copy, $artist, 'serialize row object works');
-}
-
-1;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+eval 'use Encode ; 1'
+ or plan skip_all => 'Install Encode run this test';
+
+plan tests => 2;
+
+DBICTest::Schema::Artist->load_components('UTF8Columns');
+DBICTest::Schema::Artist->utf8_columns('name');
+Class::C3->reinitialize();
+
+my $artist = $schema->resultset("Artist")->create( { name => 'uni' } );
+ok( Encode::is_utf8( $artist->name ), 'got name with utf8 flag' );
+
+my $utf8_char = 'uniuni';
+Encode::_utf8_on($utf8_char);
+$artist->name($utf8_char);
+ok( !Encode::is_utf8( $artist->{_column_data}->{name} ),
+ 'store utf8 less chars' );
+
-sub run_tests {
-my $schema = shift;
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
my $queries;
#$schema->storage->debugfh(IO::File->new('t/var/temp.trace', 'w'));
liner_notes on update');
$schema->storage->debug(0);
-}
-1;
+use strict;
+use warnings;
+
use Test::More;
use lib qw(t/lib);
use DBICTest;
-use DBICTest::HelperRels;
eval "use SQL::Translator";
plan skip_all => 'SQL::Translator required' if $@;
# do not taunt happy dave ball
-my $schema = DBICTest::Schema;
+my $schema = 'DBICTest::Schema';
plan tests => 33;
'selftable' => 'treelike', 'foreigntable' => 'treelike',
'selfcols' => ['parent'], 'foreigncols' => ['id'],
'needed' => 1, on_delete => '', on_update => ''},
- {'display' => 'twokeytreelike -> twokeytreelike for parent1,parent2',
- 'selftable' => 'twokeytreelike', 'foreigntable' => 'twokeytreelike',
- 'selfcols' => ['parent1', 'parent2'], 'foreigncols' => ['id1','id2'],
- 'needed' => 1, on_delete => '', on_update => ''},
+
+ # shouldn't this be generated?
+ #
+ #{'display' => 'twokeytreelike -> twokeytreelike for parent1,parent2',
+ # 'selftable' => 'twokeytreelike', 'foreigntable' => 'twokeytreelike',
+ # 'selfcols' => ['parent1', 'parent2'], 'foreigncols' => ['id1','id2'],
+ # 'needed' => 1, on_delete => '', on_update => ''},
+
{'display' => 'tags -> cd',
'selftable' => 'tags', 'foreigntable' => 'cd',
'selfcols' => ['cd'], 'foreigncols' => ['cdid'],
{'display' => 'cd artist and title unique',
'table' => 'cd', 'cols' => ['artist', 'title'],
'needed' => 1},
+ {'display' => 'producer name unique',
+ 'table' => 'producer', 'cols' => ['name'],
+ 'needed' => 1},
{'display' => 'twokeytreelike name unique',
'table' => 'twokeytreelike', 'cols' => ['name'],
'needed' => 1},
return 0;
}
+my( $ondel, $onupd );
+
sub check_unique {
my ($selftable, $selfcol) = @_;
# vim: filetype=perl
+use strict;
+use warnings;
-sub run_tests {
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
- plan tests => 321;
- my $schema = shift;
+my $schema = DBICTest->init_schema();
- my $employees = $schema->resultset('Employee');
- $employees->delete();
+plan tests => 321;
- foreach (1..5) {
- $employees->create({ name=>'temp' });
- }
- $employees = $employees->search(undef,{order_by=>'position'});
- ok( check_rs($employees), "intial positions" );
-
- hammer_rs( $employees );
+my $employees = $schema->resultset('Employee');
+$employees->delete();
- #return;
+foreach (1..5) {
+ $employees->create({ name=>'temp' });
+}
+$employees = $employees->search(undef,{order_by=>'position'});
+ok( check_rs($employees), "intial positions" );
- DBICTest::Employee->grouping_column('group_id');
- $employees->delete();
- foreach my $group_id (1..3) {
- foreach (1..6) {
- $employees->create({ name=>'temp', group_id=>$group_id });
- }
- }
- $employees = $employees->search(undef,{order_by=>'group_id,position'});
+hammer_rs( $employees );
- foreach my $group_id (1..3) {
- my $group_employees = $employees->search({group_id=>$group_id});
- $group_employees->all();
- ok( check_rs($group_employees), "group intial positions" );
- hammer_rs( $group_employees );
+DBICTest::Employee->grouping_column('group_id');
+$employees->delete();
+foreach my $group_id (1..3) {
+ foreach (1..6) {
+ $employees->create({ name=>'temp', group_id=>$group_id });
}
+}
+$employees = $employees->search(undef,{order_by=>'group_id,position'});
+foreach my $group_id (1..3) {
+ my $group_employees = $employees->search({group_id=>$group_id});
+ $group_employees->all();
+ ok( check_rs($group_employees), "group intial positions" );
+ hammer_rs( $group_employees );
}
sub hammer_rs {
my $employee;
my $count = $rs->count();
my $position_column = $rs->result_class->position_column();
+ my $row;
foreach my $position (1..$count) {
return 1;
}
-1;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 8;
+
+my $cd;
+my $rs = $cd = $schema->resultset("CD")->search({});
+
+my $rs_title = $rs->get_column('title');
+my $rs_year = $rs->get_column('year');
+
+is($rs_title->next, 'Spoonful of bees', "next okay");
+
+my @all = $rs_title->all;
+cmp_ok(scalar @all, '==', 5, "five titles returned");
+
+cmp_ok($rs_year->max, '==', 2001, "max okay for year");
+is($rs_title->min, 'Caterwaulin\' Blues', "min okay for title");
+
+cmp_ok($rs_year->sum, '==', 9996, "three artists returned");
+
+my $psrs = $schema->resultset('CD')->search({},
+ {
+ '+select' => \'COUNT(*)',
+ '+as' => 'count'
+ }
+);
+ok(defined($psrs->get_column('count')), '+select/+as count');
+
+$psrs = $schema->resultset('CD')->search({},
+ {
+ '+select' => [ \'COUNT(*)', 'title' ],
+ '+as' => [ 'count', 'addedtitle' ]
+ }
+);
+ok(defined($psrs->get_column('count')), '+select/+as arrayref count');
+ok(defined($psrs->get_column('addedtitle')), '+select/+as title');
+
--- /dev/null
+# vim: filetype=perl
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+eval 'require JSON';
+plan skip_all => 'Install JSON to run this test' if ($@);
+
+eval 'require Text::CSV_XS';
+if ($@) {
+ eval 'require Text::CSV_PP';
+ plan skip_all => 'Install Text::CSV_XS or Text::CSV_PP to run this test' if ($@);
+}
+
+plan tests => 5;
+
+my $employees = $schema->resultset('Employee');
+my $cmd = qq|script/dbicadmin --schema=DBICTest::Schema --class=Employee --tlibs --connect='["dbi:SQLite:dbname=t/var/DBIxClass.db","",""]' --force --tlibs|;
+
+`$cmd --op=insert --set='{name:"Matt"}'`;
+ok( ($employees->count()==1), 'insert count' );
+
+my $employee = $employees->find(1);
+ok( ($employee->name() eq 'Matt'), 'insert valid' );
+
+`$cmd --op=update --set='{name:"Trout"}'`;
+$employee = $employees->find(1);
+ok( ($employee->name() eq 'Trout'), 'update' );
+
+`$cmd --op=insert --set='{name:"Aran"}'`;
+my $data = `$cmd --op=select --attrs='{order_by:"name"}'`;
+ok( ($data=~/Aran.*Trout/s), 'select with attrs' );
+
+`$cmd --op=delete --where='{name:"Trout"}'`;
+ok( ($employees->count()==1), 'delete' );
+
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+eval { require DateTime::Format::MySQL };
+plan skip_all => "Need DateTime::Format::MySQL for inflation tests" if $@;
+
+plan tests => 2;
+
+# inflation test
+my $event = $schema->resultset("Event")->find(1);
+
+isa_ok($event->starts_at, 'DateTime', 'DateTime returned');
+
+is($event->starts_at, '2006-04-25T22:24:33', 'Correct date/time');
+
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
use Class::Inspector;
BEGIN {
sub some_method {}
}
-sub run_tests {
+my $schema = DBICTest->init_schema();
-my $schema = shift;
plan tests => 6;
ok(Class::Inspector->loaded('TestPackage::A'),
ok(!$@, 'ensure_class_loaded detected an existing but non-loaded class');
ok(Class::Inspector->loaded('DBICTest::FakeComponent'),
'DBICTest::FakeComponent now loaded');
-}
1;
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 4;
+
+my $rs1 = $schema->resultset("Artist")->search({ 'tags.tag' => 'Blue' }, { join => {'cds' => 'tracks'}, prefetch => {'cds' => 'tags'} });
+my @artists = $rs1->all;
+cmp_ok(@artists, '==', 1, "One artist returned");
+
+my $rs2 = $rs1->search({ artistid => '1' }, { join => {'cds' => {'cd_to_producer' => 'producer'} } });
+my $rs3 = $rs2->search_related('cds')->search({'cds.title' => 'Forkful of bees'});
+cmp_ok($rs3->count, '==', 3, "Three cds returned");
+
+my $rs4 = $schema->resultset("CD")->search({ 'artist.artistid' => '1' }, { join => ['tracks', 'artist'], prefetch => 'artist' });
+my @rs4_results = $rs4->all;
+
+
+is($rs4_results[0]->cdid, 1, "correct cd returned");
+
+my $rs5 = $rs4->search({'tracks.title' => 'Sticky Honey'});
+is($rs5->count, 1, "search without using previous joins okay");
+
+1;
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/01core.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/04db.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/05multipk.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/06relationship.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/07pager.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/08inflate.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/08inflate_has_a.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/08inflate_serialize.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/09update.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/10auto.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/11mysql.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/12pg.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/13oracle.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/145db2.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/146db2_400.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/14mssql.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/15limit.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/16joins.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/17join_count.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/18self_referencial.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/19uuid.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/20unique.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/21transactions.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/22cascade_copy.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/23cache.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/24serialize.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/25utf8.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/26might_have.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/28result_set_column.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::BasicRels;
-
-require "t/run/30ensure_class_loaded.tl";
-run_tests(DBICTest->schema);
use lib 't/lib';
-use_ok('DBICTest::HelperRels');
+use_ok('DBICTest');
+DBICTest->init_schema();
DBICTest::CD->load_components(qw/CDBICompat::Pager/);
City->table('City');
City->columns(All => qw/Name State Population/);
-City->has_a(State => 'State');
+{
+ # Disable the `no such table' warning
+ local $SIG{__WARN__} = sub {
+ my $warning = shift;
+ warn $warning unless ($warning =~ /\Qno such table: City(1)\E/);
+ };
+
+ City->has_a(State => 'State');
+}
#-------------------------------------------------------------------------
package CD;
ok $pj = $btaste->Director, "Bad taste now hasa() director";
isa_ok $pj => 'Director';
{
- no warnings 'redefine';
+ no warnings qw(redefine once);
local *Ima::DBI::st::execute =
sub { ::fail("Shouldn't need to query db"); };
is $pj->id, 'Peter Jackson', 'ID already stored';
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/01core.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/04db.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/05multipk.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/06relationship.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/07pager.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/08inflate.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/08inflate_has_a.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/08inflate_serialize.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/09update.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/10auto.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/11mysql.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/12pg.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/13oracle.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/145db2.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/146db2_400.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/14mssql.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/15limit.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/16joins.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/17join_count.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/18self_referencial.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/19uuid.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/20unique.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/21transactions.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/22cascade_copy.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/23cache.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/24serialize.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/25utf8.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/26might_have.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/27ordered.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/28result_set_column.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/29dbicadmin.tl";
-run_tests(DBICTest->schema);
+++ /dev/null
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::HelperRels;
-
-require "t/run/30ensure_class_loaded.tl";
-run_tests(DBICTest->schema);
use warnings;
use DBICTest::Schema;
-sub initialise {
+=head1 NAME
- my $db_file = "t/var/DBIxClass.db";
-
- unlink($db_file) if -e $db_file;
- unlink($db_file . "-journal") if -e $db_file . "-journal";
- mkdir("t/var") unless -d "t/var";
-
- my $dsn = $ENV{"DBICTEST_DSN"} || "dbi:SQLite:${db_file}";
- my $dbuser = $ENV{"DBICTEST_DBUSER"} || '';
- my $dbpass = $ENV{"DBICTEST_DBPASS"} || '';
+DBICTest - Library to be used by DBIx::Class test scripts.
-# my $dsn = "dbi:SQLite:${db_file}";
+=head1 SYNOPSIS
+
+ use lib qw(t/lib);
+ use DBICTest;
+ use Test::More;
- return DBICTest::Schema->compose_connection('DBICTest' => $dsn, $dbuser, $dbpass);
+ my $schema = DBICTest->init_schema();
+
+=head1 DESCRIPTION
+
+This module provides the basic utilities to write tests against
+DBIx::Class.
+
+=head1 METHODS
+
+=head2 init_schema
+
+ my $schema = DBICTest->init_schema(
+ no_deploy=>1,
+ no_populate=>1,
+ );
+
+This method removes the test SQLite database in t/var/DBIxClass.db
+and then creates a new, empty database.
+
+This method will call deploy_schema() by default, unless the
+no_deploy flag is set.
+
+Also, by default, this method will call populate_schema(),
+unless the no_deploy or no_populate flags are set.
+
+=cut
+
+sub init_schema {
+ my $self = shift;
+ my %args = @_;
+ my $db_file = "t/var/DBIxClass.db";
+
+ unlink($db_file) if -e $db_file;
+ unlink($db_file . "-journal") if -e $db_file . "-journal";
+ mkdir("t/var") unless -d "t/var";
+
+ my $dsn = $ENV{"DBICTEST_DSN"} || "dbi:SQLite:${db_file}";
+ my $dbuser = $ENV{"DBICTEST_DBUSER"} || '';
+ my $dbpass = $ENV{"DBICTEST_DBPASS"} || '';
+
+ my $schema = DBICTest::Schema->compose_connection('DBICTest' => $dsn, $dbuser, $dbpass);
+ if ( !$args{no_deploy} ) {
+ __PACKAGE__->deploy_schema( $schema );
+ __PACKAGE__->populate_schema( $schema ) if( !$args{no_populate} );
+ }
+ return $schema;
}
-
+
+=head2 deploy_schema
+
+ DBICTest->deploy_schema( $schema );
+
+This method does one of two things to the schema. It can either call
+the experimental $schema->deploy() if the DBICTEST_SQLT_DEPLOY environment
+variable is set, otherwise the default is to read in the t/lib/sqlite.sql
+file and execute the SQL within. Either way you end up with a fresh set
+of tables for testing.
+
+=cut
+
+sub deploy_schema {
+ my $self = shift;
+ my $schema = shift;
+
+ if ($ENV{"DBICTEST_SQLT_DEPLOY"}) {
+ return $schema->deploy();
+ } else {
+ open IN, "t/lib/sqlite.sql";
+ my $sql;
+ { local $/ = undef; $sql = <IN>; }
+ close IN;
+ $schema->storage->dbh->do($_) for split(/;\n/, $sql);
+ }
+}
+
+=head2 populate_schema
+
+ DBICTest->populate_schema( $schema );
+
+After you deploy your schema you can use this method to populate
+the tables with test data.
+
+=cut
+
+sub populate_schema {
+ my $self = shift;
+ my $schema = shift;
+
+ $schema->storage->dbh->do("PRAGMA synchronous = OFF");
+
+ $schema->populate('Artist', [
+ [ qw/artistid name/ ],
+ [ 1, 'Caterwauler McCrae' ],
+ [ 2, 'Random Boy Band' ],
+ [ 3, 'We Are Goth' ],
+ ]);
+
+ $schema->populate('CD', [
+ [ qw/cdid artist title year/ ],
+ [ 1, 1, "Spoonful of bees", 1999 ],
+ [ 2, 1, "Forkful of bees", 2001 ],
+ [ 3, 1, "Caterwaulin' Blues", 1997 ],
+ [ 4, 2, "Generic Manufactured Singles", 2001 ],
+ [ 5, 3, "Come Be Depressed With Us", 1998 ],
+ ]);
+
+ $schema->populate('LinerNotes', [
+ [ qw/liner_id notes/ ],
+ [ 2, "Buy Whiskey!" ],
+ [ 4, "Buy Merch!" ],
+ [ 5, "Kill Yourself!" ],
+ ]);
+
+ $schema->populate('Tag', [
+ [ qw/tagid cd tag/ ],
+ [ 1, 1, "Blue" ],
+ [ 2, 2, "Blue" ],
+ [ 3, 3, "Blue" ],
+ [ 4, 5, "Blue" ],
+ [ 5, 2, "Cheesy" ],
+ [ 6, 4, "Cheesy" ],
+ [ 7, 5, "Cheesy" ],
+ [ 8, 2, "Shiny" ],
+ [ 9, 4, "Shiny" ],
+ ]);
+
+ $schema->populate('TwoKeys', [
+ [ qw/artist cd/ ],
+ [ 1, 1 ],
+ [ 1, 2 ],
+ [ 2, 2 ],
+ ]);
+
+ $schema->populate('FourKeys', [
+ [ qw/foo bar hello goodbye/ ],
+ [ 1, 2, 3, 4 ],
+ [ 5, 4, 3, 6 ],
+ ]);
+
+ $schema->populate('OneKey', [
+ [ qw/id artist cd/ ],
+ [ 1, 1, 1 ],
+ [ 2, 1, 2 ],
+ [ 3, 2, 2 ],
+ ]);
+
+ $schema->populate('SelfRef', [
+ [ qw/id name/ ],
+ [ 1, 'First' ],
+ [ 2, 'Second' ],
+ ]);
+
+ $schema->populate('SelfRefAlias', [
+ [ qw/self_ref alias/ ],
+ [ 1, 2 ]
+ ]);
+
+ $schema->populate('ArtistUndirectedMap', [
+ [ qw/id1 id2/ ],
+ [ 1, 2 ]
+ ]);
+
+ $schema->populate('Producer', [
+ [ qw/producerid name/ ],
+ [ 1, 'Matt S Trout' ],
+ [ 2, 'Bob The Builder' ],
+ [ 3, 'Fred The Phenotype' ],
+ ]);
+
+ $schema->populate('CD_to_Producer', [
+ [ qw/cd producer/ ],
+ [ 1, 1 ],
+ [ 1, 2 ],
+ [ 1, 3 ],
+ ]);
+
+ $schema->populate('TreeLike', [
+ [ qw/id parent name/ ],
+ [ 1, 0, 'foo' ],
+ [ 2, 1, 'bar' ],
+ [ 3, 2, 'baz' ],
+ [ 4, 3, 'quux' ],
+ ]);
+
+ $schema->populate('Track', [
+ [ qw/trackid cd position title/ ],
+ [ 4, 2, 1, "Stung with Success"],
+ [ 5, 2, 2, "Stripy"],
+ [ 6, 2, 3, "Sticky Honey"],
+ [ 7, 3, 1, "Yowlin"],
+ [ 8, 3, 2, "Howlin"],
+ [ 9, 3, 3, "Fowlin"],
+ [ 10, 4, 1, "Boring Name"],
+ [ 11, 4, 2, "Boring Song"],
+ [ 12, 4, 3, "No More Ideas"],
+ [ 13, 5, 1, "Sad"],
+ [ 14, 5, 2, "Under The Weather"],
+ [ 15, 5, 3, "Suicidal"],
+ [ 16, 1, 1, "The Bees Knees"],
+ [ 17, 1, 2, "Apiary"],
+ [ 18, 1, 3, "Beehind You"],
+ ]);
+
+ $schema->populate('Event', [
+ [ qw/id starts_at/ ],
+ [ 1, '2006-04-25 22:24:33' ],
+ ]);
+
+ $schema->populate('Link', [
+ [ qw/id title/ ],
+ [ 1, 'aaa' ]
+ ]);
+
+ $schema->populate('Bookmark', [
+ [ qw/id link/ ],
+ [ 1, 1 ]
+ ]);
+}
+
1;
+++ /dev/null
-package # hide from PAUSE
- DBICTest::BasicRels;
-
-use DBICTest::Schema;
-use DBICTest::Schema::BasicRels;
-use DBICTest::Setup;
-
-1;
+++ /dev/null
-package # hide from PAUSE
- DBICTest::HelperRels;
-
-use DBICTest::Schema;
-use DBICTest::Schema::HelperRels;
-use DBICTest::Setup;
-
-1;
CD
Link
Bookmark
- #Casecheck
#dummy
Track
Tag
'Producer',
'CD_to_Producer',
),
- qw/SelfRefAlias TreeLike TwoKeyTreeLike/
+ qw/SelfRefAlias TreeLike TwoKeyTreeLike Event/
);
1;
use base 'DBIx::Class::Core';
-__PACKAGE__->load_components('PK::Auto');
-
-DBICTest::Schema::Artist->table('artist');
-DBICTest::Schema::Artist->add_columns(
+__PACKAGE__->table('artist');
+__PACKAGE__->add_columns(
'artistid' => {
data_type => 'integer',
is_auto_increment => 1
is_nullable => 1,
},
);
-DBICTest::Schema::Artist->set_primary_key('artistid');
+__PACKAGE__->set_primary_key('artistid');
__PACKAGE__->mk_classdata('field_name_for', {
artistid => 'primary key',
name => 'artist name',
});
+__PACKAGE__->has_many(
+ cds => 'DBICTest::Schema::CD', undef,
+ { order_by => 'year' },
+);
+
+__PACKAGE__->has_many( twokeys => 'DBICTest::Schema::TwoKeys' );
+__PACKAGE__->has_many( onekeys => 'DBICTest::Schema::OneKey' );
+
+__PACKAGE__->has_many(
+ artist_undirected_maps => 'DBICTest::Schema::ArtistUndirectedMap',
+ [ {'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'} ],
+ { cascade_copy => 0 } # this would *so* not make sense
+);
+
1;
);
__PACKAGE__->set_primary_key(qw/id1 id2/);
+__PACKAGE__->belongs_to( 'artist1', 'DBICTest::Schema::Artist', 'id1' );
+__PACKAGE__->belongs_to( 'artist2', 'DBICTest::Schema::Artist', 'id2');
+__PACKAGE__->has_many(
+ 'mapped_artists', 'DBICTest::Schema::Artist',
+ [ {'foreign.artistid' => 'self.id1'}, {'foreign.artistid' => 'self.id2'} ],
+);
+
1;
+++ /dev/null
-package # hide from PAUSE
- DBICTest::Schema::BasicRels;
-
-use base 'DBIx::Class::Core';
-
-DBICTest::Schema::Artist->add_relationship(
- cds => 'DBICTest::Schema::CD',
- { 'foreign.artist' => 'self.artistid' },
- { order_by => 'year', join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi' }
-);
-DBICTest::Schema::Artist->add_relationship(
- twokeys => 'DBICTest::Schema::TwoKeys',
- { 'foreign.artist' => 'self.artistid' },
- { cascade_copy => 1 }
-);
-DBICTest::Schema::Artist->add_relationship(
- onekeys => 'DBICTest::Schema::OneKey',
- { 'foreign.artist' => 'self.artistid' }
-);
-DBICTest::Schema::Artist->add_relationship(
- artist_undirected_maps => 'DBICTest::Schema::ArtistUndirectedMap',
- [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}],
- { accessor => 'multi' }
-);
-DBICTest::Schema::ArtistUndirectedMap->add_relationship(
- 'mapped_artists', 'DBICTest::Schema::Artist',
- [{'foreign.artistid' => 'self.id1'}, {'foreign.artistid' => 'self.id2'}]
-);
-DBICTest::Schema::CD->add_relationship(
- artist => 'DBICTest::Schema::Artist',
- { 'foreign.artistid' => 'self.artist' },
- { accessor => 'filter' },
-);
-DBICTest::Schema::CD->add_relationship(
- tracks => 'DBICTest::Schema::Track',
- { 'foreign.cd' => 'self.cdid' },
- { join_type => 'LEFT', cascade_delete => 1 }
-);
-DBICTest::Schema::CD->add_relationship(
- tags => 'DBICTest::Schema::Tag',
- { 'foreign.cd' => 'self.cdid' },
- { join_type => 'LEFT', cascade_delete => 1, cascade_copy => 1, accessor => 'multi', order_by => 'tag' }
-);
-#DBICTest::Schema::CD->might_have(liner_notes => 'DBICTest::Schema::LinerNotes' => qw/notes/);
-DBICTest::Schema::CD->add_relationship(
- liner_notes => 'DBICTest::Schema::LinerNotes',
- { 'foreign.liner_id' => 'self.cdid' },
- { join_type => 'LEFT', accessor => 'single' }
-);
-DBICTest::Schema::CD->add_relationship(
- cd_to_producer => 'DBICTest::Schema::CD_to_Producer',
- { 'foreign.cd' => 'self.cdid' },
- { join_type => 'LEFT', cascade_delete => 1 }
-);
-
-DBICTest::Schema::SelfRefAlias->add_relationship(
- self_ref => 'DBICTest::Schema::SelfRef',
- { 'foreign.id' => 'self.self_ref' },
- { accessor => 'single' }
-
-);
-DBICTest::Schema::SelfRefAlias->add_relationship(
- alias => 'DBICTest::Schema::SelfRef',
- { 'foreign.id' => 'self.alias' },
- { accessor => 'single' }
-);
-
-DBICTest::Schema::SelfRef->add_relationship(
- aliases => 'DBICTest::Schema::SelfRefAlias',
- { 'foreign.self_ref' => 'self.id' },
- { accessor => 'multi' }
-);
-
-DBICTest::Schema::Tag->add_relationship(
- cd => 'DBICTest::Schema::CD',
- { 'foreign.cdid' => 'self.cd' },
- { accessor => 'single' }
-);
-
-DBICTest::Schema::Track->add_relationship(
- cd => 'DBICTest::Schema::CD',
- { 'foreign.cdid' => 'self.cd' }
-);
-
-DBICTest::Schema::TwoKeys->add_relationship(
- artist => 'DBICTest::Schema::Artist',
- { 'foreign.artistid' => 'self.artist' }
-);
-DBICTest::Schema::TwoKeys->add_relationship(
- cd => 'DBICTest::Schema::CD',
- { 'foreign.cdid' => 'self.cd' }
-);
-
-DBICTest::Schema::CD_to_Producer->add_relationship(
- cd => 'DBICTest::Schema::CD',
- { 'foreign.cdid' => 'self.cd' }
-);
-DBICTest::Schema::CD_to_Producer->add_relationship(
- producer => 'DBICTest::Schema::Producer',
- { 'foreign.producerid' => 'self.producer' }
-);
-
-# now the Helpers
-DBICTest::Schema::CD->many_to_many( 'producers', 'cd_to_producer', 'producer');
-DBICTest::Schema::CD->many_to_many( 'producers_sorted', 'cd_to_producer', 'producer', { order_by => 'producer.name' });
-
-1;
use strict;
use warnings;
-__PACKAGE__->load_components(qw/PK::Auto Core/);
__PACKAGE__->table('bookmark');
__PACKAGE__->add_columns(qw/id link/);
__PACKAGE__->add_columns(
use base 'DBIx::Class::Core';
-__PACKAGE__->load_components('PK::Auto');
-
-DBICTest::Schema::CD->table('cd');
-DBICTest::Schema::CD->add_columns(
+__PACKAGE__->table('cd');
+__PACKAGE__->add_columns(
'cdid' => {
data_type => 'integer',
is_auto_increment => 1,
size => 100,
},
);
-DBICTest::Schema::CD->set_primary_key('cdid');
-DBICTest::Schema::CD->add_unique_constraint(artist_title => [ qw/artist title/ ]);
+__PACKAGE__->set_primary_key('cdid');
+__PACKAGE__->add_unique_constraint([ qw/artist title/ ]);
+
+__PACKAGE__->belongs_to( artist => 'DBICTest::Schema::Artist' );
+
+__PACKAGE__->has_many( tracks => 'DBICTest::Schema::Track' );
+__PACKAGE__->has_many(
+ tags => 'DBICTest::Schema::Tag', undef,
+ { order_by => 'tag' },
+);
+__PACKAGE__->has_many(
+ cd_to_producer => 'DBICTest::Schema::CD_to_Producer' => 'cd'
+);
+
+__PACKAGE__->might_have(
+ liner_notes => 'DBICTest::Schema::LinerNotes', undef,
+ { proxy => [ qw/notes/ ] },
+);
+__PACKAGE__->many_to_many( producers => cd_to_producer => 'producer' );
+__PACKAGE__->many_to_many(
+ producers_sorted => cd_to_producer => 'producer',
+ { order_by => 'producer.name' },
+);
1;
);
__PACKAGE__->set_primary_key(qw/cd producer/);
+__PACKAGE__->belongs_to(
+ 'cd', 'DBICTest::Schema::CD',
+ { 'foreign.cdid' => 'self.cd' }
+);
+
+__PACKAGE__->belongs_to(
+ 'producer', 'DBICTest::Schema::Producer',
+ { 'foreign.producerid' => 'self.producer' }
+);
+
1;
package # hide from PAUSE
DBICTest::Schema::Employee;
-use base 'DBIx::Class';
+use base 'DBIx::Class::Core';
-__PACKAGE__->load_components(qw( Ordered PK::Auto Core ));
+__PACKAGE__->load_components(qw( Ordered ));
__PACKAGE__->table('employee');
--- /dev/null
+package DBICTest::Schema::Event;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Core/;
+
+__PACKAGE__->load_components(qw/InflateColumn::DateTime/);
+
+__PACKAGE__->table('event');
+
+__PACKAGE__->add_columns(
+ id => { data_type => 'integer', is_auto_increment => 1 },
+ starts_at => { data_type => 'datetime' }
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;
+++ /dev/null
-package # hide from PAUSE
- DBICTest::Schema::HelperRels;
-
-use base 'DBIx::Class::Core';
-
-DBICTest::Schema::Artist->has_many(cds => 'DBICTest::Schema::CD', undef,
- { order_by => 'year' });
-DBICTest::Schema::Artist->has_many(twokeys => 'DBICTest::Schema::TwoKeys');
-DBICTest::Schema::Artist->has_many(onekeys => 'DBICTest::Schema::OneKey');
-
-DBICTest::Schema::CD->belongs_to('artist', 'DBICTest::Schema::Artist');
-
-DBICTest::Schema::CD->has_many(tracks => 'DBICTest::Schema::Track');
-DBICTest::Schema::CD->has_many(tags => 'DBICTest::Schema::Tag', undef,
- { order_by => 'tag' });
-DBICTest::Schema::CD->has_many(cd_to_producer => 'DBICTest::Schema::CD_to_Producer' => 'cd');
-
-DBICTest::Schema::CD->might_have(liner_notes => 'DBICTest::Schema::LinerNotes',
- undef, { proxy => [ qw/notes/ ] });
-
-DBICTest::Schema::SelfRefAlias->belongs_to(
- self_ref => 'DBICTest::Schema::SelfRef');
-DBICTest::Schema::SelfRefAlias->belongs_to(
- alias => 'DBICTest::Schema::SelfRef');
-
-DBICTest::Schema::SelfRef->has_many(
- aliases => 'DBICTest::Schema::SelfRefAlias' => 'self_ref');
-
-DBICTest::Schema::Tag->belongs_to('cd', 'DBICTest::Schema::CD');
-
-DBICTest::Schema::Track->belongs_to('cd', 'DBICTest::Schema::CD');
-DBICTest::Schema::Track->belongs_to('disc', 'DBICTest::Schema::CD', 'cd');
-
-DBICTest::Schema::TwoKeys->belongs_to('artist', 'DBICTest::Schema::Artist');
-DBICTest::Schema::TwoKeys->belongs_to('cd', 'DBICTest::Schema::CD');
-
-DBICTest::Schema::CD_to_Producer->belongs_to(
- 'cd', 'DBICTest::Schema::CD',
- { 'foreign.cdid' => 'self.cd' }
-);
-DBICTest::Schema::CD_to_Producer->belongs_to(
- 'producer', 'DBICTest::Schema::Producer',
- { 'foreign.producerid' => 'self.producer' }
-);
-DBICTest::Schema::Artist->has_many(
- 'artist_undirected_maps', 'DBICTest::Schema::ArtistUndirectedMap',
- [{'foreign.id1' => 'self.artistid'}, {'foreign.id2' => 'self.artistid'}],
- { cascade_copy => 0 } # this would *so* not make sense
-);
-DBICTest::Schema::ArtistUndirectedMap->belongs_to(
- 'artist1', 'DBICTest::Schema::Artist', 'id1');
-DBICTest::Schema::ArtistUndirectedMap->belongs_to(
- 'artist2', 'DBICTest::Schema::Artist', 'id2');
-DBICTest::Schema::ArtistUndirectedMap->has_many(
- 'mapped_artists', 'DBICTest::Schema::Artist',
- [{'foreign.artistid' => 'self.id1'}, {'foreign.artistid' => 'self.id2'}]);
-
-# now the Helpers
-DBICTest::Schema::CD->many_to_many( 'producers', 'cd_to_producer', 'producer');
-DBICTest::Schema::CD->many_to_many( 'producers_sorted', 'cd_to_producer', 'producer', { order_by => 'producer.name' });
-
-1;
use strict;
use warnings;
-__PACKAGE__->load_components(qw/PK::Auto Core/);
__PACKAGE__->table('link');
__PACKAGE__->add_columns(
'id' => {
use base 'DBIx::Class::Core';
-__PACKAGE__->load_components('PK::Auto');
-
DBICTest::Schema::OneKey->table('onekey');
DBICTest::Schema::OneKey->add_columns(
'id' => {
},
);
__PACKAGE__->set_primary_key('producerid');
+__PACKAGE__->add_unique_constraint(prod_name => [ qw/name/ ]);
1;
);\r
__PACKAGE__->set_primary_key('id');\r
\r
+__PACKAGE__->has_many( aliases => 'DBICTest::Schema::SelfRefAlias' => 'self_ref' );\r
+\r
1;\r
);\r
__PACKAGE__->set_primary_key(qw/self_ref alias/);\r
\r
+__PACKAGE__->belongs_to( self_ref => 'DBICTest::Schema::SelfRef' );\r
+__PACKAGE__->belongs_to( alias => 'DBICTest::Schema::SelfRef' );\r
+\r
1;\r
use base qw/DBIx::Class::Core/;
-__PACKAGE__->load_components('PK::Auto');
-
-DBICTest::Schema::Tag->table('tags');
-DBICTest::Schema::Tag->add_columns(
+__PACKAGE__->table('tags');
+__PACKAGE__->add_columns(
'tagid' => {
data_type => 'integer',
is_auto_increment => 1,
size => 100,
},
);
-DBICTest::Schema::Tag->set_primary_key('tagid');
+__PACKAGE__->set_primary_key('tagid');
+
+__PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD' );
1;
use base 'DBIx::Class::Core';
-DBICTest::Schema::Track->table('track');
-DBICTest::Schema::Track->add_columns(
+__PACKAGE__->table('track');
+__PACKAGE__->add_columns(
'trackid' => {
data_type => 'integer',
is_auto_increment => 1,
size => 100,
},
);
-DBICTest::Schema::Track->set_primary_key('trackid');
+__PACKAGE__->set_primary_key('trackid');
+
+__PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD' );
+__PACKAGE__->belongs_to( disc => 'DBICTest::Schema::CD' => 'cd');
1;
package # hide from PAUSE
DBICTest::Schema::TreeLike;
-use base qw/DBIx::Class/;
-
-__PACKAGE__->load_components(qw/PK::Auto::SQLite Core/);
+use base qw/DBIx::Class::Core/;
__PACKAGE__->table('treelike');
__PACKAGE__->add_columns(
package # hide from PAUSE
DBICTest::Schema::TwoKeyTreeLike;
-use base qw/DBIx::Class/;
-
-__PACKAGE__->load_components(qw/Core/);
+use base qw/DBIx::Class::Core/;
__PACKAGE__->table('twokeytreelike');
__PACKAGE__->add_columns(
use base 'DBIx::Class::Core';
-DBICTest::Schema::TwoKeys->table('twokeys');
-DBICTest::Schema::TwoKeys->add_columns(
+__PACKAGE__->table('twokeys');
+__PACKAGE__->add_columns(
'artist' => { data_type => 'integer' },
'cd' => { data_type => 'integer' },
);
-DBICTest::Schema::TwoKeys->set_primary_key(qw/artist cd/);
+__PACKAGE__->set_primary_key(qw/artist cd/);
+
+__PACKAGE__->belongs_to( artist => 'DBICTest::Schema::Artist' );
+__PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD' );
1;
+++ /dev/null
-use strict;
-use warnings;
-use DBICTest;
-
-my $schema = DBICTest->initialise;
-
-# $schema->storage->on_connect_do([ "PRAGMA synchronous = OFF" ]);
-
-my $dbh = $schema->storage->dbh;
-
-if ($ENV{"DBICTEST_SQLT_DEPLOY"}) {
- $schema->deploy;
-} else {
- open IN, "t/lib/sqlite.sql";
-
- my $sql;
-
- { local $/ = undef; $sql = <IN>; }
-
- close IN;
-
- $dbh->do($_) for split(/;\n/, $sql);
-}
-
-$schema->storage->dbh->do("PRAGMA synchronous = OFF");
-
-$schema->populate('Artist', [
- [ qw/artistid name/ ],
- [ 1, 'Caterwauler McCrae' ],
- [ 2, 'Random Boy Band' ],
- [ 3, 'We Are Goth' ],
-]);
-
-$schema->populate('CD', [
- [ qw/cdid artist title year/ ],
- [ 1, 1, "Spoonful of bees", 1999 ],
- [ 2, 1, "Forkful of bees", 2001 ],
- [ 3, 1, "Caterwaulin' Blues", 1997 ],
- [ 4, 2, "Generic Manufactured Singles", 2001 ],
- [ 5, 3, "Come Be Depressed With Us", 1998 ],
-]);
-
-$schema->populate('LinerNotes', [
- [ qw/liner_id notes/ ],
- [ 2, "Buy Whiskey!" ],
- [ 4, "Buy Merch!" ],
- [ 5, "Kill Yourself!" ],
-]);
-
-$schema->populate('Tag', [
- [ qw/tagid cd tag/ ],
- [ 1, 1, "Blue" ],
- [ 2, 2, "Blue" ],
- [ 3, 3, "Blue" ],
- [ 4, 5, "Blue" ],
- [ 5, 2, "Cheesy" ],
- [ 6, 4, "Cheesy" ],
- [ 7, 5, "Cheesy" ],
- [ 8, 2, "Shiny" ],
- [ 9, 4, "Shiny" ],
-]);
-
-$schema->populate('TwoKeys', [
- [ qw/artist cd/ ],
- [ 1, 1 ],
- [ 1, 2 ],
- [ 2, 2 ],
-]);
-
-$schema->populate('FourKeys', [
- [ qw/foo bar hello goodbye/ ],
- [ 1, 2, 3, 4 ],
- [ 5, 4, 3, 6 ],
-]);
-
-$schema->populate('OneKey', [
- [ qw/id artist cd/ ],
- [ 1, 1, 1 ],
- [ 2, 1, 2 ],
- [ 3, 2, 2 ],
-]);
-
-$schema->populate('SelfRef', [
- [ qw/id name/ ],
- [ 1, 'First' ],
- [ 2, 'Second' ],
-]);
-
-$schema->populate('SelfRefAlias', [
- [ qw/self_ref alias/ ],
- [ 1, 2 ]
-]);
-
-$schema->populate('ArtistUndirectedMap', [
- [ qw/id1 id2/ ],
- [ 1, 2 ]
-]);
-
-$schema->populate('Producer', [
- [ qw/producerid name/ ],
- [ 1, 'Matt S Trout' ],
- [ 2, 'Bob The Builder' ],
- [ 3, 'Fred The Phenotype' ],
-]);
-
-$schema->populate('CD_to_Producer', [
- [ qw/cd producer/ ],
- [ 1, 1 ],
- [ 1, 2 ],
- [ 1, 3 ],
-]);
-
-$schema->populate('TreeLike', [
- [ qw/id parent name/ ],
- [ 1, 0, 'foo' ],
- [ 2, 1, 'bar' ],
- [ 3, 2, 'baz' ],
- [ 4, 3, 'quux' ],
-]);
-
-$schema->populate('Track', [
- [ qw/trackid cd position title/ ],
- [ 4, 2, 1, "Stung with Success"],
- [ 5, 2, 2, "Stripy"],
- [ 6, 2, 3, "Sticky Honey"],
- [ 7, 3, 1, "Yowlin"],
- [ 8, 3, 2, "Howlin"],
- [ 9, 3, 3, "Fowlin"],
- [ 10, 4, 1, "Boring Name"],
- [ 11, 4, 2, "Boring Song"],
- [ 12, 4, 3, "No More Ideas"],
- [ 13, 5, 1, "Sad"],
- [ 14, 5, 2, "Under The Weather"],
- [ 15, 5, 3, "Suicidal"],
- [ 16, 1, 1, "The Bees Knees"],
- [ 17, 1, 2, "Apiary"],
- [ 18, 1, 3, "Beehind You"],
-]);
-
-$schema->populate('Link', [
- [ qw/id title/ ],
- [ 1, 'aaa' ]
-]);
-
-$schema->populate('Bookmark', [
- [ qw/id link/ ],
- [ 1, 1 ]
-]);
-
-1;
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Sun May 14 18:25:49 2006
+-- Created on Sat May 27 21:28:05 2006
--
BEGIN TRANSACTION;
);
--
+-- Table: event
+--
+CREATE TABLE event (
+ id INTEGER PRIMARY KEY NOT NULL,
+ starts_at datetime NOT NULL
+);
+
+--
-- Table: twokeys
--
CREATE TABLE twokeys (
);
CREATE UNIQUE INDEX tktlnameunique_twokeytreelike on twokeytreelike (name);
-CREATE UNIQUE INDEX artist_title_cd on cd (artist, title);
+CREATE UNIQUE INDEX cd_artist_title_cd on cd (artist, title);
+CREATE UNIQUE INDEX prod_name_producer on producer (name);
COMMIT;
+++ /dev/null
-sub run_tests {
- my $schema = shift;
-
- eval 'use Encode ; 1'
- or plan skip_all, 'Install Encode run this test';
-
- plan tests => 2;
-
- DBICTest::Schema::Artist->load_components('UTF8Columns');
- DBICTest::Schema::Artist->utf8_columns('name');
- Class::C3->reinitialize();
-
- my $artist = $schema->resultset("Artist")->create( { name => 'uni' } );
- ok( Encode::is_utf8( $artist->name ), 'got name with utf8 flag' );
-
- my $utf8_char = 'uniuni';
- Encode::_utf8_on($utf8_char);
- $artist->name($utf8_char);
- ok( !Encode::is_utf8( $artist->{_column_data}->{name} ),
- 'store utf8 less chars' );
-}
-
-1;
+++ /dev/null
-sub run_tests {
-my $schema = shift;
-
-plan tests => 5;
-
-my $rs = $cd = $schema->resultset("CD")->search({});
-
-my $rs_title = $rs->get_column('title');
-my $rs_year = $rs->get_column('year');
-
-is($rs_title->next, 'Spoonful of bees', "next okay");
-
-my @all = $rs_title->all;
-cmp_ok(scalar @all, '==', 5, "five titles returned");
-
-cmp_ok($rs_year->max, '==', 2001, "max okay for year");
-is($rs_title->min, 'Caterwaulin\' Blues', "min okay for title");
-
-cmp_ok($rs_year->sum, '==', 9996, "three artists returned");
-
-}
-
-1;
+++ /dev/null
-# vim: filetype=perl
-
-sub run_tests {
-
- eval 'require JSON';
- plan skip_all, 'Install JSON to run this test' if ($@);
-
- eval 'require Text::CSV_XS';
- if ($@) {
- eval 'require Text::CSV_PP';
- plan skip_all, 'Install Text::CSV_XS or Text::CSV_PP to run this test' if ($@);
- }
-
- plan tests => 5;
- my $schema = shift;
-
- my $employees = $schema->resultset('Employee');
- my $cmd = qq|script/dbicadmin --schema=DBICTest::Schema --class=Employee --tlibs --connect='["dbi:SQLite:dbname=t/var/DBIxClass.db","",""]' --force --tlibs|;
-
- `$cmd --op=insert --set='{name:"Matt"}'`;
- ok( ($employees->count()==1), 'insert count' );
-
- my $employee = $employees->find(1);
- ok( ($employee->name() eq 'Matt'), 'insert valid' );
-
- `$cmd --op=update --set='{name:"Trout"}'`;
- $employee = $employees->find(1);
- ok( ($employee->name() eq 'Trout'), 'update' );
-
- `$cmd --op=insert --set='{name:"Aran"}'`;
- my $data = `$cmd --op=select --attrs='{order_by:"name"}'`;
- ok( ($data=~/Aran.*Trout/s), 'select with attrs' );
-
- `$cmd --op=delete --where='{name:"Trout"}'`;
- ok( ($employees->count()==1), 'delete' );
-}
-
-1;