Revision history for DBIx::Class
+ - add support to Ordered for multiple ordering columns
+ - mark DB.pm and compose_connection as deprecated
+ - switch tests to compose_namespace
+
0.07999_01 2006-10-05 21:00:00
- add connect_info option "disable_statement_caching"
- create insert_bulk using execute_array, populate uses it
These accessors no longer automatically require the classes when
set.
+0.07004
+ - fix find_related-based queries to correctly grep the unique key
+
0.07003 2006-11-16 11:52:00
- fix for rt.cpan.org #22740 (use $^X instead of hardcoded "perl")
- Tweaks to resultset to allow inflate_result to return an array
requires 'DBI' => 1.40;
requires 'Module::Find' => 0;
requires 'Class::Inspector' => 0;
-requires 'Class::Accessor::Grouped' => 0;
+requires 'Class::Accessor::Grouped' => 0.03;
# Perl 5.8.0 doesn't have utf8::is_utf8()
requires 'Encode' => 0 if ($] <= 5.008000);
LTJake: Brian Cassidy <bricas@cpan.org>
+ned: Neil de Carteret
+
nigel: Nigel Metheringham <nigelm@cpan.org>
ningu: David Kamholz <dkamholz@cpan.org>
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/
- Serialize::Storable
Relationship
InflateColumn
PK::Auto
PK
Row
- ResultSourceProxy::Table
- /);
+ ResultSourceProxy::Table/);
1;
use DBIx::Class::Storage::DBI;
use DBIx::Class::ClassResolver::PassThrough;
use DBI;
+use Scalar::Util;
+
+unless ($INC{"DBIx/Class/CDBICompat.pm"}) {
+ warn "IMPORTANT: DBIx::Class::DB is DEPRECATED AND *WILL* BE REMOVED. DO NOT USE.\n";
+}
__PACKAGE__->load_components(qw/ResultSetProxy/);
DBIx::Class::DB - (DEPRECATED) classdata schema component
-=head1 SYNOPSIS
-
- package MyDB;
-
- use base qw/DBIx::Class/;
- __PACKAGE__->load_components('DB');
-
- __PACKAGE__->connection('dbi:...', 'user', 'pass', \%attrs);
-
- package MyDB::MyTable;
-
- use base qw/MyDB/;
- __PACKAGE__->load_components('Core'); # just load this in MyDB if it will
- # always be there
-
- ...
-
=head1 DESCRIPTION
This class is designed to support the Class::DBI connection-as-classdata style
for DBIx::Class. You are *strongly* recommended to use a DBIx::Class::Schema
instead; DBIx::Class::DB will not undergo new development and will be moved
-to being a CDBICompat-only component before 1.0.
+to being a CDBICompat-only component before 1.0. To discourage further
+use, its documentation has been removed as of 0.08000.
+
+=begin HIDE_BECAUSE_THIS_CLASS_IS_DEPRECATED
=head1 METHODS
=cut
sub resultset_instance {
- my $class = ref $_[0] || $_[0];
- my $source = $class->result_source_instance;
+ $_[0]->result_source_instance->resultset
+}
+
+sub result_source_instance {
+ my $class = shift;
+ $class = ref $class || $class;
+
+ __PACKAGE__->mk_classdata(qw/_result_source_instance/)
+ unless __PACKAGE__->can('_result_source_instance');
+
+
+ return $class->_result_source_instance(@_) if @_;
+
+ my $source = $class->_result_source_instance;
+ return {} unless Scalar::Util::blessed($source);
+
if ($source->result_class ne $class) {
- $source = $source->new($source);
- $source->result_class($class);
+    # Create a source instance owned by this class, so we don't recurse
+    # into (or clobber) a parent class's _result_source_instance
+ $source = $source->new({
+ %$source,
+ source_name => $class,
+ result_class => $class
+ } );
+ $class->_result_source_instance($source);
+ if (my $coderef = $class->can('schema_instance')) {
+ $coderef->($class)->register_class($class, $class);
+ }
}
- return $source->resultset;
+ return $source;
}
=head2 resolve_class
Alias for L<txn_rollback>
+=end HIDE_BECAUSE_THIS_CLASS_IS_DEPRECATED
+
=head1 AUTHORS
Matt S. Trout <mst@shadowcatsystems.co.uk>
=head1 NAME
-DBIx::Class::InflateColumn - Automatically create objects from column data
+DBIx::Class::InflateColumn - Automatically create references from column data
=head1 SYNOPSIS
=head1 DESCRIPTION
-This component translates column data into objects, i.e. "inflating"
-the column data. It also "deflates" objects into an appropriate format
+This component translates column data into references, i.e. "inflating"
+the column data. It also "deflates" references into an appropriate format
for the database.
It can be used, for example, to automatically convert to and from
L<DateTime> objects for your date and time fields.
+It will accept arrayrefs, hashrefs and blessed references (objects),
+but not scalarrefs. Scalar references are passed through to the
+database to deal with, to allow such settings as C< \'year + 1'> and
+C< \'DEFAULT' > to work.
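+
+For example, a minimal sketch (the C<year> column and its registered
+inflator are assumptions for illustration):
+
+    $row->update({ year => \'year + 1' });   # scalar ref: literal SQL, passed through
+    $row->update({ year => $datetime_obj }); # blessed ref: deflated for the database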
+
=head1 METHODS
=head2 inflate_column
sub _deflated_column {
my ($self, $col, $value) = @_;
- return $value unless ref $value; # If it's not an object, don't touch it
+  ## Leave scalar refs (a la SQL::Abstract literal SQL) untouched; deflate all other refs
+  return $value unless (ref $value && ref($value) ne 'SCALAR');
my $info = $self->column_info($col) or
$self->throw_exception("No column info for $col");
return $value unless exists $info->{_inflate_info};
=cut
sub set_inflated_column {
- my ($self, $col, $obj) = @_;
- $self->set_column($col, $self->_deflated_column($col, $obj));
- if (blessed $obj) {
- $self->{_inflated_column}{$col} = $obj;
+ my ($self, $col, $inflated) = @_;
+ $self->set_column($col, $self->_deflated_column($col, $inflated));
+  if (ref $inflated && ref($inflated) ne 'SCALAR') {
+ $self->{_inflated_column}{$col} = $inflated;
} else {
delete $self->{_inflated_column}{$col};
}
- return $obj;
+ return $inflated;
}
=head2 store_inflated_column
=cut
sub store_inflated_column {
- my ($self, $col, $obj) = @_;
- unless (blessed $obj) {
+ my ($self, $col, $inflated) = @_;
+  unless (ref $inflated && ref($inflated) ne 'SCALAR') {
delete $self->{_inflated_column}{$col};
- $self->store_column($col => $obj);
- return $obj;
+ $self->store_column($col => $inflated);
+ return $inflated;
}
delete $self->{_column_data}{$col};
- return $self->{_inflated_column}{$col} = $obj;
-}
-
-=head2 get_column
-
-Gets a column value in the same way as L<DBIx::Class::Row/get_column>. If there
-is an inflated value stored that has not yet been deflated, it is deflated
-when the method is invoked.
-
-=cut
-
-sub get_column {
- my ($self, $col) = @_;
- if (exists $self->{_inflated_column}{$col}
- && !exists $self->{_column_data}{$col}) {
- $self->store_column($col, $self->_deflated_column($col, $self->{_inflated_column}{$col}));
- }
- return $self->next::method($col);
-}
-
-=head2 get_columns
-
-Returns the get_column info for all columns as a hash,
-just like L<DBIx::Class::Row/get_columns>. Handles inflation just
-like L</get_column>.
-
-=cut
-
-sub get_columns {
- my $self = shift;
- if (exists $self->{_inflated_column}) {
- foreach my $col (keys %{$self->{_inflated_column}}) {
- $self->store_column($col, $self->_deflated_column($col, $self->{_inflated_column}{$col}))
- unless exists $self->{_column_data}{$col};
- }
- }
- return $self->next::method;
-}
-
-=head2 has_column_loaded
-
-Like L<DBIx::Class::Row/has_column_loaded>, but also returns true if there
-is an inflated value stored.
-
-=cut
-
-sub has_column_loaded {
- my ($self, $col) = @_;
- return 1 if exists $self->{_inflated_column}{$col};
- return $self->next::method($col);
-}
-
-=head2 update
-
-Updates a row in the same way as L<DBIx::Class::Row/update>, handling
-inflation and deflation of columns appropriately.
-
-=cut
-
-sub update {
- my ($class, $attrs, @rest) = @_;
- foreach my $key (keys %{$attrs||{}}) {
- if (ref $attrs->{$key} && $class->has_column($key)
- && exists $class->column_info($key)->{_inflate_info}) {
- $class->set_inflated_column($key, delete $attrs->{$key});
- }
- }
- return $class->next::method($attrs, @rest);
-}
-
-=head2 new
-
-Creates a row in the same way as L<DBIx::Class::Row/new>, handling
-inflation and deflation of columns appropriately.
-
-=cut
-
-sub new {
- my ($class, $attrs, @rest) = @_;
- my $inflated;
- foreach my $key (keys %{$attrs||{}}) {
- $inflated->{$key} = delete $attrs->{$key}
- if ref $attrs->{$key} && $class->has_column($key)
- && exists $class->column_info($key)->{_inflate_info};
- }
- my $obj = $class->next::method($attrs, @rest);
- $obj->{_inflated_column} = $inflated if $inflated;
- return $obj;
+ return $self->{_inflated_column}{$col} = $inflated;
}
=head1 SEE ALSO
Daniel Westermann-Clark <danieltwc@cpan.org> (documentation)
+Jess Robinson <cpan@desert-island.demon.co.uk>
+
=head1 LICENSE
You may distribute this code under the same terms as Perl itself.
sub ident_condition {
my ($self, $alias) = @_;
my %cond;
- $cond{(defined $alias ? "${alias}.$_" : $_)} = $self->get_column($_)
- for $self->primary_columns;
+ my $prefix = defined $alias ? $alias.'.' : '';
+ $cond{$prefix.$_} = $self->get_column($_) for $self->primary_columns;
return \%cond;
}
}
}
-sub new {
- my ($class, $attrs, @rest) = @_;
- my ($related, $info);
- foreach my $key (keys %{$attrs||{}}) {
- next unless $info = $class->relationship_info($key);
- $related->{$key} = delete $attrs->{$key}
- if ref $attrs->{$key}
- && $info->{attrs}{accessor}
- && $info->{attrs}{accessor} eq 'single';
- }
- my $obj = $class->next::method($attrs, @rest);
- if ($related) {
- $obj->{_relationship_data} = $related;
- foreach my $rel (keys %$related) {
- $obj->set_from_related($rel, $related->{$rel});
- }
- }
- return $obj;
-}
-
-sub update {
- my ($obj, $attrs, @rest) = @_;
- my $info;
- foreach my $key (keys %{$attrs||{}}) {
- next unless $info = $obj->relationship_info($key);
- if (ref $attrs->{$key} && $info->{attrs}{accessor}
- && $info->{attrs}{accessor} eq 'single') {
- my $rel = delete $attrs->{$key};
- $obj->set_from_related($key => $rel);
- $obj->{_relationship_data}{$key} = $rel;
- }
- }
- return $obj->next::method($attrs, @rest);
-}
-
1;
"${class} has more"
) if $too_many;
+ $class->throw_exception(
+ "has_many needs a primary key to infer a join; ".
+ "${class} has none"
+ ) if !defined $pri && (!defined $cond || !length $cond);
+
my ($f_key,$guess);
if (defined $cond && length $cond) {
$f_key = $cond;
unless (ref $cond) {
$class->ensure_class_loaded($f_class);
my ($pri, $too_many) = $class->primary_columns;
+
$class->throw_exception(
"might_have/has_one can only infer join for a single primary key; ".
"${class} has more"
) if $too_many;
+
+ $class->throw_exception(
+ "might_have/has_one needs a primary key to infer a join; ".
+ "${class} has none"
+ ) if !defined $pri && (!defined $cond || !length $cond);
+
my $f_class_loaded = eval { $f_class->columns };
my ($f_key,$guess);
if (defined $cond && length $cond) {
sub many_to_many {
my ($class, $meth, $rel, $f_rel, $rel_attrs) = @_;
+
+ $class->throw_exception(
+ "missing relation in many-to-many"
+ ) unless $rel;
+
+ $class->throw_exception(
+ "missing foreign relation in many-to-many"
+ ) unless $f_rel;
+
{
no strict 'refs';
no warnings 'redefine';
use Data::Page;
use Storable;
use DBIx::Class::ResultSetColumn;
+use DBIx::Class::ResultSourceHandle;
use base qw/DBIx::Class/;
-__PACKAGE__->mk_group_accessors('simple' => qw/result_source result_class/);
+__PACKAGE__->mk_group_accessors('simple' => qw/result_class _source_handle/);
=head1 NAME
return $class->new_result(@_) if ref $class;
my ($source, $attrs) = @_;
- #weaken $source;
+ $source = $source->handle
+ unless $source->isa('DBIx::Class::ResultSourceHandle');
+ $attrs = { %{$attrs||{}} };
if ($attrs->{page}) {
$attrs->{rows} ||= 10;
$attrs->{alias} ||= 'me';
my $self = {
- result_source => $source,
- result_class => $attrs->{result_class} || $source->result_class,
+ _source_handle => $source,
+ result_class => $attrs->{result_class} || $source->resolve->result_class,
cond => $attrs->{where},
count => undef,
pager => undef,
: $having);
}
- my $rs = (ref $self)->new($self->result_source, $new_attrs);
+ my $rs = (ref $self)->new($self->_source_handle, $new_attrs);
if ($rows) {
$rs->set_cache($rows);
}
? ($attrs->{key})
: $self->result_source->unique_constraint_names;
+ my $where = $self->_collapse_cond($self->{attrs}{where} || {});
+ my $num_where = scalar keys %$where;
+
my @unique_queries;
foreach my $name (@constraint_names) {
my @unique_cols = $self->result_source->unique_constraint_columns($name);
my $unique_query = $self->_build_unique_query($query, \@unique_cols);
+ my $num_cols = scalar @unique_cols;
my $num_query = scalar keys %$unique_query;
- next unless $num_query;
- # XXX: Assuming quite a bit about $self->{attrs}{where}
- my $num_cols = scalar @unique_cols;
- my $num_where = exists $self->{attrs}{where}
- ? scalar keys %{ $self->{attrs}{where} }
- : 0;
- push @unique_queries, $unique_query
- if $num_query + $num_where == $num_cols;
+ my $total = $num_query + $num_where;
+ if ($num_query && ($num_query == $num_cols || $total == $num_cols)) {
+ # The query is either unique on its own or is unique in combination with
+ # the existing where clause
+ push @unique_queries, $unique_query;
+ }
}
return @unique_queries;
sub _construct_object {
my ($self, @row) = @_;
my $info = $self->_collapse_result($self->{_attrs}{as}, \@row);
- my @new = $self->result_class->inflate_result($self->result_source, @$info);
+ my @new = $self->result_class->inflate_result($self->_source_handle, @$info);
@new = $self->{_attrs}{record_filter}->(@new)
if exists $self->{_attrs}{record_filter};
return @new;
# offset, order by and page are not needed to count. record_filter is cdbi
delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/;
- my $tmp_rs = (ref $self)->new($self->result_source, $attrs);
+ my $tmp_rs = (ref $self)->new($self->_source_handle, $attrs);
my ($count) = $tmp_rs->cursor->next;
return $count;
}
sub page {
my ($self, $page) = @_;
- return (ref $self)->new($self->result_source, { %{$self->{attrs}}, page => $page });
+ return (ref $self)->new($self->_source_handle, { %{$self->{attrs}}, page => $page });
}
=head2 new_result
my %new = (
%{ $self->_remove_alias($values, $alias) },
%{ $self->_remove_alias($collapsed_cond, $alias) },
- -result_source => $self->result_source,
);
- my $obj = $self->result_class->new(\%new);
- return $obj;
+ return $self->result_class->new(\%new,$self->_source_handle);
}
# _collapse_cond
my $rel_obj = $self->result_source->relationship_info($rel);
$self->throw_exception(
- "search_related: result source '" . $self->result_source->name .
+ "search_related: result source '" . $self->_source_handle->source_moniker .
"' has no such relationship $rel")
unless $rel_obj;
my $join_count = $seen->{$rel};
my $alias = ($join_count > 1 ? join('_', $rel, $join_count) : $rel);
- $self->result_source->schema->resultset($rel_obj->{class})->search_rs(
+ $self->_source_handle->schema->resultset($rel_obj->{class})->search_rs(
undef, {
%{$self->{attrs}||{}},
join => undef,
return $self->{_attrs} if $self->{_attrs};
my $attrs = { %{$self->{attrs}||{}} };
- my $source = $self->{result_source};
+ my $source = $self->result_source;
my $alias = $attrs->{alias};
$attrs->{columns} ||= delete $attrs->{cols} if exists $attrs->{cols};
}
}
+sub result_source {
+ my $self = shift;
+
+ if (@_) {
+ $self->_source_handle($_[0]->handle);
+ } else {
+ $self->_source_handle->resolve;
+ }
+}
+
=head2 throw_exception
See L<DBIx::Class::Schema/throw_exception> for details.
sub throw_exception {
my $self=shift;
- $self->result_source->schema->throw_exception(@_);
+ $self->_source_handle->schema->throw_exception(@_);
}
# XXX: FIXME: Attributes docs need clearing up
sub new {
my ($class, $rs, $column) = @_;
$class = ref $class if ref $class;
-
- my $object_ref = { _column => $column,
- _parent_resultset => $rs };
-
- my $new = bless $object_ref, $class;
- $new->throw_exception("column must be supplied") unless ($column);
+ my $new = bless { _column => $column, _parent_resultset => $rs }, $class;
+ $new->throw_exception("column must be supplied") unless $column;
return $new;
}
sub next {
my $self = shift;
-
$self->{_resultset} = $self->{_parent_resultset}->search(undef, {select => [$self->{_column}], as => [$self->{_column}]}) unless ($self->{_resultset});
my ($row) = $self->{_resultset}->cursor->next;
return $row;
=cut
sub min {
- my $self = shift;
- return $self->func('MIN');
+ return shift->func('MIN');
}
=head2 max
=cut
sub max {
- my $self = shift;
- return $self->func('MAX');
+ return shift->func('MAX');
}
=head2 sum
=cut
sub sum {
- my $self = shift;
- return $self->func('SUM');
+ return shift->func('SUM');
}
=head2 func
=cut
sub func {
- my $self = shift;
- my $function = shift;
-
+ my ($self,$function) = @_;
my ($row) = $self->{_parent_resultset}->search(undef, {select => {$function => $self->{_column}}, as => [$self->{_column}]})->cursor->next;
return $row;
}
use warnings;
use DBIx::Class::ResultSet;
+use DBIx::Class::ResultSourceHandle;
use Carp::Clan qw/^DBIx::Class/;
use Storable;
__PACKAGE__->mk_group_accessors('simple' => qw/_ordered_columns
_columns _primaries _unique_constraints name resultset_attributes
- schema from _relationships column_info_from_storage source_name
- source_info/);
+ schema from _relationships column_info_from_storage source_info/);
__PACKAGE__->mk_group_accessors('inherited' => qw/resultset_class
result_class/);
+__PACKAGE__->mk_group_ro_accessors('simple' => qw/source_name/);
+
=head1 NAME
DBIx::Class::ResultSource - Result source object
my ($class, $attrs) = @_;
$class = ref $class if ref $class;
- my $new = { %{$attrs || {}}, _resultset => undef };
- bless $new, $class;
-
+ my $new = bless { %{$attrs || {}} }, $class;
$new->{resultset_class} ||= 'DBIx::Class::ResultSet';
$new->{resultset_attributes} = { %{$new->{resultset_attributes} || {}} };
$new->{_ordered_columns} = [ @{$new->{_ordered_columns}||[]}];
'call it on the schema instead.'
) if scalar @_;
- # disabled until we can figure out a way to do it without consistency issues
- #
- #return $self->{_resultset}
- # if ref $self->{_resultset} eq $self->resultset_class;
- #return $self->{_resultset} =
-
return $self->resultset_class->new(
$self, $self->{resultset_attributes}
);
# from your schema...
$schema->resultset('Books')->find(1);
+=head2 handle
+
+Obtain a new handle to this source. Returns an instance of a
+L<DBIx::Class::ResultSourceHandle>.
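+
+For example (a sketch, assuming a registered C<Artist> source):
+
+    my $handle = $schema->source('Artist')->handle;
+    my $source = $handle->resolve; # back to the ResultSource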
+
+=cut
+
+sub handle {
+  return DBIx::Class::ResultSourceHandle->new({
+ schema => $_[0]->schema,
+ source_moniker => $_[0]->source_name
+ });
+}
+
=head2 throw_exception
See L<DBIx::Class::Schema/"throw_exception">.
--- /dev/null
+package DBIx::Class::ResultSourceHandle;
+
+use strict;
+use warnings;
+use Storable;
+
+use base qw/DBIx::Class/;
+
+use overload
+ q/""/ => sub { __PACKAGE__ . ":" . shift->source_moniker; },
+ fallback => 1;
+
+__PACKAGE__->mk_group_accessors('simple' => qw/schema source_moniker/);
+
+=head1 NAME
+
+DBIx::Class::ResultSourceHandle - Serializable pointers to ResultSource instances
+
+=head1 DESCRIPTION
+
+This module removes the fixed link between Rows/ResultSets and the actual
+source objects, which gets around the following problems:
+
+=over 4
+
+=item *
+
+Needing to keep C<$schema> in scope, since any objects/result_sets
+will have a C<$schema> object through their source handle
+
+=item *
+
+Large output when using Data::Dump(er) since this class can be set to
+stringify to almost nothing
+
+=item *
+
+Closer to being able to do a Serialize::Storable that doesn't require class-based connections
+
+=back
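+
+For example, a sketch of round-tripping a handle through L<Storable> (the
+schema is not serialized, so the application re-attaches it after thaw):
+
+    use Storable qw/freeze thaw/;
+    my $ice    = freeze($schema->source('Artist')->handle);
+    my $handle = thaw($ice);
+    $handle->schema($schema); # re-attach the schema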
+
+=head1 METHODS
+
+=head2 new
+
+Constructor. Expects a hashref with C<schema> and C<source_moniker> keys.
+
+=cut
+
+sub new {
+ my ($class, $data) = @_;
+
+ $class = ref $class if ref $class;
+
+ bless $data, $class;
+}
+
+=head2 resolve
+
+Resolve the moniker into the actual ResultSource object
+
+=cut
+
+sub resolve { return $_[0]->schema->source($_[0]->source_moniker) }
+
+sub STORABLE_freeze {
+ my ($self, $cloning) = @_;
+ my $to_serialize = { %$self };
+ delete $to_serialize->{schema};
+ return (Storable::freeze($to_serialize));
+}
+
+sub STORABLE_thaw {
+ my ($self, $cloning,$ice) = @_;
+ %$self = %{ Storable::thaw($ice) };
+}
+
+1;
use warnings;
use base qw/DBIx::Class/;
+use Scalar::Util qw/blessed/;
+use Carp::Clan qw/^DBIx::Class/;
sub iterator_class { shift->result_source_instance->resultset_class(@_) }
sub resultset_class { shift->result_source_instance->resultset_class(@_) }
sub result_class { shift->result_source_instance->result_class(@_) }
-sub source_name { shift->result_source_instance->source_name(@_) }
sub source_info { shift->result_source_instance->source_info(@_) }
+sub set_inherited_ro_instance {
+ my $self = shift;
+
+ croak "Cannot set @{[shift]} on an instance" if blessed $self;
+
+ return $self->set_inherited(@_);
+}
+
+sub get_inherited_ro_instance {
+ return shift->get_inherited(@_);
+}
+
+__PACKAGE__->mk_group_accessors('inherited_ro_instance' => 'source_name');
+
+
sub resultset_attributes {
shift->result_source_instance->resultset_attributes(@_);
}
source_name => undef,
});
}
- $class->mk_classdata('result_source_instance' => $table);
+
+ $class->mk_classdata('result_source_instance')
+ unless $class->can('result_source_instance');
+
+ $class->result_source_instance($table);
+
if ($class->can('schema_instance')) {
$class =~ m/([^:]+)$/;
$class->schema_instance->register_class($class, $class);
use base qw/DBIx::Class/;
use Carp::Clan qw/^DBIx::Class/;
-__PACKAGE__->mk_group_accessors('simple' => 'result_source');
+__PACKAGE__->mk_group_accessors('simple' => qw/_source_handle/);
=head1 NAME
=cut
sub new {
- my ($class, $attrs) = @_;
+ my ($class, $attrs, $source) = @_;
$class = ref $class if ref $class;
my $new = { _column_data => {} };
bless $new, $class;
+ $new->_source_handle($source) if $source;
+
if ($attrs) {
$new->throw_exception("attrs must be a hashref")
unless ref($attrs) eq 'HASH';
+
+    # pull out -result_source before validating column names below
+    if (my $source = delete $attrs->{-result_source}) {
+      $new->result_source($source);
+    }
+
+    my ($related,$inflated);
+ foreach my $key (keys %$attrs) {
+ if (ref $attrs->{$key}) {
+ my $info = $class->relationship_info($key);
+ if ($info && $info->{attrs}{accessor}
+ && $info->{attrs}{accessor} eq 'single')
+ {
+ $new->set_from_related($key, $attrs->{$key});
+ $related->{$key} = $attrs->{$key};
+ next;
+ }
+ elsif ($class->has_column($key)
+ && exists $class->column_info($key)->{_inflate_info})
+ {
+ $inflated->{$key} = $attrs->{$key};
+ next;
+ }
+ }
+ $new->throw_exception("No such column $key on $class")
+ unless $class->has_column($key);
+ $new->store_column($key => $attrs->{$key});
+ }
-    if (my $source = delete $attrs->{-result_source}) {
-      $new->result_source($source);
-    }
- foreach my $k (keys %$attrs) {
- $new->throw_exception("No such column $k on $class")
- unless $class->has_column($k);
- $new->store_column($k => $attrs->{$k});
- }
+
+ $new->{_relationship_data} = $related if $related;
+ $new->{_inflated_column} = $inflated if $inflated;
}
return $new;
sub insert {
my ($self) = @_;
return $self if $self->in_storage;
- $self->{result_source} ||= $self->result_source_instance
+ my $source = $self->result_source;
+ $source ||= $self->result_source($self->result_source_instance)
if $self->can('result_source_instance');
- my $source = $self->{result_source};
$self->throw_exception("No result_source set on this object; can't insert")
unless $source;
#use Data::Dumper; warn Dumper($self);
$self->in_storage(1);
$self->{_dirty_columns} = {};
$self->{related_resultsets} = {};
+ undef $self->{_orig_ident};
return $self;
}
sub update {
my ($self, $upd) = @_;
$self->throw_exception( "Not in database" ) unless $self->in_storage;
- $self->set_columns($upd) if $upd;
- my %to_update = $self->get_dirty_columns;
- return $self unless keys %to_update;
my $ident_cond = $self->ident_condition;
$self->throw_exception("Cannot safely update a row in a PK-less table")
if ! keys %$ident_cond;
+ if ($upd) {
+ foreach my $key (keys %$upd) {
+ if (ref $upd->{$key}) {
+ my $info = $self->relationship_info($key);
+ if ($info && $info->{attrs}{accessor}
+ && $info->{attrs}{accessor} eq 'single')
+ {
+ my $rel = delete $upd->{$key};
+ $self->set_from_related($key => $rel);
+ $self->{_relationship_data}{$key} = $rel;
+ }
+ elsif ($self->has_column($key)
+ && exists $self->column_info($key)->{_inflate_info})
+ {
+ $self->set_inflated_column($key, delete $upd->{$key});
+ }
+ }
+ }
+ $self->set_columns($upd);
+ }
+ my %to_update = $self->get_dirty_columns;
+ return $self unless keys %to_update;
my $rows = $self->result_source->storage->update(
- $self->result_source->from, \%to_update, $ident_cond);
+ $self->result_source->from, \%to_update, $self->{_orig_ident} || $ident_cond);
if ($rows == 0) {
$self->throw_exception( "Can't update ${self}: row not found" );
} elsif ($rows > 1) {
}
$self->{_dirty_columns} = {};
$self->{related_resultsets} = {};
+ undef $self->{_orig_ident};
return $self;
}
$obj->delete
Deletes the object from the database. The object is still perfectly
-usable, but C<-E<gt>in_storage()> will now return 0 and the object must
-reinserted using C<-E<gt>insert()> before C<-E(<gt>update()> can be used
+usable, but C<< ->in_storage() >> will now return 0 and the object must
+be reinserted using C<< ->insert() >> before C<< ->update() >> can be used
on it. If you delete an object in a class with a C<has_many>
relationship, all the related objects will be deleted as well. To turn
this behavior off, pass C<< cascade_delete => 0 >> in the C<$attr>
my $val = $obj->get_column($col);
-Gets a column value from a row object. Currently, does not do
-any queries; the column must have already been fetched from
-the database and stored in the object.
+Gets a column value from a row object. Does not do any queries; the column
+must have already been fetched from the database and stored in the object. If
+there is an inflated value stored that has not yet been deflated, it is deflated
+when the method is invoked.
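+
+For example (a sketch; a DateTime-inflated C<created_on> column is assumed):
+
+    $row->created_on(DateTime->now);           # stores the inflated object
+    my $raw = $row->get_column('created_on');  # deflated string, on demand
+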
=cut
my ($self, $column) = @_;
$self->throw_exception( "Can't fetch data as class method" ) unless ref $self;
return $self->{_column_data}{$column} if exists $self->{_column_data}{$column};
+ if (exists $self->{_inflated_column}{$column}) {
+ return $self->store_column($column,
+ $self->_deflated_column($column, $self->{_inflated_column}{$column}));
+ }
$self->throw_exception( "No such column '${column}'" ) unless $self->has_column($column);
return undef;
}
sub has_column_loaded {
my ($self, $column) = @_;
$self->throw_exception( "Can't call has_column data as class method" ) unless ref $self;
+ return 1 if exists $self->{_inflated_column}{$column};
return exists $self->{_column_data}{$column};
}
sub get_columns {
my $self = shift;
+ if (exists $self->{_inflated_column}) {
+ foreach my $col (keys %{$self->{_inflated_column}}) {
+ $self->store_column($col, $self->_deflated_column($col, $self->{_inflated_column}{$col}))
+ unless exists $self->{_column_data}{$col};
+ }
+ }
return %{$self->{_column_data}};
}
sub set_column {
my $self = shift;
my ($column) = @_;
+ $self->{_orig_ident} ||= $self->ident_condition;
my $old = $self->get_column($column);
my $ret = $self->store_column(@_);
$self->{_dirty_columns}{$column} = 1
sub inflate_result {
my ($class, $source, $me, $prefetch) = @_;
- #use Data::Dumper; print Dumper(@_);
+
+  my $source_handle = $source;
+
+  # accept either a ResultSourceHandle or a plain ResultSource
+  if ($source->isa('DBIx::Class::ResultSourceHandle')) {
+    $source = $source_handle->resolve;
+  } else {
+    $source_handle = $source->handle;
+  }
+
my $new = {
- result_source => $source,
+ _source_handle => $source_handle,
_column_data => $me,
_in_storage => 1
};
Accessor to the ResultSource this object was created from
+=cut
+
+sub result_source {
+ my $self = shift;
+
+ if (@_) {
+ $self->_source_handle($_[0]->handle);
+ } else {
+ $self->_source_handle->resolve;
+ }
+}
+
=head2 register_column
$column_info = { .... };
use Carp::Clan qw/^DBIx::Class/;
use Scalar::Util qw/weaken/;
+use File::Spec;
require Module::Find;
use base qw/DBIx::Class/;
sub register_source {
my ($self, $moniker, $source) = @_;
+
+ %$source = %{ $source->new( { %$source, source_name => $moniker }) };
+
my %reg = %{$self->source_registrations};
$reg{$moniker} = $source;
$self->source_registrations(\%reg);
+
$source->schema($self);
+
weaken($source->{schema}) if ref($self);
if ($source->result_class) {
my %map = %{$self->class_mappings};
}
}
+sub _unregister_source {
+ my ($self, $moniker) = @_;
+ my %reg = %{$self->source_registrations};
+
+ my $source = delete $reg{$moniker};
+ $self->source_registrations(\%reg);
+ if ($source->result_class) {
+ my %map = %{$self->class_mappings};
+ delete $map{$source->result_class};
+ $self->class_mappings(\%map);
+ }
+}
+
=head2 class
=over 4
}
}
$class->ensure_class_loaded($comp_class);
- $comp_class->source_name($comp) unless $comp_class->source_name;
- push(@to_register, [ $comp_class->source_name, $comp_class ]);
+ $comp = $comp_class->source_name || $comp;
+ push(@to_register, [ $comp, $comp_class ]);
}
}
}
return;
}
-=head2 compose_connection
+=head2 compose_connection (DEPRECATED)
=over 4
=back
+DEPRECATED. You probably wanted compose_namespace.
+
+Actually, you probably just wanted to call connect.
+
+=for hidden due to deprecation
+
Calls L<DBIx::Class::Schema/"compose_namespace"> to the target namespace,
calls L<DBIx::Class::Schema/connection> with @db_info on the new schema,
then injects the L<DBIx::Class::ResultSetProxy> component and a
=cut
-sub compose_connection {
- my ($self, $target, @info) = @_;
- my $base = 'DBIx::Class::ResultSetProxy';
- eval "require ${base};";
- $self->throw_exception
- ("No arguments to load_classes and couldn't load ${base} ($@)")
- if $@;
-
- if ($self eq $target) {
- # Pathological case, largely caused by the docs on early C::M::DBIC::Plain
- foreach my $moniker ($self->sources) {
- my $source = $self->source($moniker);
+{
+ my $warn;
+
+ sub compose_connection {
+ my ($self, $target, @info) = @_;
+
+ warn "compose_connection deprecated as of 0.08000" unless $warn++;
+
+ my $base = 'DBIx::Class::ResultSetProxy';
+ eval "require ${base};";
+ $self->throw_exception
+ ("No arguments to load_classes and couldn't load ${base} ($@)")
+ if $@;
+
+ if ($self eq $target) {
+ # Pathological case, largely caused by the docs on early C::M::DBIC::Plain
+ foreach my $moniker ($self->sources) {
+ my $source = $self->source($moniker);
+ my $class = $source->result_class;
+ $self->inject_base($class, $base);
+ $class->mk_classdata(resultset_instance => $source->resultset);
+ $class->mk_classdata(class_resolver => $self);
+ }
+ $self->connection(@info);
+ return $self;
+ }
+
+ my $schema = $self->compose_namespace($target, $base);
+ {
+ no strict 'refs';
+ *{"${target}::schema"} = sub { $schema };
+ }
+
+ $schema->connection(@info);
+ foreach my $moniker ($schema->sources) {
+ my $source = $schema->source($moniker);
my $class = $source->result_class;
- $self->inject_base($class, $base);
+ $class->mk_classdata(result_source_instance => $source);
$class->mk_classdata(resultset_instance => $source->resultset);
- $class->mk_classdata(class_resolver => $self);
+ $class->mk_classdata(class_resolver => $schema);
}
- $self->connection(@info);
- return $self;
- }
-
- my $schema = $self->compose_namespace($target, $base);
- {
- no strict 'refs';
- *{"${target}::schema"} = sub { $schema };
- }
-
- $schema->connection(@info);
- foreach my $moniker ($schema->sources) {
- my $source = $schema->source($moniker);
- my $class = $source->result_class;
- #warn "$moniker $class $source ".$source->storage;
- $class->mk_classdata(result_source_instance => $source);
- $class->mk_classdata(resultset_instance => $source->resultset);
- $class->mk_classdata(class_resolver => $schema);
+ return $schema;
}
- return $schema;
}
=head2 compose_namespace
my $storage = $storage_class->new($self);
$storage->connect_info(\@info);
$self->storage($storage);
+ $self->on_connect() if($self->can('on_connect'));
return $self;
}
Attempts to deploy the schema to the current storage using L<SQL::Translator>.
Note that this feature is currently EXPERIMENTAL and may not work correctly
-across all databases, or fully handle complex relationships.
+across all databases, or fully handle complex relationships. That said, it
+has been used successfully by many people, including the core dev team.
See L<SQL::Translator/METHODS> for a list of values for C<$sqlt_args>. The most
common value for this would be C<< { add_drop_table => 1, } >> to have the SQL
produced include a DROP TABLE statement for each table created.
+Additionally, the DBIx::Class parser accepts a C<sources> parameter as a hash
+ref or an array ref, containing a list of sources to deploy. If present, then
+only the sources listed will get deployed (see the sketch below).
+
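+For example (a sketch; the source monikers are illustrative):
+
+    $schema->deploy({ sources => ['Artist', 'CD'] });
+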
=cut
sub deploy {
=over 4
-=item Arguments: \@databases, $version, $directory, $sqlt_args
+=item Arguments: \@databases, $version, $directory, $preversion, $sqlt_args
=back
Creates an SQL file based on the Schema, for each of the specified
-database types, in the given directory.
+database types, in the given directory. Given a previous version number,
+this will also create a file containing the ALTER TABLE statements to
+transform the previous schema into the current one. Note that these
+statements may contain DROP TABLE or DROP COLUMN statements that can
+potentially destroy data.
+
+The file names are created using the C<ddl_filename> method below; override
+this method in your schema if you would like a different file name format.
+For the ALTER file, the same format is used, replacing
+$version in the name with "$preversion-$version".
+
+If no arguments are passed, then the following default values are used:
+
+=over 4
+
+=item databases - ['MySQL', 'SQLite', 'PostgreSQL']
+
+=item version - $schema->VERSION
+
+=item directory - './'
+
+=item preversion - <none>
+
+=back
Note that this feature is currently EXPERIMENTAL and may not work correctly
across all databases, or fully handle complex relationships.
+WARNING: Please check all SQL files created before applying them.
+
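+For example, a hypothetical invocation:
+
+    # SQLite and MySQL DDL for version 2.0, plus ALTER files to
+    # upgrade from 1.0, written to ./sql/
+    $schema->create_ddl_dir(['SQLite', 'MySQL'], '2.0', './sql/', '1.0');
+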
=cut
sub create_ddl_dir {
=head2 ddl_filename (EXPERIMENTAL)
- my $filename = $table->ddl_filename($type, $dir, $version)
+=over 4
+
+=item Arguments: $database-type, $directory, $version, $preversion
+
+=back
+
+ my $filename = $table->ddl_filename($type, $dir, $version, $preversion)
+
+This method is called by C<create_ddl_dir> to compose a file name out of
+the supplied directory, database type and version number. The default file
+name format is: C<$dir$schema-$version-$type.sql>.
-Creates a filename for a SQL file based on the table class name. Not
-intended for direct end user use.
+You may override this method in your schema if you wish to use a different
+format.
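+
+For example (illustrative), C<< My::Schema->ddl_filename('SQLite', './sql/', '2.0') >>
+would return something like C<sql/My-Schema-2.0-SQLite.sql>.
+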
=cut
sub ddl_filename {
- my ($self, $type, $dir, $version) = @_;
+ my ($self, $type, $dir, $version, $pversion) = @_;
my $filename = ref($self);
$filename =~ s/::/-/;
- $filename = "$dir$filename-$version-$type.sql";
+ $filename = File::Spec->catfile($dir, "$filename-$version-$type.sql");
+ $filename =~ s/$version/$pversion-$version/ if($pversion);
return $filename;
}
--- /dev/null
+package DBIx::Class::Version::Table;
+use base 'DBIx::Class';
+use strict;
+use warnings;
+
+__PACKAGE__->load_components(qw/ Core/);
+__PACKAGE__->table('SchemaVersions');
+
+__PACKAGE__->add_columns
+ ( 'Version' => {
+ 'data_type' => 'VARCHAR',
+ 'is_auto_increment' => 0,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'name' => 'Version',
+ 'is_nullable' => 0,
+ 'size' => '10'
+ },
+ 'Installed' => {
+ 'data_type' => 'VARCHAR',
+ 'is_auto_increment' => 0,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'name' => 'Installed',
+ 'is_nullable' => 0,
+ 'size' => '20'
+ },
+ );
+__PACKAGE__->set_primary_key('Version');
+
+package DBIx::Class::Version;
+use base 'DBIx::Class::Schema';
+use strict;
+use warnings;
+
+__PACKAGE__->register_class('Table', 'DBIx::Class::Version::Table');
+
+
+# ---------------------------------------------------------------------------
+package DBIx::Class::Schema::Versioned;
+
+use strict;
+use warnings;
+use base 'DBIx::Class';
+use POSIX 'strftime';
+use Data::Dumper;
+
+__PACKAGE__->mk_classdata('_filedata');
+__PACKAGE__->mk_classdata('upgrade_directory');
+__PACKAGE__->mk_classdata('backup_directory');
+
+sub on_connect
+{
+ my ($self) = @_;
+ my $vschema = DBIx::Class::Version->connect(@{$self->storage->connect_info()});
+ my $vtable = $vschema->resultset('Table');
+ my $pversion;
+
+ if(!$self->exists($vtable))
+ {
+ $vschema->storage->ensure_connected();
+ $vschema->deploy();
+ $pversion = 0;
+ }
+ else
+ {
+ my $psearch = $vtable->search(undef,
+ { select => [
+ { 'max' => 'Installed' },
+ ],
+ as => ['maxinstall'],
+ })->first;
+ $pversion = $vtable->search({ Installed => $psearch->get_column('maxinstall'),
+ })->first;
+ $pversion = $pversion->Version if($pversion);
+ }
+ if($pversion eq $self->VERSION)
+ {
+ warn "This version is already installed\n";
+ return 1;
+ }
+
+## use IC::DT?
+
+ if(!$pversion)
+ {
+ $vtable->create({ Version => $self->VERSION,
+ Installed => strftime("%Y-%m-%d %H:%M:%S", gmtime())
+ });
+ ## If we let the user do this, where does the Version table get updated?
+ warn "No previous version found, calling deploy to install this version.\n";
+ $self->deploy();
+ return 1;
+ }
+
+ my $file = $self->ddl_filename(
+ $self->storage->sqlt_type,
+ $self->upgrade_directory,
+ $self->VERSION
+ );
+ if(!$file)
+ {
+ # No upgrade path between these two versions
+ return 1;
+ }
+
+ $file = $self->ddl_filename(
+ $self->storage->sqlt_type,
+ $self->upgrade_directory,
+ $self->VERSION,
+ $pversion,
+ );
+ if(!-f $file)
+ {
+ warn "Upgrade not possible, no upgrade file found ($file)\n";
+ return;
+ }
+
+ my $fh;
+ open $fh, "<$file" or warn("Can't open upgrade file, $file ($!)");
+ my @data = split(/;\n/, join('', <$fh>));
+ close($fh);
+ @data = grep { $_ && $_ !~ /^-- / } @data;
+  @data = grep { $_ !~ /^(BEGIN TRANSACTION|COMMIT)/m } @data;
+
+ $self->_filedata(\@data);
+
+ ## Don't do this yet, do only on command?
+ ## If we do this later, where does the Version table get updated??
+ warn "Versions out of sync. This is " . $self->VERSION .
+ ", your database contains version $pversion, please call upgrade on your Schema.\n";
+# $self->upgrade($pversion, $self->VERSION);
+}
+
+sub exists
+{
+ my ($self, $rs) = @_;
+
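+  # "{ 1, 0 }" is a "WHERE 1 = 0" condition: we only care whether the
+  # query (and hence the version table) works at all, not about any rows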
+ my $c = eval {
+ $rs->search({ 1, 0 })->count;
+ };
+ return 0 if $@ || !defined $c;
+
+ return 1;
+}
+
+sub backup
+{
+ my ($self) = @_;
+ ## Make each ::DBI::Foo do this
+ $self->storage->backup($self->backup_directory());
+}
+
+sub upgrade
+{
+ my ($self) = @_;
+
+ ## overridable sub, per default just run all the commands.
+
+ $self->backup();
+
+ $self->run_upgrade(qr/create/i);
+ $self->run_upgrade(qr/alter table .*? add/i);
+ $self->run_upgrade(qr/alter table .*? (?!drop)/i);
+ $self->run_upgrade(qr/alter table .*? drop/i);
+ $self->run_upgrade(qr/drop/i);
+# $self->run_upgrade(qr//i);
+
+ my $vschema = DBIx::Class::Version->connect(@{$self->storage->connect_info()});
+ my $vtable = $vschema->resultset('Table');
+ $vtable->create({ Version => $self->VERSION,
+ Installed => strftime("%Y-%m-%d %H:%M:%S", gmtime())
+ });
+}
+
+
+sub run_upgrade
+{
+ my ($self, $stm) = @_;
+ my @statements = grep { $_ =~ $stm } @{$self->_filedata};
+ $self->_filedata([ grep { $_ !~ /$stm/i } @{$self->_filedata} ]);
+
+ for (@statements)
+ {
+ $self->storage->debugfh->print("$_\n") if $self->storage->debug;
+ $self->storage->dbh->do($_) or warn "SQL was:\n $_";
+ }
+
+ return 1;
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Schema::Versioned - DBIx::Class::Schema plugin for Schema upgrades
+
+=head1 SYNOPSIS
+
+ package Library::Schema;
+ use base qw/DBIx::Class::Schema/;
+ # load Library::Schema::CD, Library::Schema::Book, Library::Schema::DVD
+ __PACKAGE__->load_classes(qw/CD Book DVD/);
+
+ __PACKAGE__->load_components(qw/+DBIx::Class::Schema::Versioned/);
+ __PACKAGE__->upgrade_directory('/path/to/upgrades/');
+ __PACKAGE__->backup_directory('/path/to/backups/');
+
+ sub backup
+ {
+ my ($self) = @_;
+ # my special backup process
+ }
+
+ sub upgrade
+ {
+ my ($self) = @_;
+
+ ## overridable sub, per default just runs all the commands.
+
+ $self->run_upgrade(qr/create/i);
+ $self->run_upgrade(qr/alter table .*? add/i);
+ $self->run_upgrade(qr/alter table .*? (?!drop)/i);
+ $self->run_upgrade(qr/alter table .*? drop/i);
+ $self->run_upgrade(qr/drop/i);
+ $self->run_upgrade(qr//i);
+ }
+
+=head1 DESCRIPTION
+
+This module is a component designed to extend L<DBIx::Class::Schema>
+classes, to enable them to upgrade to newer schema layouts. To use this
+module, you need to have called C<create_ddl_dir> on your Schema to
+create your upgrade files to include with your delivery.
+
+A table called I<SchemaVersions> is created and maintained by the
+module. This contains two fields, 'Version' and 'Installed', which
+contain each VERSION of your Schema, and the date+time it was installed.
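+
+Roughly, the table it deploys looks like this (illustrative DDL):
+
+    CREATE TABLE SchemaVersions (
+        Version   VARCHAR(10) NOT NULL PRIMARY KEY,
+        Installed VARCHAR(20) NOT NULL
+    );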
+
+If you would like to influence which levels of version change need
+upgrades in your Schema, you can override the method C<ddl_filename>
+in L<DBIx::Class::Schema>. Return a false value if there is no upgrade
+path between the two versions supplied. By default, every change in
+your VERSION is regarded as needing an upgrade.
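+
+For example, a sketch (assuming dotted version numbers) that declares no
+upgrade path within the same major version:
+
+    sub ddl_filename {
+        my ($self, $type, $dir, $version, $preversion) = @_;
+        if (defined $preversion) {
+            my ($maj)  = $version    =~ /^(\d+)/;
+            my ($pmaj) = $preversion =~ /^(\d+)/;
+            return 0 if $maj == $pmaj; # same major version: no upgrade
+        }
+        return $self->next::method(@_);
+    }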
+
+The actual upgrade is called manually by calling C<upgrade> on your
+schema object. Code is run at connect time to determine whether an
+upgrade is needed, if so, a warning "Versions out of sync" is
+produced.
+
+NB: At the moment, SQLite upgrading is rather spotty, as SQL::Translator::Diff
+returns SQL statements that SQLite does not support.
+
+
+=head1 METHODS
+
+=head2 backup
+
+This is an overridable method which is called just before the upgrade, to
+allow you to make a backup of the database. Per default this method attempts
+to call C<< $self->storage->backup >>, to run the standard backup on each
+database type.
+
+This method should return the name of the backup file, if appropriate.
+
+C<backup> is called from C<upgrade>; make sure you call it if you write your
+own C<upgrade> method.
+
+=head2 upgrade
+
+This is an overridable method used to run your upgrade. The freeform method
+allows you to run your upgrade any way you please; you can call C<run_upgrade>
+any number of times to run the actual SQL commands, and in between you can
+sandwich your data upgrading. For example, first run all the B<CREATE>
+commands, then migrate your data from old to new tables/formats, then
+issue the DROP commands when you are finished.
+
+=head2 run_upgrade
+
+ $self->run_upgrade(qr/create/i);
+
+Runs a set of SQL statements matching a passed in regular expression. The
+idea is that this method can be called any number of times from your
+C<upgrade> method, running whichever commands you specify via the
+regex in the parameter.
+
+=head2 upgrade_directory
+
+Use this to set the directory your upgrade files are stored in.
+
+=head2 backup_directory
+
+Use this to set the directory you want your backups stored in.
+
+=head1 AUTHOR
+
+Jess Robinson <castaway@desert-island.demon.co.uk>
+++ /dev/null
-package DBIx::Class::Serialize::Storable;
-use strict;
-use warnings;
-use Storable;
-
-sub STORABLE_freeze {
- my ($self,$cloning) = @_;
- my $to_serialize = { %$self };
- delete $to_serialize->{result_source};
- return (Storable::freeze($to_serialize));
-}
-
-sub STORABLE_thaw {
- my ($self,$cloning,$serialized) = @_;
- %$self = %{ Storable::thaw($serialized) };
- $self->result_source($self->result_source_instance)
- if $self->can('result_source_instance');
-}
-
-1;
-
-__END__
-
-=head1 NAME
-
- DBIx::Class::Serialize::Storable - hooks for Storable freeze/thaw
-
-=head1 SYNOPSIS
-
- # in a table class definition
- __PACKAGE__->load_components(qw/Serialize::Storable/);
-
- # meanwhile, in a nearby piece of code
- my $cd = $schema->resultset('CD')->find(12);
- # if the cache uses Storable, this will work automatically
- $cache->set($cd->ID, $cd);
-
-=head1 DESCRIPTION
-
-This component adds hooks for Storable so that row objects can be
-serialized. It assumes that your row object class (C<result_class>) is
-the same as your table class, which is the normal situation.
-
-=head1 HOOKS
-
-The following hooks are defined for L<Storable> - see the
-documentation for L<Storable/Hooks> for detailed information on these
-hooks.
-
-=head2 STORABLE_freeze
-
-The serializing hook, called on the object during serialization. It
-can be inherited, or defined in the class itself, like any other
-method.
-
-=head2 STORABLE_thaw
-
-The deserializing hook called on the object during deserialization.
-
-=head1 AUTHORS
-
-David Kamholz <dkamholz@cpan.org>
-
-=head1 LICENSE
-
-You may distribute this code under the same terms as Perl itself.
-
-=cut
sub _find_syntax {
my ($self, $syntax) = @_;
my $dbhname = blessed($syntax) ? $syntax->{Driver}{Name} : $syntax;
-# print STDERR "Found DBH $syntax >$dbhname< ", $syntax->{Driver}->{Name}, "\n";
if(ref($self) && $dbhname && $dbhname eq 'DB2') {
return 'RowNumberOver';
}
if (ref $cond eq 'HASH') {
my %j;
for (keys %$cond) {
- my $x = '= '.$self->_quote($cond->{$_}); $j{$_} = \$x;
+ my $v = $cond->{$_};
+ if (ref $v) {
+ # XXX no throw_exception() in this package and croak() fails with strange results
+          Carp::croak(ref($v) . qq{ reference arguments are not supported in JOINS - try using "..." instead})
+ if ref($v) ne 'SCALAR';
+ $j{$_} = $v;
+ }
+ else {
+ my $x = '= '.$self->_quote($v); $j{$_} = \$x;
+ }
};
- return $self->_recurse_where(\%j);
+ return scalar($self->_recurse_where(\%j));
} elsif (ref $cond eq 'ARRAY') {
return join(' OR ', map { $self->_join_condition($_) } @$cond);
} else {
$self->throw_exception("'$sql' did not generate a statement.");
}
if ($self->debug) {
- my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind;
+ my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind;
$self->debugobj->query_end($sql, @debug_bind);
}
return (wantarray ? ($rv, $sth, @bind) : $rv);
=over 4
-=item Arguments: $schema \@databases, $version, $directory, $sqlt_args
+=item Arguments: $schema \@databases, $version, $directory, $preversion, $sqlt_args
=back
sub create_ddl_dir
{
- my ($self, $schema, $databases, $version, $dir, $sqltargs) = @_;
+ my ($self, $schema, $databases, $version, $dir, $preversion, $sqltargs) = @_;
if(!$dir || !-d $dir)
{
$sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} };
eval "use SQL::Translator";
- $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@;
+ $self->throw_exception("Can't create a ddl file without SQL::Translator: $@") if $@;
  my $sqlt = SQL::Translator->new($sqltargs);
foreach my $db (@$databases)
{
$sqlt->reset();
$sqlt->parser('SQL::Translator::Parser::DBIx::Class');
# $sqlt->parser_args({'DBIx::Class' => $schema);
+ $sqlt = $self->configure_sqlt($sqlt, $db);
$sqlt->data($schema);
$sqlt->producer($db);
my $filename = $schema->ddl_filename($db, $dir, $version);
if(-e $filename)
{
- $self->throw_exception("$filename already exists, skipping $db");
+ warn("$filename already exists, skipping $db");
next;
}
- open($file, ">$filename")
- or $self->throw_exception("Can't open $filename for writing ($!)");
+
my $output = $sqlt->translate;
-#use Data::Dumper;
-# print join(":", keys %{$schema->source_registrations});
-# print Dumper($sqlt->schema);
if(!$output)
{
- $self->throw_exception("Failed to translate to $db. (" . $sqlt->error . ")");
+ warn("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
next;
}
+ if(!open($file, ">$filename"))
+ {
+ $self->throw_exception("Can't open $filename for writing ($!)");
+ next;
+ }
print $file $output;
close($file);
+
+ if($preversion)
+ {
+ eval "use SQL::Translator::Diff";
+ if($@)
+ {
+ warn("Can't diff versions without SQL::Translator::Diff: $@");
+ next;
+ }
+
+ my $prefilename = $schema->ddl_filename($db, $dir, $preversion);
+# print "Previous version $prefilename\n";
+ if(!-e $prefilename)
+ {
+ warn("No previous schema file found ($prefilename)");
+ next;
+ }
+ #### We need to reparse the SQLite file we just wrote, so that
+  ## Diff doesn't get confused (and Diff is *very* easily confused).
+ ## FIXME: rip Diff to pieces!
+# my $target_schema = $sqlt->schema;
+# unless ( $target_schema->name ) {
+# $target_schema->name( $filename );
+# }
+ my @input;
+ push @input, {file => $prefilename, parser => $db};
+ push @input, {file => $filename, parser => $db};
+ my ( $source_schema, $source_db, $target_schema, $target_db ) = map {
+ my $file = $_->{'file'};
+ my $parser = $_->{'parser'};
+
+ my $t = SQL::Translator->new;
+ $t->debug( 0 );
+ $t->trace( 0 );
+ $t->parser( $parser ) or die $t->error;
+ my $out = $t->translate( $file ) or die $t->error;
+ my $schema = $t->schema;
+ unless ( $schema->name ) {
+ $schema->name( $file );
+ }
+ ($schema, $parser);
+ } @input;
+
+ my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db,
+ $target_schema, $db,
+ {}
+ );
+ my $difffile = $schema->ddl_filename($db, $dir, $version, $preversion);
+ if(-e $difffile)
+ {
+ warn("$difffile already exists, skipping");
+ next;
+ }
+ if(!open $file, ">$difffile")
+ {
+ $self->throw_exception("Can't write to $difffile ($!)");
+ next;
+ }
+ print $file $diff;
+ close($file);
+ }
}
+}
+sub configure_sqlt {
+ my $self = shift;
+ my $tr = shift;
+ my $db = shift || $self->sqlt_type;
+ if ($db eq 'PostgreSQL') {
+ $tr->quote_table_names(0);
+ $tr->quote_field_names(0);
+ }
+ return $tr;
}
=head2 deployment_statements
$type ||= $self->sqlt_type;
$version ||= $schema->VERSION || '1.x';
$dir ||= './';
+ my $filename = $schema->ddl_filename($type, $dir, $version);
+ if(-f $filename)
+ {
+ my $file;
+ open($file, "<$filename")
+ or $self->throw_exception("Can't open $filename ($!)");
+ my @rows = <$file>;
+ close($file);
+ return join('', @rows);
+ }
+
eval "use SQL::Translator";
if(!$@)
{
$self->throw_exception($@) if $@;
eval "use SQL::Translator::Producer::${type};";
$self->throw_exception($@) if $@;
+
+  # sources needs to be a parser arg, but for simplicity we allow it at the
+  # top level coming in
+ $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources}
+ if exists $sqltargs->{sources};
+
my $tr = SQL::Translator->new(%$sqltargs);
SQL::Translator::Parser::DBIx::Class::parse( $tr, $schema );
return "SQL::Translator::Producer::${type}"->can('produce')->($tr);
}
- my $filename = $schema->ddl_filename($type, $dir, $version);
- if(!-f $filename)
- {
-# $schema->create_ddl_dir([ $type ], $version, $dir, $sqltargs);
- $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy");
- return;
- }
- my $file;
- open($file, "<$filename")
- or $self->throw_exception("Can't open $filename ($!)");
- my @rows = <$file>;
- close($file);
-
- return join('', @rows);
-
+ $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy");
+ return;
+
}
sub deploy {
sub last_insert_id {
my ($self,$source,$col) = @_;
my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
+ $self->throw_exception("could not fetch primary key for " . $source->name . ", could not "
+ . "get autoinc sequence for $col (check that table and column specifications are correct "
+ . "and in the correct case)") unless defined $seq;
$self->dbh_do($self->can('_dbh_last_insert_id'), $seq);
}
use strict;
use warnings;
+use POSIX 'strftime';
+use File::Copy;
+use File::Spec;
use base qw/DBIx::Class::Storage::DBI::MultiDistinctEmulation/;
$dbh->func('last_insert_rowid');
}
+sub backup
+{
+ my ($self, $dir) = @_;
+ $dir ||= './';
+
+ ## Where is the db file?
+ my $dsn = $self->connect_info()->[0];
+
+  my ($dbname) = $dsn =~ /dbname=([^;]+)/;
+  if(!$dbname)
+  {
+    ($dbname) = $dsn =~ /^dbi:SQLite:(.+)$/i;
+  }
+ $self->throw_exception("Cannot determine name of SQLite db file")
+ if(!$dbname || !-f $dbname);
+
+  my ($vol, $dbdir, $file) = File::Spec->splitpath($dbname);
+  $file = strftime("%y%m%d%H%M%S", localtime()) . $file;
+ $file = "B$file" while(-f $file);
+
+  mkdir($dir) unless -d $dir;
+ my $backupfile = File::Spec->catfile($dir, $file);
+
+ my $res = copy($dbname, $backupfile);
+ $self->throw_exception("Backup failed! ($!)") if(!$res);
+
+ return $backupfile;
+}
+
1;
=head1 NAME
# We're working with DBIx::Class Schemas, not data streams.
# -------------------------------------------------------------------
sub parse {
- my ($tr, $data) = @_;
- my $args = $tr->parser_args;
- my $dbixschema = $args->{'DBIx::Schema'} || $data;
- $dbixschema ||= $args->{'package'};
+ my ($tr, $data) = @_;
+ my $args = $tr->parser_args;
+ my $dbixschema = $args->{'DBIx::Schema'} || $data;
+ $dbixschema ||= $args->{'package'};
+ my $limit_sources = $args->{'sources'};
die 'No DBIx::Schema' unless ($dbixschema);
if (!ref $dbixschema) {
my %seen_tables;
- foreach my $moniker ($dbixschema->sources)
+ my @monikers = $dbixschema->sources;
+ if ($limit_sources) {
+ my $ref = ref $limit_sources || '';
+      die "'sources' parameter must be an array or hash ref" unless $ref eq 'ARRAY' || $ref eq 'HASH';
+
+      # limit monikers to those specified in the sources parameter
+ my $sources;
+ if ($ref eq 'ARRAY') {
+ $sources->{$_} = 1 for (@$limit_sources);
+ } else {
+ $sources = $limit_sources;
+ }
+ @monikers = grep { $sources->{$_} } @monikers;
+ }
+
+
+ foreach my $moniker (@monikers)
{
#eval "use $tableclass";
#print("Can't load $tableclass"), next if($@);
}
my @rels = $source->relationships();
+
+ my %created_FK_rels;
+
foreach my $rel (@rels)
{
my $rel_info = $source->relationship_info($rel);
$on_update = $otherrelationship->{'attrs'}->{cascade_copy} ? 'CASCADE' : '';
}
+      # Make sure we don't create the same foreign key constraint twice
+ my $key_test = join("\x00", @keys);
+
#Decide if this is a foreign key based on whether the self
#items are our primary columns.
# If the sets are different, then we assume it's a foreign key from
# us to another table.
- if (!$source->compare_relationship_keys(\@keys, \@primary)) {
+      # OR: if the is_foreign_key attr is explicitly set on one of the local columns
+ if ( ! exists $created_FK_rels{$rel_table}->{$key_test}
+ &&
+ ( !$source->compare_relationship_keys(\@keys, \@primary) ||
+ grep { $source->column_info($_)->{is_foreign_key} } @keys
+ )
+ ) {
+ $created_FK_rels{$rel_table}->{$key_test} = 1;
$table->add_constraint(
type => 'foreign_key',
name => "fk_$keys[0]",
use lib qw(t/lib);
use_ok('DBICTest');
-DBICTest->init_schema();
+my $schema = DBICTest->init_schema();
-my $orig_debugcb = DBICTest->schema->storage->debugcb;
-my $orig_debug = DBICTest->schema->storage->debug;
+my $orig_debugcb = $schema->storage->debugcb;
+my $orig_debug = $schema->storage->debug;
-diag('Testing against ' . join(' ', map { DBICTest->schema->storage->dbh->get_info($_) } qw/17 18/));
+diag('Testing against ' . join(' ', map { $schema->storage->dbh->get_info($_) } qw/17 18/));
-DBICTest->schema->storage->sql_maker->quote_char('`');
-DBICTest->schema->storage->sql_maker->name_sep('.');
+$schema->storage->sql_maker->quote_char('`');
+$schema->storage->sql_maker->name_sep('.');
my $sql = '';
-DBICTest->schema->storage->debugcb(sub { $sql = $_[1] });
-DBICTest->schema->storage->debug(1);
+$schema->storage->debugcb(sub { $sql = $_[1] });
+$schema->storage->debug(1);
my $rs;
-$rs = DBICTest::CD->search(
+$rs = $schema->resultset('CD')->search(
{ 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ join => 'artist' });
eval { $rs->count };
like($sql, qr/\QSELECT COUNT( * ) FROM `cd` `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )\E/, 'got correct SQL for count query with quoting');
my $order = 'year DESC';
-$rs = DBICTest::CD->search({},
+$rs = $schema->resultset('CD')->search({},
{ 'order_by' => $order });
eval { $rs->first };
like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)');
-$rs = DBICTest::CD->search({},
+$rs = $schema->resultset('CD')->search({},
{ 'order_by' => \$order });
eval { $rs->first };
like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref');
-DBICTest->schema->storage->sql_maker->quote_char([qw/[ ]/]);
-DBICTest->schema->storage->sql_maker->name_sep('.');
+$schema->storage->sql_maker->quote_char([qw/[ ]/]);
+$schema->storage->sql_maker->name_sep('.');
-$rs = DBICTest::CD->search(
+$rs = $schema->resultset('CD')->search(
{ 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ join => 'artist' });
eval { $rs->count };
order => '12'
);
-DBICTest->schema->storage->sql_maker->quote_char('`');
-DBICTest->schema->storage->sql_maker->name_sep('.');
+$schema->storage->sql_maker->quote_char('`');
+$schema->storage->sql_maker->name_sep('.');
-is(DBICTest->schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
+is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
-DBICTest->schema->storage->debugcb($orig_debugcb);
-DBICTest->schema->storage->debug($orig_debug);
+$schema->storage->debugcb($orig_debugcb);
+$schema->storage->debug($orig_debug);
use lib qw(t/lib);
use_ok('DBICTest');
-DBICTest->init_schema();
+my $schema = DBICTest->init_schema();
-my $orig_debugcb = DBICTest->schema->storage->debugcb;
-my $orig_debug = DBICTest->schema->storage->debug;
+my $orig_debugcb = $schema->storage->debugcb;
+my $orig_debug = $schema->storage->debug;
-diag('Testing against ' . join(' ', map { DBICTest->schema->storage->dbh->get_info($_) } qw/17 18/));
+diag('Testing against ' . join(' ', map { $schema->storage->dbh->get_info($_) } qw/17 18/));
-my $dsn = DBICTest->schema->storage->connect_info->[0];
-DBICTest->schema->connection($dsn, { quote_char => '`', name_sep => '.' });
+my $dsn = $schema->storage->connect_info->[0];
+$schema->connection($dsn, { quote_char => '`', name_sep => '.' });
my $sql = '';
-DBICTest->schema->storage->debugcb(sub { $sql = $_[1] });
-DBICTest->schema->storage->debug(1);
+$schema->storage->debugcb(sub { $sql = $_[1] });
+$schema->storage->debug(1);
my $rs;
-$rs = DBICTest::CD->search(
+$rs = $schema->resultset('CD')->search(
{ 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ join => 'artist' });
eval { $rs->count };
like($sql, qr/\QSELECT COUNT( * ) FROM `cd` `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )\E/, 'got correct SQL for count query with quoting');
my $order = 'year DESC';
-$rs = DBICTest::CD->search({},
+$rs = $schema->resultset('CD')->search({},
{ 'order_by' => $order });
eval { $rs->first };
like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)');
-$rs = DBICTest::CD->search({},
+$rs = $schema->resultset('CD')->search({},
{ 'order_by' => \$order });
eval { $rs->first };
like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref');
-DBICTest->schema->connection($dsn, { quote_char => [qw/[ ]/], name_sep => '.' });
-DBICTest->schema->storage->debugcb(sub { $sql = $_[1] });
-DBICTest->schema->storage->debug(1);
+$schema->connection($dsn, { quote_char => [qw/[ ]/], name_sep => '.' });
+$schema->storage->debugcb(sub { $sql = $_[1] });
+$schema->storage->debug(1);
-$rs = DBICTest::CD->search(
+$rs = $schema->resultset('CD')->search(
{ 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ join => 'artist' });
eval { $rs->count };
order => '12'
);
-DBICTest->schema->connection($dsn, { quote_char => '`', name_sep => '.' });
+$schema->connection($dsn, { quote_char => '`', name_sep => '.' });
-is(DBICTest->schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
+is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
-DBICTest->schema->storage->debugcb($orig_debugcb);
-DBICTest->schema->storage->debug($orig_debug);
+$schema->storage->debugcb($orig_debugcb);
+$schema->storage->debug($orig_debug);
use lib qw(t/lib);
use_ok('DBICTest');
-DBICTest->init_schema();
+my $schema = DBICTest->init_schema();
my $cbworks = 0;
-DBICTest->schema->storage->debugcb(sub { $cbworks = 1; });
-DBICTest->schema->storage->debug(0);
-my $rs = DBICTest::CD->search({});
+$schema->storage->debugcb(sub { $cbworks = 1; });
+$schema->storage->debug(0);
+my $rs = $schema->resultset('CD')->search({});
$rs->count();
ok(!$cbworks, 'Callback not called with debug disabled');
-DBICTest->schema->storage->debug(1);
+$schema->storage->debug(1);
$rs->count();
ok($cbworks, 'Debug callback worked.');
my $prof = new DBIx::Test::Profiler();
-DBICTest->schema->storage->debugobj($prof);
+$schema->storage->debugobj($prof);
# Test non-transaction calls.
$rs->count();
$prof->reset();
# Test transaction calls
-DBICTest->schema->txn_begin();
+$schema->txn_begin();
ok($prof->{'txn_begin'}, 'txn_begin called');
-$rs = DBICTest::CD->search({});
+$rs = $schema->resultset('CD')->search({});
$rs->count();
ok($prof->{'query_start'}, 'query_start called');
ok($prof->{'query_end'}, 'query_end called');
-DBICTest->schema->txn_commit();
+$schema->txn_commit();
ok($prof->{'txn_commit'}, 'txn_commit called');
$prof->reset();
# Test a rollback
-DBICTest->schema->txn_begin();
-$rs = DBICTest::CD->search({});
+$schema->txn_begin();
+$rs = $schema->resultset('CD')->search({});
$rs->count();
-DBICTest->schema->txn_rollback();
+$schema->txn_rollback();
ok($prof->{'txn_rollback'}, 'txn_rollback called');
-DBICTest->schema->storage->debug(0);
+$schema->storage->debug(0);
package DBIx::Test::Profiler;
use strict;
like($@, qr/DBICTest::Exception is handling this: $ex_regex/);
# While we're at it, lets throw a custom exception through Storage::DBI
-eval { DBICTest->schema->storage->throw_exception('floob') };
+eval { $schema->storage->throw_exception('floob') };
like($@, qr/DBICTest::Exception is handling this: floob/);
use DBICTest::ResultSetManager; # uses Class::Inspector
-my $schema = DBICTest::ResultSetManager->compose_connection('DB', 'foo');
+my $schema = DBICTest::ResultSetManager->compose_namespace('DB');
my $rs = $schema->resultset('Foo');
ok( !DB::Foo->can('bar'), 'Foo class does not have bar method' );
cmp_ok(@artsn, '==', 4, "Four artists returned");
# make sure subclasses that don't set source_name are ok
- ok($schema->source('ArtistSubclass', 'ArtistSubclass exists'));
+ ok($schema->source('ArtistSubclass'), 'ArtistSubclass exists');
}
my $newbook = $schema->resultset( 'Bookmark' )->find(1);
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More tests => 2;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::Schema;
+use DBICTest::Schema::Artist;
+
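+# rename the Artist source, then register the same class under a second moniker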
+DBICTest::Schema::Artist->source_name('MyArtist');
+DBICTest::Schema->register_class('FooA', 'DBICTest::Schema::Artist');
+
+my $schema = DBICTest->init_schema();
+
+my $artists = $schema->resultset('FooA')->search;
+is($artists->count, 3, 'have 3 artists');
+is($schema->class('FooA'), 'DBICTest::FooA', 'Correct artist class');
plan tests => 5;
-my $artist = DBICTest::Artist->find(1);
+my $artist = $schema->resultset("Artist")->find(1);
ok($artist->find_related('twokeys', {cd => 1}), "find multiple pks using relationships + args");
ok($schema->resultset("FourKeys")->search({ foo => 1, bar => 2 })->find({ hello => 3, goodbye => 4 }), "search on partial key followed by a find");
use lib qw(t/lib);
use DBICTest;
-DBICTest::Schema::CD->add_column('year');
my $schema = DBICTest->init_schema();
eval { require DateTime };
plan skip_all => "Need DateTime for inflation tests" if $@;
-plan tests => 4;
+plan tests => 20;
-DBICTest::Schema::CD->inflate_column( 'year',
+$schema->class('CD')->inflate_column( 'year',
{ inflate => sub { DateTime->new( year => shift ) },
deflate => sub { shift->year } }
);
is( $cd->year->month, 1, 'inflated month ok' );
+eval { $cd->year(\'year +1'); };
+ok(!$@, 'updated year using a scalarref');
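+# a scalarref bypasses the deflator entirely and is passed to the database as a literal SQL expression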
+$cd->update();
+$cd->discard_changes();
+
+is( ref($cd->year), 'DateTime', 'year is still a DateTime, ok' );
+
+is( $cd->year->year, 1998, 'updated year, bypassing inflation' );
+
+is( $cd->year->month, 1, 'month is still 1' );
+
+# get_inflated_column test
+
+is( ref($cd->get_inflated_column('year')), 'DateTime', 'get_inflated_column produces a DateTime');
+
# deflate test
my $now = DateTime->now;
$cd->year( $now );
$cd->update;
-($cd) = $schema->resultset("CD")->search( year => $now->year );
+$cd = $schema->resultset("CD")->find(3);
is( $cd->year->year, $now->year, 'deflate ok' );
+# set_inflated_column test
+eval { $cd->set_inflated_column('year', $now) };
+ok(!$@, 'set_inflated_column with DateTime object');
+$cd->update;
+
+$cd = $schema->resultset("CD")->find(3);
+is( $cd->year->year, $now->year, 'deflate ok' );
+
+$cd = $schema->resultset("CD")->find(3);
+my $before_year = $cd->year->year;
+eval { $cd->set_inflated_column('year', \'year + 1') };
+ok(!$@, 'set_inflated_column to "year + 1"');
+$cd->update;
+
+$cd = $schema->resultset("CD")->find(3);
+is( $cd->year->year, $before_year+1, 'deflate ok' );
+
+# store_inflated_column test
+$cd = $schema->resultset("CD")->find(3);
+eval { $cd->store_inflated_column('year', $now) };
+ok(!$@, 'store_inflated_column with DateTime object');
+$cd->update;
+
+is( $cd->year->year, $now->year, 'deflate ok' );
+
+# update tests
+$cd = $schema->resultset("CD")->find(3);
+eval { $cd->update({'year' => $now}) };
+ok(!$@, 'update using DateTime object ok');
+is($cd->year->year, $now->year, 'deflate ok');
+
+$cd = $schema->resultset("CD")->find(3);
+$before_year = $cd->year->year;
+eval { $cd->update({'year' => \'year + 1'}) };
+ok(!$@, 'update using scalarref ok');
+
+$cd = $schema->resultset("CD")->find(3);
+is($cd->year->year, $before_year + 1, 'deflate ok');
+
+# discard_changes test
+$cd = $schema->resultset("CD")->find(3);
+# inflate the year
+$before_year = $cd->year->year;
+$cd->update({ year => \'year + 1'});
+$cd->discard_changes;
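+# discard_changes re-reads the row from storage, so the cached inflated DateTime is rebuilt from the new value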
+
+is($cd->year->year, $before_year + 1, 'discard_changes clears the inflated value');
+
+# eval { $cd->store_inflated_column('year', \'year + 1') };
+# print STDERR "ERROR: $@" if($@);
+# ok(!$@, 'store_inflated_column to "year + 1"');
+
+# is_deeply( $cd->year, \'year + 1', 'deflate ok' );
+
plan (skip_all => "No suitable serializer found") unless $selected;
-plan (tests => 6);
+plan (tests => 8);
DBICTest::Schema::Serialized->inflate_column( 'serialized',
{ inflate => $selected->{inflater},
deflate => $selected->{deflater},
ok($inflated = $entry->serialized, 'hashref inflation ok');
is_deeply($inflated, $complex1->{serialized}, 'inflated hash matches original');
+my $entry2 = $rs->create({ id => 2, serialized => ''});
+
+eval { $entry2->set_inflated_column('serialized', $complex1->{serialized}) };
+ok(!$@, 'set_inflated_column to a hashref');
+$entry2->update;
+is_deeply($entry2->serialized, $complex1->{serialized}, 'inflated hash matches original');
+
ok($entry->update ({ %{$complex2} }), 'arrayref deflation ok');
ok($inflated = $entry->serialized, 'arrayref inflation ok');
is_deeply($inflated, $complex2->{serialized}, 'inflated array matches original');
BEGIN {
eval "use DBD::SQLite";
- plan $@ ? (skip_all => 'needs DBD::SQLite for testing') : (tests => 3);
+ plan $@ ? (skip_all => 'needs DBD::SQLite for testing') : (tests => 6);
}
my $art = $schema->resultset("Artist")->find(1);
$art->discard_changes;
+ok($art->update({ artistid => 100 }), 'update allows pk mutation');
+
+is($art->artistid, 100, 'pk mutation applied');
+
+my $art_100 = $schema->resultset("Artist")->find(100);
+$art_100->artistid(101);
+ok($art_100->update(), 'update allows pk mutation via column accessor');
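+# the accessor marks artistid dirty, so update() rewrites the PK in place just as the hashref form did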
plan tests => 5;
-DBICTest::Schema->compose_connection('MySQLTest' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('MySQLTest')->connect($dsn, $user, $pass);
-my $dbh = MySQLTest->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
plan tests => 8;
DBICTest::Schema->load_classes( 'Casecheck' );
-DBICTest::Schema->compose_connection('PgTest' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('PgTest')->connect($dsn, $user, $pass);
-my $dbh = PgTest->schema->storage->dbh;
-PgTest->schema->source("Artist")->name("testschema.artist");
+my $dbh = $schema->storage->dbh;
+$schema->source("Artist")->name("testschema.artist");
plan tests => 6;
-DBICTest::Schema->compose_connection('OraTest' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('OraTest')->connect($dsn, $user, $pass);
-my $dbh = OraTest->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
plan tests => 6;
-DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('DB2Test')->connect($dsn, $user, $pass);
-my $dbh = DB2Test->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
plan tests => 6;
-DBICTest::Schema->compose_connection('DB2Test' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('DB2Test')->connect($dsn, $user, $pass);
-my $dbh = DB2Test->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
# Add more for others in the future when they exist (ODBC? ADO? JDBC?)
DBICTest::Schema->storage_type($storage_type);
-DBICTest::Schema->compose_connection( 'MSSQLTest' => $dsn, $user, $pass );
+my $schema = DBICTest::Schema->compose_namespace('MSSQLTest')->connect($dsn, $user, $pass);
-my $dbh = MSSQLTest->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
eval "use DBD::SQLite";
plan $@
? ( skip_all => 'needs DBD::SQLite for testing' )
- : ( tests => 49 );
+ : ( tests => 53 );
}
# figure out if we've got a version of sqlite that is older than 3.2.6, in
;
is( $sa->_recurse_from(@j4), $match, 'join 4 (nested joins + join types) ok');
+my @j5 = (
+ { child => 'person' },
+ [ { father => 'person' }, { 'father.person_id' => \'!= child.father_id' }, ],
+ [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
+);
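+# the scalarref value is interpolated verbatim, yielding "!=" instead of the default "=" in the ON clause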
+$match = 'person child JOIN person father ON ( father.person_id != '
+ . 'child.father_id ) JOIN person mother ON ( mother.person_id '
+ . '= child.mother_id )'
+ ;
+is( $sa->_recurse_from(@j5), $match, 'join 5 (SCALAR reference for ON statement) ok' );
+
+my @j6 = (
+ { child => 'person' },
+ [ { father => 'person' }, { 'father.person_id' => { '!=', '42' } }, ],
+ [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
+);
+$match = qr/^\QHASH reference arguments are not supported in JOINS - try using \"..." instead\E/;
+eval { $sa->_recurse_from(@j6) };
+like( $@, $match, 'join 6 (HASH reference for ON statement dies) ok' );
+
my $rs = $schema->resultset("CD")->search(
{ 'year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
{ from => [ { 'me' => 'cd' },
$schema->storage->debug($orig_debug);
$schema->storage->debugobj->callback(undef);
+
+$rs = $schema->resultset('Artist');
+$rs->create({ artistid => 4, name => 'Unknown singer-songwriter' });
+$rs->create({ artistid => 5, name => 'Emo 4ever' });
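+# neither new artist has any CDs, so the prefetch must cope with consecutive rows whose has_many side is empty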
+@artists = $rs->search(undef, { prefetch => 'cds', order_by => 'artistid' });
+is(scalar @artists, 5, 'has_many prefetch with adjacent empty rows ok');
+
+# -------------
+#
+# Tests for multilevel has_many prefetch
+
+# artist resultsets - with and without prefetch
+my $art_rs = $schema->resultset('Artist');
+my $art_rs_pr = $art_rs->search(
+ {},
+ {
+ join => [ { cds => ['tracks'] } ],
+ prefetch => [ { cds => ['tracks'] } ]
+ }
+);
+
+# This test performs the same traversal twice: once on a set of rows
+# fetched from the db with no prefetch of has_many rels, and once with
+# two levels of has_many prefetched. Each pass builds a hash structure
+# keyed on artist name, CD title and track title, and the two
+# structures are compared to confirm the prefetched results match.
+
+sub make_hash_struc {
+ my $rs = shift;
+
+ my $struc = {};
+ foreach my $art ( $rs->all ) {
+ foreach my $cd ( $art->cds ) {
+ foreach my $track ( $cd->tracks ) {
+ $struc->{ $art->name }{ $cd->title }{ $track->title }++;
+ }
+ }
+ }
+ return $struc;
+}
+
+my $prefetch_result = make_hash_struc($art_rs_pr);
+my $nonpre_result = make_hash_struc($art_rs);
+
+is_deeply( $prefetch_result, $nonpre_result,
+ 'Compare 2 level prefetch result to non-prefetch result' );
my $schema = DBICTest->init_schema();
-plan tests => 43;
+plan tests => 45;
# Check the defined unique constraints
is_deeply(
is($cd8->title, $cd1->title, 'title is correct');
is($cd8->year, $cd1->year, 'year is correct');
-my $cd9 = $artist->update_or_create_related('cds',
+my $cd9 = $artist->cds->update_or_create(
{
+ cdid => $cd1->cdid,
title => $title,
year => 2021,
},
},
{ key => 'foo_bar' }
);
+
ok(! $row->is_changed, 'update_or_create on table without primary key: row is clean');
is($row->foo, 1, 'foo is correct');
is($row->bar, 2, 'bar is correct');
is($row->baz, 3, 'baz is correct');
+
+# Test a unique condition with extra information in the where attr
+{
+ my $artist = $schema->resultset('Artist')->find({ artistid => 1 });
+ my $cd = $artist->cds->find_or_new(
+ {
+ cdid => 1,
+ title => 'Not The Real Title',
+ year => 3000,
+ },
+ { key => 'primary' }
+ );
+
+ ok($cd->in_storage, 'find correctly grepped the key across a relationship');
+ is($cd->cdid, 1, 'cdid is correct');
+}
my $schema = DBICTest->init_schema;
-plan tests => 53;
+plan tests => 54;
my $translator = SQL::Translator->new(
parser_args => {
my $output = $translator->translate();
+
+ok($output, "SQLT produced some output")
+ or diag($translator->error);
+
# Note that the constraints listed here are the only ones that are tested -- if
# more exist in the Schema than are listed here and all listed constraints are
# correct, the test will still pass. If you add a class with UNIQUE or FOREIGN
DBICTest::Schema->storage(undef); # just in case?
DBICTest::Schema->storage_type('::DBI::MySQLNoBindVars');
-DBICTest::Schema->compose_connection('MySQLTest' => $dsn, $user, $pass);
+my $schema = DBICTest::Schema->compose_namespace('MySQLTest')->connect($dsn, $user, $pass);
-my $dbh = MySQLTest->schema->storage->dbh;
+my $dbh = $schema->storage->dbh;
my $schema = DBICTest->init_schema();
-plan tests => 5;
+plan tests => 10;
my $old_artistid = 1;
my $new_artistid = $schema->resultset("Artist")->get_column('artistid')->max + 1;
ok(defined $artist, 'found an artist with the new PK');
is($artist->artistid, $new_artistid, 'artist ID matches');
}
+
+# Do it all over again, using a different methodology:
+$old_artistid = $new_artistid;
+$new_artistid++;
+
+# Update the PK
+{
+ my $artist = $schema->resultset("Artist")->find($old_artistid);
+ ok(defined $artist, 'found an artist with the old PK');
+
+ $artist->artistid($new_artistid);
+ $artist->update;
+ is($artist->artistid, $new_artistid, 'artist ID matches');
+}
+
+# Look for the old PK
+{
+ my $artist = $schema->resultset("Artist")->find($old_artistid);
+ ok(!defined $artist, 'no artist found with the old PK');
+}
+
+# Look for the new PK
+{
+ my $artist = $schema->resultset("Artist")->find($new_artistid);
+ ok(defined $artist, 'found an artist with the new PK');
+ is($artist->artistid, $new_artistid, 'artist ID matches');
+}
--- /dev/null
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Test::More;
+
+BEGIN {
+ eval "use DBD::SQLite; use SQL::Translator;";
+ plan $@
+ ? ( skip_all => 'needs DBD::SQLite and SQL::Translator for testing' )
+ : ( tests => 6 );
+}
+
+use lib qw(t/lib);
+
+use_ok('DBICVersionOrig');
+
+my $db_file = "t/var/versioning.db";
+unlink($db_file) if -e $db_file;
+unlink($db_file . "-journal") if -e $db_file . "-journal";
+mkdir("t/var") unless -d "t/var";
+unlink('t/var/DBICVersion-Schema-1.0-SQLite.sql');
+
+my $schema_orig = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+# $schema_orig->storage->ensure_connected();
+
+is($schema_orig->ddl_filename('SQLite', 't/var', '1.0'), 't/var/DBICVersion-Schema-1.0-SQLite.sql', 'Filename creation working');
+$schema_orig->create_ddl_dir('SQLite', undef, 't/var');
+
+ok(-f 't/var/DBICVersion-Schema-1.0-SQLite.sql', 'Created DDL file');
+## do this here or let Versioned.pm do it?
+# $schema_orig->deploy();
+
+my $tvrs = $schema_orig->resultset('Table');
+is($schema_orig->exists($tvrs), 1, 'Created schema from DDL file');
+
+eval "use DBICVersionNew";
+my $schema_new = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+
+unlink('t/var/DBICVersion-Schema-2.0-SQLite.sql');
+unlink('t/var/DBICVersion-Schema-1.0-2.0-SQLite.sql');
+$schema_new->create_ddl_dir('SQLite', undef, 't/var', '1.0');
+ok(-f 't/var/DBICVersion-Schema-1.0-2.0-SQLite.sql', 'Created DDL upgrade file');
+
+## create new to pick up filedata for upgrade files we just made (on_connect)
+my $schema_upgrade = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+
+## do this here or let Versioned.pm do it?
+$schema_upgrade->upgrade();
+$tvrs = $schema_upgrade->resultset('Table');
+is($schema_upgrade->exists($tvrs), 1, 'Upgraded schema from DDL file');
+
+unlink($db_file) if -e $db_file;
+unlink($db_file . "-journal") if -e $db_file . "-journal";
+unlink('t/var/DBICVersion-Schema-1.0-SQLite.sql');
+unlink('t/var/DBICVersion-Schema-2.0-SQLite.sql');
+unlink('t/var/DBICVersion-Schema-1.0-2.0-SQLite.sql');
+unlink(<t/var/backup/*>);
use_ok('DBICTest');
-DBICTest->init_schema();
+my $schema = DBICTest->init_schema();
-my $sql_maker = DBICTest->schema->storage->sql_maker;
+my $sql_maker = $schema->storage->sql_maker;
$sql_maker->quote_char('`');
$sql_maker->name_sep('.');
use lib 't/lib';
use_ok('DBICTest');
-DBICTest->init_schema();
+my $schema = DBICTest->init_schema();
-DBICTest::CD->load_components(qw/CDBICompat::Pager/);
+DBICTest::CD->load_components(qw/CDBICompat CDBICompat::Pager/);
+
+DBICTest::CD->result_source_instance->schema($schema);
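+# point the class-level result source at the connected schema so the CDBI-style class methods below can find storage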
my ( $pager, $it ) = DBICTest::CD->page(
{},
my $dbuser = $ENV{"DBICTEST_DBUSER"} || '';
my $dbpass = $ENV{"DBICTEST_DBPASS"} || '';
- my $schema = DBICTest::Schema->compose_connection('DBICTest' => $dsn, $dbuser, $dbpass);
+ my $schema = DBICTest::Schema->compose_namespace('DBICTest')
+ ->connect($dsn, $dbuser, $dbpass);
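+ # compose_namespace() only builds the class tree; the explicit connect() supplies the connection that compose_connection used to make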
$schema->storage->on_connect_do(['PRAGMA synchronous = OFF']);
if ( !$args{no_deploy} ) {
__PACKAGE__->deploy_schema( $schema );
DBICTest::Schema::ArtistSourceName;
use base 'DBICTest::Schema::Artist';
-
+__PACKAGE__->table(__PACKAGE__->table);
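+# re-setting the inherited table name gives this subclass its own result source instance instead of sharing the parent's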
__PACKAGE__->source_name('SourceNameArtists');
1;
use base 'DBIx::Class::Core';
-DBICTest::Schema::FourKeys->table('fourkeys');
-DBICTest::Schema::FourKeys->add_columns(
+__PACKAGE__->table('fourkeys');
+__PACKAGE__->add_columns(
'foo' => { data_type => 'integer' },
'bar' => { data_type => 'integer' },
'hello' => { data_type => 'integer' },
'goodbye' => { data_type => 'integer' },
'sensors' => { data_type => 'character' },
);
-DBICTest::Schema::FourKeys->set_primary_key(qw/foo bar hello goodbye/);
+__PACKAGE__->set_primary_key(qw/foo bar hello goodbye/);
-DBICTest::Schema::FourKeys->has_many(
+__PACKAGE__->has_many(
'fourkeys_to_twokeys', 'DBICTest::Schema::FourKeys_to_TwoKeys', {
'foreign.f_foo' => 'self.foo',
'foreign.f_bar' => 'self.bar',
'foreign.f_goodbye' => 'self.goodbye',
});
-DBICTest::Schema::FourKeys->many_to_many(
+__PACKAGE__->many_to_many(
'twokeys', 'fourkeys_to_twokeys', 'twokeys',
);
use base qw/DBIx::Class::Core/;
-DBICTest::Schema::LinerNotes->table('liner_notes');
-DBICTest::Schema::LinerNotes->add_columns(
+__PACKAGE__->table('liner_notes');
+__PACKAGE__->add_columns(
'liner_id' => {
data_type => 'integer',
},
size => 100,
},
);
-DBICTest::Schema::LinerNotes->set_primary_key('liner_id');
-DBICTest::Schema::LinerNotes->belongs_to(
+__PACKAGE__->set_primary_key('liner_id');
+__PACKAGE__->belongs_to(
'cd', 'DBICTest::Schema::CD', 'liner_id'
);
use base 'DBIx::Class::Core';
-DBICTest::Schema::NoPrimaryKey->table('noprimarykey');
-DBICTest::Schema::NoPrimaryKey->add_columns(
+__PACKAGE__->table('noprimarykey');
+__PACKAGE__->add_columns(
'foo' => { data_type => 'integer' },
'bar' => { data_type => 'integer' },
'baz' => { data_type => 'integer' },
);
-DBICTest::Schema::NoPrimaryKey->add_unique_constraint(foo_bar => [ qw/foo bar/ ]);
+__PACKAGE__->add_unique_constraint(foo_bar => [ qw/foo bar/ ]);
1;
use base 'DBIx::Class::Core';
-DBICTest::Schema::OneKey->table('onekey');
-DBICTest::Schema::OneKey->add_columns(
+__PACKAGE__->table('onekey');
+__PACKAGE__->add_columns(
'id' => {
data_type => 'integer',
is_auto_increment => 1,
data_type => 'integer',
},
);
-DBICTest::Schema::OneKey->set_primary_key('id');
+__PACKAGE__->set_primary_key('id');
1;
use base 'DBIx::Class::Core';
-DBICTest::Schema::Serialized->table('serialized');
-DBICTest::Schema::Serialized->add_columns(
+__PACKAGE__->table('serialized');
+__PACKAGE__->add_columns(
'id' => { data_type => 'integer' },
'serialized' => { data_type => 'text' },
);
-DBICTest::Schema::Serialized->set_primary_key('id');
+__PACKAGE__->set_primary_key('id');
1;
--- /dev/null
+package DBICVersion::Table;
+
+use base 'DBIx::Class';
+use strict;
+use warnings;
+
+__PACKAGE__->load_components(qw/Core/);
+__PACKAGE__->table('TestVersion');
+
+__PACKAGE__->add_columns
+ ( 'Version' => {
+ 'data_type' => 'INTEGER',
+ 'is_auto_increment' => 1,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'is_nullable' => 0,
+ 'size' => ''
+ },
+ 'VersionName' => {
+ 'data_type' => 'VARCHAR',
+ 'is_auto_increment' => 0,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'is_nullable' => 1,
+ 'size' => '20'
+ },
+ );
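+# note: VersionName here is size 20 and nullable; the 1.0 definition below uses size 10 and NOT NULL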
+
+__PACKAGE__->set_primary_key('Version');
+
+package DBICVersion::Schema;
+use base 'DBIx::Class::Schema';
+use strict;
+use warnings;
+
+our $VERSION = '2.0';
+
+__PACKAGE__->register_class('Table', 'DBICVersion::Table');
+__PACKAGE__->load_components('+DBIx::Class::Schema::Versioned');
+__PACKAGE__->upgrade_directory('t/var/');
+__PACKAGE__->backup_directory('t/var/backup/');
+
+#sub upgrade_directory
+#{
+# return 't/var/';
+#}
+
+1;
--- /dev/null
+package DBICVersion::Table;
+
+use base 'DBIx::Class';
+use strict;
+use warnings;
+
+__PACKAGE__->load_components(qw/Core/);
+__PACKAGE__->table('TestVersion');
+
+__PACKAGE__->add_columns
+ ( 'Version' => {
+ 'data_type' => 'INTEGER',
+ 'is_auto_increment' => 1,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'is_nullable' => 0,
+ 'size' => ''
+ },
+ 'VersionName' => {
+ 'data_type' => 'VARCHAR',
+ 'is_auto_increment' => 0,
+ 'default_value' => undef,
+ 'is_foreign_key' => 0,
+ 'is_nullable' => 0,
+ 'size' => '10'
+ },
+ );
+
+__PACKAGE__->set_primary_key('Version');
+
+package DBICVersion::Schema;
+use base 'DBIx::Class::Schema';
+use strict;
+use warnings;
+
+our $VERSION = '1.0';
+
+__PACKAGE__->register_class('Table', 'DBICVersion::Table');
+__PACKAGE__->load_components('+DBIx::Class::Schema::Versioned');
+
+sub upgrade_directory
+{
+ return 't/var/';
+}
+
+1;