use Carp::Clan qw/^DBIx::Class/;
use Data::Page;
use Storable;
-use Data::Dumper;
-use Scalar::Util qw/weaken/;
-
use DBIx::Class::ResultSetColumn;
use base qw/DBIx::Class/;
+
+use Data::Dumper; $Data::Dumper::Indent = 1;
+
__PACKAGE__->load_components(qw/AccessorGroup/);
__PACKAGE__->mk_group_accessors('simple' => qw/result_source result_class/);
sub new {
my $class = shift;
return $class->new_result(@_) if ref $class;
-
+
my ($source, $attrs) = @_;
- weaken $source;
+ #weaken $source;
if ($attrs->{page}) {
$attrs->{rows} ||= 10;
# from => $attrs->{from},
# collapse => $collapse,
count => undef,
- page => delete $attrs->{page},
pager => undef,
attrs => $attrs
}, $class;
=back
-This method does the same exact thing as search() except it will
+This method does the same exact thing as search() except it will
always return a resultset, even in list context.
=cut
sub search_rs {
my $self = shift;
- my $our_attrs = { %{$self->{attrs}} };
- my $having = delete $our_attrs->{having};
+ my $rows;
+
+ unless (@_) { # no search, effectively just a clone
+ $rows = $self->get_cache;
+ }
+
my $attrs = {};
$attrs = pop(@_) if @_ > 1 and ref $_[$#_] eq 'HASH';
-
- # merge new attrs into old
- foreach my $key (qw/join prefetch/) {
- next unless (exists $attrs->{$key});
- if (exists $our_attrs->{$key}) {
- $our_attrs->{$key} = $self->_merge_attr($our_attrs->{$key}, $attrs->{$key});
- } else {
- $our_attrs->{$key} = $attrs->{$key};
- }
- delete $attrs->{$key};
- }
+ my $our_attrs = { %{$self->{attrs}} };
+ my $having = delete $our_attrs->{having};
- if (exists $our_attrs->{prefetch}) {
- $our_attrs->{join} = $self->_merge_attr($our_attrs->{join}, $our_attrs->{prefetch}, 1);
+ # merge new attrs into inherited
+ foreach my $key (qw/join prefetch/) {
+ next unless exists $attrs->{$key};
+ $our_attrs->{$key} = $self->_merge_attr($our_attrs->{$key}, delete $attrs->{$key});
}
-
+
my $new_attrs = { %{$our_attrs}, %{$attrs} };
-
- # merge new where and having into old
my $where = (@_
- ? ((@_ == 1 || ref $_[0] eq "HASH")
- ? shift
- : ((@_ % 2)
- ? $self->throw_exception(
- "Odd number of arguments to search")
- : {@_}))
- : undef());
+ ? (
+ (@_ == 1 || ref $_[0] eq "HASH")
+ ? shift
+ : (
+ (@_ % 2)
+ ? $self->throw_exception("Odd number of arguments to search")
+ : {@_}
+ )
+ )
+ : undef
+ );
+
if (defined $where) {
- $new_attrs->{where} = (defined $new_attrs->{where}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $where, $new_attrs->{where} ] }
- : $where);
+ $new_attrs->{where} = (
+ defined $new_attrs->{where}
+ ? { '-and' => [
+ map {
+ ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_
+ } $where, $new_attrs->{where}
+ ]
+ }
+ : $where);
}
if (defined $having) {
- $new_attrs->{having} = (defined $new_attrs->{having}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $having, $new_attrs->{having} ] }
- : $having);
+ $new_attrs->{having} = (
+ defined $new_attrs->{having}
+ ? { '-and' => [
+ map {
+ ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_
+ } $having, $new_attrs->{having}
+ ]
+ }
+ : $having);
}
my $rs = (ref $self)->new($self->result_source, $new_attrs);
- $rs->{_parent_rs} = $self->{_parent_rs} if ($self->{_parent_rs}); #XXX - hack to pass through parent of related resultsets
-
- unless (@_) { # no search, effectively just a clone
- my $rows = $self->get_cache;
- if ($rows) {
- $rs->set_cache($rows);
- }
+ if ($rows) {
+ $rs->set_cache($rows);
}
-
return $rs;
}
You can also find a row by a specific unique constraint using the C<key>
attribute. For example:
- my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', { key => 'artist_title' });
+ my $cd = $schema->resultset('CD')->find('Massive Attack', 'Mezzanine', {
+ key => 'cd_artist_title'
+ });
Additionally, you can specify the columns explicitly by name:
artist => 'Massive Attack',
title => 'Mezzanine',
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
If the C<key> is specified as C<primary>, it searches only on the primary key.
) unless @cols;
# Parse out a hashref from input
- my $cond;
+ my $input_query;
if (ref $_[0] eq 'HASH') {
- $cond = { %{$_[0]} };
+ $input_query = { %{$_[0]} };
}
elsif (@_ == @cols) {
- $cond = {};
- @{$cond}{@cols} = @_;
+ $input_query = {};
+ @{$input_query}{@cols} = @_;
}
else {
# Compatibility: Allow e.g. find(id => $value)
- carp "find by key => value deprecated; please use a hashref instead";
- $cond = {@_};
+ carp "Find by key => value deprecated; please use a hashref instead";
+ $input_query = {@_};
}
- return $self->_find($cond, $attrs);
+ my @unique_queries = $self->_unique_queries($input_query, $attrs);
+
+ # Handle cases where the ResultSet defines the query, or where the user is
+ # abusing find
+ my $query = @unique_queries ? \@unique_queries : $input_query;
+
+ # Run the query
+ if (keys %$attrs) {
+ my $rs = $self->search($query, $attrs);
+ return keys %{$rs->_resolved_attrs->{collapse}} ? $rs->next : $rs->single;
+ }
+ else {
+ return keys %{$self->_resolved_attrs->{collapse}}
+ ? $self->search($query)->next
+ : $self->single($query);
+ }
}
-# _find
+# _unique_queries
#
-# Helper method: search against the unique constraints.
+# Build a list of queries which satisfy unique constraints.
-sub _find {
- my ($self, $cond, $attrs) = @_;
+sub _unique_queries {
+ my ($self, $query, $attrs) = @_;
- # Check the condition against our source's unique constraints
+ my $alias = $self->{attrs}{alias};
my @constraint_names = exists $attrs->{key}
? ($attrs->{key})
: $self->result_source->unique_constraint_names;
- my @unique_conds;
+ my @unique_queries;
foreach my $name (@constraint_names) {
my @unique_cols = $self->result_source->unique_constraint_columns($name);
- my $unique_cond = $self->_build_unique_query($cond, \@unique_cols);
+ my $unique_query = $self->_build_unique_query($query, \@unique_cols);
- next unless scalar keys %$unique_cond == scalar @unique_cols;
+ my $num_query = scalar keys %$unique_query;
+ next unless $num_query;
# Add the ResultSet's alias
- foreach my $key (grep { ! m/\./ } keys %$unique_cond) {
- $unique_cond->{"$self->{attrs}{alias}.$key"} = delete $unique_cond->{$key};
+ foreach my $col (grep { ! m/\./ } keys %$unique_query) {
+ $unique_query->{"$alias.$col"} = delete $unique_query->{$col};
}
- push @unique_conds, $unique_cond;
- }
-# use Data::Dumper; warn Dumper $self->result_source->name, $cond, \@unique_conds;
-
- # Verify the query
- my $query = \@unique_conds;
- if (scalar @unique_conds == 0) {
- if (exists $attrs->{key}) {
- $self->throw_exception("required values for the $attrs->{key} key not provided");
- }
- else {
- # Compatibility: Allow broken find usage for now
- carp "find requires values for the primary key or a unique constraint"
- . "; please use search instead";
- $query = $cond;
- }
+ # XXX: Assuming quite a bit about $self->{attrs}{where}
+ my $num_cols = scalar @unique_cols;
+ my $num_where = exists $self->{attrs}{where}
+ ? scalar keys %{ $self->{attrs}{where} }
+ : 0;
+ push @unique_queries, $unique_query
+ if $num_query + $num_where == $num_cols;
}
- # Run the query
- if (keys %$attrs) {
- my $rs = $self->search($query, $attrs);
- $rs->_resolve;
- return keys %{$rs->{_attrs}->{collapse}} ? $rs->next : $rs->single;
- }
- else {
- $self->_resolve;
- return (keys %{$self->{_attrs}->{collapse}})
- ? $self->search($query)->next
- : $self->single($query);
- }
+ return @unique_queries;
}
# _build_unique_query
sub _build_unique_query {
my ($self, $query, $unique_cols) = @_;
- my %unique_query =
+ return {
map { $_ => $query->{$_} }
grep { exists $query->{$_} }
- @$unique_cols;
-
- return \%unique_query;
+ @$unique_cols
+ };
}
=head2 search_related
=over 4
-=item Arguments: $cond, \%attrs?
+=item Arguments: $rel, $cond, \%attrs?
=item Return Value: $new_resultset
sub cursor {
my ($self) = @_;
- $self->_resolve;
- my $attrs = { %{$self->{_attrs}} };
+ my $attrs = { %{$self->_resolved_attrs} };
return $self->{cursor}
||= $self->result_source->storage->select($attrs->{from}, $attrs->{select},
$attrs->{where},$attrs);
sub single {
my ($self, $where) = @_;
- $self->_resolve;
- my $attrs = { %{$self->{_attrs}} };
+ my $attrs = { %{$self->_resolved_attrs} };
if ($where) {
if (defined $attrs->{where}) {
$attrs->{where} = {
}
}
+ unless ($self->_is_unique_query($attrs->{where})) {
+ carp "Query not guaranteed to return a single row"
+ . "; please declare your unique constraints or use search instead";
+ }
+
my @data = $self->result_source->storage->select_single(
- $attrs->{from}, $attrs->{select},
- $attrs->{where},$attrs);
+ $attrs->{from}, $attrs->{select},
+ $attrs->{where}, $attrs
+ );
+
return (@data ? $self->_construct_object(@data) : ());
}
+# _is_unique_query
+#
+# Try to determine if the specified query is guaranteed to be unique, based on
+# the declared unique constraints.
+
+sub _is_unique_query {
+ my ($self, $query) = @_;
+
+ my $collapsed = $self->_collapse_query($query);
+ my $alias = $self->{attrs}{alias};
+
+ foreach my $name ($self->result_source->unique_constraint_names) {
+ my @unique_cols = map {
+ "$alias.$_"
+ } $self->result_source->unique_constraint_columns($name);
+
+ # Count the values for each unique column
+ my %seen = map { $_ => 0 } @unique_cols;
+
+ foreach my $key (keys %$collapsed) {
+ my $aliased = $key =~ /\./ ? $key : "$alias.$key";
+ next unless exists $seen{$aliased}; # Additional constraints are okay
+ $seen{$aliased} = scalar keys %{ $collapsed->{$key} };
+ }
+
+ # If we get 0 or more than 1 value for a column, it's not necessarily unique
+ return 1 unless grep { $_ != 1 } values %seen;
+ }
+
+ return 0;
+}
+
+# _collapse_query
+#
+# Recursively collapse the query, accumulating values for each column.
+
+sub _collapse_query {
+ my ($self, $query, $collapsed) = @_;
+
+ $collapsed ||= {};
+
+ if (ref $query eq 'ARRAY') {
+ foreach my $subquery (@$query) {
+ next unless ref $subquery; # -or
+# warn "ARRAY: " . Dumper $subquery;
+ $collapsed = $self->_collapse_query($subquery, $collapsed);
+ }
+ }
+ elsif (ref $query eq 'HASH') {
+ if (keys %$query and (keys %$query)[0] eq '-and') {
+ foreach my $subquery (@{$query->{-and}}) {
+# warn "HASH: " . Dumper $subquery;
+ $collapsed = $self->_collapse_query($subquery, $collapsed);
+ }
+ }
+ else {
+# warn "LEAF: " . Dumper $query;
+ foreach my $col (keys %$query) {
+ my $value = $query->{$col};
+ $collapsed->{$col}{$value}++;
+ }
+ }
+ }
+
+ return $collapsed;
+}
+
=head2 get_column
=over 4
sub get_column {
my ($self, $column) = @_;
-
my $new = DBIx::Class::ResultSetColumn->new($self, $column);
return $new;
}
print $cd->title;
}
-Note that you need to store the resultset object, and call C<next> on it.
+Note that you need to store the resultset object, and call C<next> on it.
Calling C<< resultset('Table')->next >> repeatedly will always return the
first record from the resultset.
$self->{all_cache_position} = 1;
return ($self->all)[0];
}
- my @row = (exists $self->{stashed_row} ?
- @{delete $self->{stashed_row}} :
- $self->cursor->next
+ my @row = (
+ exists $self->{stashed_row}
+ ? @{delete $self->{stashed_row}}
+ : $self->cursor->next
);
return unless (@row);
return $self->_construct_object(@row);
}
-sub _resolve {
- my $self = shift;
-
- return if(exists $self->{_attrs}); #return if _resolve has already been called
-
- my $attrs = $self->{attrs};
- my $source = ($self->{_parent_rs}) ? $self->{_parent_rs} : $self->{result_source};
-
- # XXX - lose storable dclone
- my $record_filter = delete $attrs->{record_filter} if (defined $attrs->{record_filter});
- $attrs = Storable::dclone($attrs || {}); # { %{ $attrs || {} } };
- $attrs->{record_filter} = $record_filter if ($record_filter);
- $self->{attrs}->{record_filter} = $record_filter if ($record_filter);
-
- my $alias = $attrs->{alias};
-
- $attrs->{columns} ||= delete $attrs->{cols} if $attrs->{cols};
- delete $attrs->{as} if $attrs->{columns};
- $attrs->{columns} ||= [ $self->{result_source}->columns ] unless $attrs->{select};
- my $select_alias = ($self->{_parent_rs}) ? $self->{attrs}->{_live_join} : $alias;
- $attrs->{select} = [
- map { m/\./ ? $_ : "${select_alias}.$_" } @{delete $attrs->{columns}}
- ] if $attrs->{columns};
- $attrs->{as} ||= [
- map { m/^\Q$alias.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
- ];
- if (my $include = delete $attrs->{include_columns}) {
- push(@{$attrs->{select}}, @$include);
- push(@{$attrs->{as}}, map { m/([^.]+)$/; $1; } @$include);
- }
-
- $attrs->{from} ||= [ { $alias => $source->from } ];
- $attrs->{seen_join} ||= {};
- my %seen;
- if (my $join = delete $attrs->{join}) {
- foreach my $j (ref $join eq 'ARRAY' ? @$join : ($join)) {
- if (ref $j eq 'HASH') {
- $seen{$_} = 1 foreach keys %$j;
- } else {
- $seen{$j} = 1;
- }
- }
-
- push(@{$attrs->{from}}, $source->resolve_join($join, $attrs->{alias}, $attrs->{seen_join}));
- }
- $attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct};
- $attrs->{order_by} = [ $attrs->{order_by} ] if
- $attrs->{order_by} and !ref($attrs->{order_by});
- $attrs->{order_by} ||= [];
-
- if(my $seladds = delete($attrs->{'+select'})) {
- my @seladds = (ref($seladds) eq 'ARRAY' ? @$seladds : ($seladds));
- $attrs->{select} = [
- @{ $attrs->{select} },
- map { (m/\./ || ref($_)) ? $_ : "${alias}.$_" } $seladds
- ];
- }
- if(my $asadds = delete($attrs->{'+as'})) {
- my @asadds = (ref($asadds) eq 'ARRAY' ? @$asadds : ($asadds));
- $attrs->{as} = [ @{ $attrs->{as} }, @asadds ];
- }
-
- my $collapse = $attrs->{collapse} || {};
- if (my $prefetch = delete $attrs->{prefetch}) {
- my @pre_order;
- foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
- if ( ref $p eq 'HASH' ) {
- foreach my $key (keys %$p) {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$key};
- }
- } else {
- push(@{$attrs->{from}}, $source->resolve_join($p, $attrs->{alias}))
- unless $seen{$p};
- }
- my @prefetch = $source->resolve_prefetch(
- $p, $attrs->{alias}, {}, \@pre_order, $collapse);
- push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
- push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
- }
- push(@{$attrs->{order_by}}, @pre_order);
- }
- $attrs->{collapse} = $collapse;
- $self->{_attrs} = $attrs;
-}
-
-sub _merge_attr {
- my ($self, $a, $b, $is_prefetch) = @_;
-
- return $b unless $a;
- if (ref $b eq 'HASH' && ref $a eq 'HASH') {
- foreach my $key (keys %{$b}) {
- if (exists $a->{$key}) {
- $a->{$key} = $self->_merge_attr($a->{$key}, $b->{$key}, $is_prefetch);
- } else {
- $a->{$key} = delete $b->{$key};
- }
- }
- return $a;
- } else {
- $a = [$a] unless (ref $a eq 'ARRAY');
- $b = [$b] unless (ref $b eq 'ARRAY');
-
- my $hash = {};
- my $array = [];
- foreach ($a, $b) {
- foreach my $element (@{$_}) {
- if (ref $element eq 'HASH') {
- $hash = $self->_merge_attr($hash, $element, $is_prefetch);
- } elsif (ref $element eq 'ARRAY') {
- $array = [@{$array}, @{$element}];
- } else {
- if (($b == $_) && $is_prefetch) {
- $self->_merge_array($array, $element, $is_prefetch);
- } else {
- push(@{$array}, $element);
- }
- }
- }
- }
-
- if ((keys %{$hash}) && (scalar(@{$array} > 0))) {
- return [$hash, @{$array}];
- } else {
- return (keys %{$hash}) ? $hash : $array;
- }
- }
-}
-
-sub _merge_array {
- my ($self, $a, $b) = @_;
-
- $b = [$b] unless (ref $b eq 'ARRAY');
- # add elements from @{$b} to @{$a} which aren't already in @{$a}
- foreach my $b_element (@{$b}) {
- push(@{$a}, $b_element) unless grep {$b_element eq $_} @{$a};
- }
-}
-
sub _construct_object {
my ($self, @row) = @_;
- my @as = @{ $self->{_attrs}{as} };
-
- my $info = $self->_collapse_result(\@as, \@row);
+ my $info = $self->_collapse_result($self->{_attrs}{as}, \@row);
my $new = $self->result_class->inflate_result($self->result_source, @$info);
$new = $self->{_attrs}{record_filter}->($new)
if exists $self->{_attrs}{record_filter};
sub _collapse_result {
my ($self, $as, $row, $prefix) = @_;
- my $live_join = $self->{attrs}->{_live_join} ||="";
my %const;
-
my @copy = @$row;
+
foreach my $this_as (@$as) {
my $val = shift @copy;
if (defined $prefix) {
}
}
+ my $alias = $self->{attrs}{alias};
my $info = [ {}, {} ];
foreach my $key (keys %const) {
- if (length $key && $key ne $live_join) {
+ if (length $key && $key ne $alias) {
my $target = $info;
my @parts = split(/\./, $key);
foreach my $p (@parts) {
$info->[0] = $const{$key};
}
}
-
+
my @collapse;
if (defined $prefix) {
@collapse = map {
m/^\Q${prefix}.\E(.+)$/ ? ($1) : ()
- } keys %{$self->{_attrs}->{collapse}}
+ } keys %{$self->{_attrs}{collapse}}
} else {
- @collapse = keys %{$self->{_attrs}->{collapse}};
+ @collapse = keys %{$self->{_attrs}{collapse}};
};
if (@collapse) {
$target = $target->[1]->{$p} ||= [];
}
my $c_prefix = (defined($prefix) ? "${prefix}.${c}" : $c);
- my @co_key = @{$self->{_attrs}->{collapse}{$c_prefix}};
- my %co_check = map { ($_, $target->[0]->{$_}); } @co_key;
+ my @co_key = @{$self->{_attrs}{collapse}{$c_prefix}};
my $tree = $self->_collapse_result($as, $row, $c_prefix);
+ my %co_check = map { ($_, $tree->[0]->{$_}); } @co_key;
my (@final, @raw);
- while ( !(grep {
- !defined($tree->[0]->{$_}) ||
- $co_check{$_} ne $tree->[0]->{$_}
- } @co_key) ) {
+
+ while (
+ !(
+ grep {
+ !defined($tree->[0]->{$_}) || $co_check{$_} ne $tree->[0]->{$_}
+ } @co_key
+ )
+ ) {
push(@final, $tree);
last unless (@raw = $self->cursor->next);
$row = $self->{stashed_row} = \@raw;
$tree = $self->_collapse_result($as, $row, $c_prefix);
}
- @$target = (@final ? @final : [ {}, {} ]);
+ @$target = (@final ? @final : [ {}, {} ]);
# single empty result to indicate an empty prefetched has_many
}
+
+ #print "final info: " . Dumper($info);
return $info;
}
my $self = shift;
return $self->search(@_)->count if @_ and defined $_[0];
return scalar @{ $self->get_cache } if $self->get_cache;
-
my $count = $self->_count;
return 0 unless $count;
sub _count { # Separated out so pager can get the full count
my $self = shift;
my $select = { count => '*' };
-
- $self->_resolve;
- my $attrs = { %{ $self->{_attrs} } };
+
+ my $attrs = { %{$self->_resolved_attrs} };
if (my $group_by = delete $attrs->{group_by}) {
delete $attrs->{having};
my @distinct = (ref $group_by ? @$group_by : ($group_by));
# todo: try CONCAT for multi-column pk
my @pk = $self->result_source->primary_columns;
if (@pk == 1) {
+ my $alias = $attrs->{alias};
foreach my $column (@distinct) {
- if ($column =~ qr/^(?:\Q$attrs->{alias}.\E)?$pk[0]$/) {
+ if ($column =~ qr/^(?:\Q${alias}.\E)?$pk[0]$/) {
@distinct = ($column);
last;
}
# offset, order by and page are not needed to count. record_filter is cdbi
delete $attrs->{$_} for qw/rows offset order_by page pager record_filter/;
- my ($count) = (ref $self)->new($self->result_source, $attrs)->cursor->next;
+
+ my $tmp_rs = (ref $self)->new($self->result_source, $attrs);
+ my ($count) = $tmp_rs->cursor->next;
return $count;
}
my @obj;
# TODO: don't call resolve here
- $self->_resolve;
- if (keys %{$self->{_attrs}->{collapse}}) {
-# if ($self->{attrs}->{prefetch}) {
+ if (keys %{$self->_resolved_attrs->{collapse}}) {
+# if ($self->{attrs}{prefetch}) {
# Using $self->cursor->all is really just an optimisation.
# If we're collapsing has_many prefetches it probably makes
# very little difference, and this is cleaner than hacking
sub reset {
my ($self) = @_;
- delete $self->{_attrs} if (exists $self->{_attrs});
-
+ delete $self->{_attrs} if exists $self->{_attrs};
$self->{all_cache_position} = 0;
$self->cursor->reset;
return $self;
my ($self) = @_;
my $cond = {};
- if (!ref($self->{cond})) {
- # No-op. No condition, we're updating/deleting everything
- }
- elsif (ref $self->{cond} eq 'ARRAY') {
+ # No-op. No condition, we're updating/deleting everything
+ return $cond unless ref $self->{cond};
+
+ if (ref $self->{cond} eq 'ARRAY') {
$cond = [
map {
my %hash;
$cond->{-and} = [];
my @cond = @{$self->{cond}{-and}};
- for (my $i = 0; $i <= @cond - 1; $i++) {
+ for (my $i = 0; $i < @cond; $i++) {
my $entry = $cond[$i];
my %hash;
sub delete {
my ($self) = @_;
- my $del = {};
my $cond = $self->_cond_for_update_delete;
my ($self) = @_;
my $attrs = $self->{attrs};
$self->throw_exception("Can't create pager for non-paged rs")
- unless $self->{page};
+ unless $self->{attrs}{page};
$attrs->{rows} ||= 10;
return $self->{pager} ||= Data::Page->new(
- $self->_count, $attrs->{rows}, $self->{page});
+ $self->_count, $attrs->{rows}, $self->{attrs}{page});
}
=head2 page
sub page {
my ($self, $page) = @_;
- my $attrs = { %{$self->{attrs}} };
- $attrs->{page} = $page;
- return (ref $self)->new($self->result_source, $attrs);
+ return (ref $self)->new($self->result_source, { %{$self->{attrs}}, page => $page });
}
=head2 new_result
$class->find_or_create({ key => $val, ... });
-Searches for a record matching the search condition; if it doesn't find one,
-creates one and returns that instead.
+Tries to find a record based on its primary key or unique constraint; if none
+is found, creates one and returns that instead.
my $cd = $schema->resultset('CD')->find_or_create({
cdid => 5,
artist => 'Massive Attack',
title => 'Mezzanine',
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
See also L</find> and L</update_or_create>. For information on how to declare
title => 'Mezzanine',
year => 1998,
},
- { key => 'artist_title' }
+ { key => 'cd_artist_title' }
);
If no C<key> is specified, it searches on all unique constraints defined on the
sub update_or_create {
my $self = shift;
my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {});
- my $hash = ref $_[0] eq 'HASH' ? shift : {@_};
+ my $cond = ref $_[0] eq 'HASH' ? shift : {@_};
- my @constraint_names = exists $attrs->{key}
- ? ($attrs->{key})
- : $self->result_source->unique_constraint_names;
- $self->throw_exception(
- "update_or_create requires a primary key or unique constraint; none is defined on "
- . $self->result_source->name
- ) unless @constraint_names;
-
- my @unique_queries;
- foreach my $name (@constraint_names) {
- my @unique_cols = $self->result_source->unique_constraint_columns($name);
- my $unique_query = $self->_build_unique_query($hash, \@unique_cols);
-
- push @unique_queries, $unique_query
- if keys %$unique_query == @unique_cols;
+ my $row = $self->find($cond);
+ if (defined $row) {
+ $row->update($cond);
+ return $row;
}
- if (@unique_queries) {
- my $row = $self->single(\@unique_queries);
- if (defined $row) {
- $row->update($hash);
- return $row;
- }
- }
-
- return $self->create($hash);
+ return $self->create($cond);
}
=head2 get_cache
=cut
sub related_resultset {
- my ( $self, $rel ) = @_;
+ my ($self, $rel) = @_;
$self->{related_resultsets} ||= {};
return $self->{related_resultsets}{$rel} ||= do {
- #warn "fetching related resultset for rel '$rel' " . $self->result_source->{name};
- my $rel_obj = $self->result_source->relationship_info($rel);
- $self->throw_exception(
- "search_related: result source '" . $self->result_source->name .
- "' has no such relationship ${rel}")
- unless $rel_obj; #die Dumper $self->{attrs};
-
- my $rs = $self->result_source->schema->resultset($rel_obj->{class}
- )->search( undef,
- { %{$self->{attrs}},
- select => undef,
- as => undef,
- join => $rel,
- _live_join => $rel }
- );
-
- # keep reference of the original resultset
- $rs->{_parent_rs} = $self->result_source;
- return $rs;
+ my $rel_obj = $self->result_source->relationship_info($rel);
+
+ $self->throw_exception(
+ "search_related: result source '" . $self->result_source->name .
+ "' has no such relationship $rel")
+ unless $rel_obj;
+
+ my $rs = $self->search(undef, { join => $rel });
+ my ($from,$seen) = $rs->_resolve_from;
+
+ my $join_count = $self->{attrs}{seen_join}{$rel};
+ my $alias = $join_count ? join('_', $rel, $join_count+1) : $rel;
+
+ $self->result_source->schema->resultset($rel_obj->{class})->search_rs(
+ undef, {
+ select => undef,
+ as => undef,
+ alias => $alias,
+ where => $self->{cond},
+ _parent_from => $from,
+ seen_join => $seen,
+ });
};
}
+sub _resolve_from {
+ my ($self) = @_;
+ my $source = $self->result_source;
+ my $attrs = $self->{attrs};
+
+ my $from = $attrs->{_parent_from}
+ || [ { $attrs->{alias} => $source->from } ];
+# ? [ @{$attrs->{_parent_from}} ]
+# : undef;
+
+ my $seen = { %{$attrs->{seen_join}||{}} };
+
+ if ($attrs->{join}) {
+ push(@{$from},
+ $source->resolve_join($attrs->{join}, $attrs->{alias}, $seen)
+ );
+ }
+
+ return ($from,$seen);
+}
+
+sub _resolved_attrs {
+ my $self = shift;
+ return $self->{_attrs} if $self->{_attrs};
+
+ my $attrs = { %{$self->{attrs}||{}} };
+ my $source = $self->{result_source};
+ my $alias = $attrs->{alias};
+
+ # XXX - lose storable dclone
+ my $record_filter = delete $attrs->{record_filter};
+ #$attrs = Storable::dclone($attrs || {}); # { %{ $attrs || {} } };
+
+ $attrs->{record_filter} = $record_filter if $record_filter;
+
+ $attrs->{columns} ||= delete $attrs->{cols} if exists $attrs->{cols};
+ if ($attrs->{columns}) {
+ delete $attrs->{as};
+ } elsif (!$attrs->{select}) {
+ $attrs->{columns} = [ $self->{result_source}->columns ];
+ }
+
+ $attrs->{select} ||= [
+ map { m/\./ ? $_ : "${alias}.$_" } @{delete $attrs->{columns}}
+ ];
+ $attrs->{as} ||= [
+ map { m/^\Q${alias}.\E(.+)$/ ? $1 : $_ } @{$attrs->{select}}
+ ];
+
+ my $adds;
+ if ($adds = delete $attrs->{include_columns}) {
+ $adds = [$adds] unless ref $adds eq 'ARRAY';
+ push(@{$attrs->{select}}, @$adds);
+ push(@{$attrs->{as}}, map { m/([^.]+)$/; $1 } @$adds);
+ }
+ if ($adds = delete $attrs->{'+select'}) {
+ $adds = [$adds] unless ref $adds eq 'ARRAY';
+ push(@{$attrs->{select}}, map { /\./ || ref $_ ? $_ : "${alias}.$_" } @$adds);
+ }
+ if (my $adds = delete $attrs->{'+as'}) {
+ $adds = [$adds] unless ref $adds eq 'ARRAY';
+ push(@{$attrs->{as}}, @$adds);
+ }
+
+ $attrs->{from} ||= delete $attrs->{_parent_from}
+ || [ { 'me' => $source->from } ];
+
+ if (exists $attrs->{join} || exists $attrs->{prefetch}) {
+
+ my $join = delete $attrs->{join} || {};
+
+ if (defined $attrs->{prefetch}) {
+ $join = $self->_merge_attr(
+ $join, $attrs->{prefetch}
+ );
+ }
+
+ push(@{$attrs->{from}},
+ $source->resolve_join($join, $alias, { %{$attrs->{seen_join}||{}} })
+ );
+ }
+
+ $attrs->{group_by} ||= $attrs->{select} if delete $attrs->{distinct};
+ if ($attrs->{order_by}) {
+ $attrs->{order_by} = [ $attrs->{order_by} ] unless ref $attrs->{order_by};
+ } else {
+ $attrs->{order_by} ||= [];
+ }
+
+ my $collapse = $attrs->{collapse} || {};
+ if (my $prefetch = delete $attrs->{prefetch}) {
+ my @pre_order;
+ foreach my $p (ref $prefetch eq 'ARRAY' ? @$prefetch : ($prefetch)) {
+ # bring joins back to level of current class
+ my @prefetch = $source->resolve_prefetch(
+ $p, $alias, { %{$attrs->{seen_join}||{}} }, \@pre_order, $collapse
+ );
+ push(@{$attrs->{select}}, map { $_->[0] } @prefetch);
+ push(@{$attrs->{as}}, map { $_->[1] } @prefetch);
+ }
+ push(@{$attrs->{order_by}}, @pre_order);
+ }
+ $attrs->{collapse} = $collapse;
+
+ return $self->{_attrs} = $attrs;
+}
+
+sub _merge_attr {
+ my ($self, $a, $b) = @_;
+ return $b unless $a;
+
+ if (ref $b eq 'HASH' && ref $a eq 'HASH') {
+ foreach my $key (keys %{$b}) {
+ if (exists $a->{$key}) {
+ $a->{$key} = $self->_merge_attr($a->{$key}, $b->{$key});
+ } else {
+ $a->{$key} = $b->{$key};
+ }
+ }
+ return $a;
+ } else {
+ $a = [$a] unless ref $a eq 'ARRAY';
+ $b = [$b] unless ref $b eq 'ARRAY';
+
+ my $hash = {};
+ my @array;
+ foreach my $x ($a, $b) {
+ foreach my $element (@{$x}) {
+ if (ref $element eq 'HASH') {
+ $hash = $self->_merge_attr($hash, $element);
+ } elsif (ref $element eq 'ARRAY') {
+ push(@array, @{$element});
+ } else {
+ push(@array, $element) unless $b == $x
+ && grep { $_ eq $element } @array;
+ }
+ }
+ }
+
+ @array = grep { !exists $hash->{$_} } @array;
+
+ return keys %{$hash}
+ ? ( scalar(@array)
+ ? [$hash, @array]
+ : $hash
+ )
+ : \@array;
+ }
+}
+
=head2 throw_exception
See L<DBIx::Class::Schema/throw_exception> for details.
through directly to SQL, so you can give e.g. C<year DESC> for a
descending order on the column `year'.
-Please note that if you have quoting enabled (see
+Please note that if you have quoting enabled (see
L<DBIx::Class::Storage/quote_char>) you will need to do C<\'year DESC' > to
specify an order. (The scalar ref causes it to be passed as raw sql to the DB,
so you will need to manually quote things as appropriate.)
Makes the resultset paged and specifies the page to retrieve. Effectively
identical to creating a non-pages resultset and then calling ->page($page)
-on it.
+on it.
If L<rows> attribute is not specified it defualts to 10 rows per page.
HAVING is a select statement attribute that is applied between GROUP BY and
ORDER BY. It is applied to the after the grouping calculations have been
-done.
+done.
having => { 'count(employee)' => { '>=', 100 } }
Set to 1 to group by all columns.
+=head2 where
+
+=over 4
+
+Adds to the WHERE clause.
+
+ # only return rows WHERE deleted IS NULL for all searches
+  __PACKAGE__->resultset_attributes({ where => { deleted => undef } });
+
+Can be overridden by passing C<{ where => undef }> as an attribute
+to a resultset.
+
+=back
+
=head2 cache
Set to 1 to cache search results. This prevents extra SQL queries if you
revisit rows in your ResultSet:
my $resultset = $schema->resultset('Artist')->search( undef, { cache => 1 } );
-
+
while( my $artist = $resultset->next ) {
... do stuff ...
}