X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FResultSet.pm;h=6b856855a6d5e11e13accffc44253e78cdde4e1f;hb=d867eedaa703200d7f0bc329836e99b6bd22bc39;hp=01a270f5a14755a88ddf8f6b6d7f4311a7ea760e;hpb=7d82e1cdbd8ab9c37fecd3f6952cb378659b4443;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 01a270f..6b85685 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -7,6 +7,7 @@ use overload 'bool' => "_bool", fallback => 1; use Carp::Clan qw/^DBIx::Class/; +use DBIx::Class::Exception; use Data::Page; use Storable; use DBIx::Class::ResultSetColumn; @@ -570,12 +571,16 @@ sub _unique_queries { my $where = $self->_collapse_cond($self->{attrs}{where} || {}); my $num_where = scalar keys %$where; - my @unique_queries; + my (@unique_queries, %seen_column_combinations); foreach my $name (@constraint_names) { - my @unique_cols = $self->result_source->unique_constraint_columns($name); - my $unique_query = $self->_build_unique_query($query, \@unique_cols); + my @constraint_cols = $self->result_source->unique_constraint_columns($name); - my $num_cols = scalar @unique_cols; + my $constraint_sig = join "\x00", sort @constraint_cols; + next if $seen_column_combinations{$constraint_sig}++; + + my $unique_query = $self->_build_unique_query($query, \@constraint_cols); + + my $num_cols = scalar @constraint_cols; my $num_query = scalar keys %$unique_query; my $total = $num_query + $num_where; @@ -1264,10 +1269,10 @@ sub _count_subq_rs { my $sub_attrs = { %$attrs }; # extra selectors do not go in the subquery and there is no point of ordering it - delete $sub_attrs->{$_} for qw/collapse prefetch_select select as order_by/; + delete $sub_attrs->{$_} for qw/collapse select _prefetch_select as order_by/; - # if we prefetch, we group_by primary keys only as this is what we would get out of the rs via ->next/->all - # clobber old group_by regardless + # if we prefetch, we group_by primary keys only as this is what we would get out + # of the rs via ->next/->all. We DO WANT to clobber old group_by regardless if ( keys %{$attrs->{collapse}} ) { $sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->primary_columns) ] } @@ -1279,8 +1284,15 @@ sub _count_subq_rs { $sub_attrs->{from}, $sub_attrs->{alias} ); + # this is so that ordering can be thrown away in things like Top limit + $sub_attrs->{-for_count_only} = 1; + + my $sub_rs = $rsrc->resultset_class->new ($rsrc, $sub_attrs); + $attrs->{from} = [{ - count_subq => $rsrc->resultset_class->new ($rsrc, $sub_attrs )->as_query + -alias => 'count_subq', + -source_handle => $rsrc->handle, + count_subq => $sub_rs->as_query, }]; # the subquery replaces this @@ -1308,9 +1320,12 @@ sub _count_subq_rs { sub _switch_to_inner_join_if_needed { my ($self, $from, $alias) = @_; + # subqueries and other oddness is naturally not supported return $from if ( ref $from ne 'ARRAY' || + @$from <= 1 + || ref $from->[0] ne 'HASH' || ! $from->[0]{-alias} @@ -1318,58 +1333,38 @@ sub _switch_to_inner_join_if_needed { $from->[0]{-alias} eq $alias ); - # this would be the case with a subquery - we'll never find - # the target as it is not in the parseable part of {from} - return $from if @$from == 1; - - my (@switch_idx, $found_target); - + my $switch_branch; JOINSCAN: - for my $i (1 .. 
$#$from) { - - push @switch_idx, $i; - my $j = $from->[$i]; - my $jalias = $j->[0]{-alias}; - - # we found our current target - delete any siblings (same level joins) - # and bail out - if ($jalias eq $alias) { - $found_target++; - - my $cur_depth = $j->[0]{-relation_chain_depth}; - # we are -1, so look at -2 - while (@switch_idx > 1 - && $from->[$switch_idx[-2]][0]{-relation_chain_depth} == $cur_depth - ) { - splice @switch_idx, -2, 1; - } - + for my $j (@{$from}[1 .. $#$from]) { + if ($j->[0]{-alias} eq $alias) { + $switch_branch = $j->[0]{-join_path}; last JOINSCAN; } } # something else went wrong - return $from unless $found_target; + return $from unless $switch_branch; # So it looks like we will have to switch some stuff around. # local() is useless here as we will be leaving the scope # anyway, and deep cloning is just too fucking expensive # So replace the inner hashref manually - my @new_from; - my $sw_idx = { map { $_ => 1 } @switch_idx }; + my @new_from = ($from->[0]); + my $sw_idx = { map { $_ => 1 } @$switch_branch }; - for my $i (0 .. $#$from) { - if ($sw_idx->{$i}) { - my %attrs = %{$from->[$i][0]}; - delete $attrs{-join_type}; + for my $j (@{$from}[1 .. $#$from]) { + my $jalias = $j->[0]{-alias}; + if ($sw_idx->{$jalias}) { + my %attrs = %{$j->[0]}; + delete $attrs{-join_type}; push @new_from, [ \%attrs, - @{$from->[$i]}[ 1 .. $#{$from->[$i]} ], + @{$j}[ 1 .. $#$j ], ]; } else { - push @new_from, $from->[$i]; + push @new_from, $j; } } @@ -1519,7 +1514,8 @@ sub _rs_update_delete { if (my $g = $attrs->{group_by}) { my @current_group_by = map { $_ =~ /\./ ? $_ : "$attrs->{alias}.$_" } - (ref $g eq 'ARRAY' ? @$g : $g ); + @$g + ; if ( join ("\x00", sort @current_group_by) @@ -2201,13 +2197,14 @@ You most likely want this method when looking for existing rows using a unique constraint that is not the primary key, or looking for related rows. -If you want objects to be saved immediately, use L instead. +If you want objects to be saved immediately, use L +instead. -B: C is probably not what you want when creating a -new row in a table that uses primary keys supplied by the -database. Passing in a primary key column with a value of I -will cause L to attempt to search for a row with a value of -I. +B: Take care when using C with a table having +columns with default values that you intend to be automatically +supplied by the database (e.g. an auto_increment primary key column). +In normal usage, the value of such columns should NOT be included at +all in the call to C, even when set to C. =cut @@ -2242,12 +2239,15 @@ store. If the appropriate relationships are set up, foreign key fields can also be passed an object representing the foreign row, and the value will be set to its primary key. -To create related objects, pass a hashref for the value if the related -item is a foreign key relationship (L), -and use the name of the relationship as the key. (NOT the name of the field, -necessarily). For C and C relationships, pass an arrayref -of hashrefs containing the data for each of the rows to create in the foreign -tables, again using the relationship name as the key. +To create related objects, pass a hashref of related-object column values +B. If the relationship is of type C +(L) - pass an arrayref of hashrefs. +The process will correctly identify columns holding foreign keys, and will +transparrently populate them from the keys of the corresponding relation. 
+This can be applied recursively, and will work correctly for a structure +with an arbitrary depth and width, as long as the relationships actually +exists and the correct column data has been supplied. + Instead of hashrefs of plain related data (key/value pairs), you may also pass new or inserted objects. New objects (not inserted yet, see @@ -2284,6 +2284,19 @@ Cresultset. Note Hashref. } }); +=over + +=item WARNING + +When subclassing ResultSet never attempt to override this method. Since +it is a simple shortcut for C<< $self->new_result($attrs)->insert >>, a +lot of the internals simply never call it, so your override will be +bypassed more often than not. Override either L +or L depending on how early in the +L process you need to intervene. + +=back + =cut sub create { @@ -2333,11 +2346,11 @@ condition. Another process could create a record in the table after the find has completed and before the create has started. To avoid this problem, use find_or_create() inside a transaction. -B: C is probably not what you want when creating -a new row in a table that uses primary keys supplied by the -database. Passing in a primary key column with a value of I -will cause L to attempt to search for a row with a value of -I. +B: Take care when using C with a table having +columns with default values that you intend to be automatically +supplied by the database (e.g. an auto_increment primary key column). +In normal usage, the value of such columns should NOT be included at +all in the call to C, even when set to C. See also L and L. For information on how to declare unique constraints, see L. @@ -2400,11 +2413,11 @@ If the C is specified as C, it searches only on the primary key. See also L and L. For information on how to declare unique constraints, see L. -B: C is probably not what you want when -looking for a row in a table that uses primary keys supplied by the -database, unless you actually have a key value. Passing in a primary -key column with a value of I will cause L to attempt to -search for a row with a value of I. +B: Take care when using C with a table having +columns with default values that you intend to be automatically +supplied by the database (e.g. an auto_increment primary key column). +In normal usage, the value of such columns should NOT be included at +all in the call to C, even when set to C. =cut @@ -2461,7 +2474,13 @@ For example: $cd->insert; } -See also L, L and L. +B: Take care when using C with a table having +columns with default values that you intend to be automatically +supplied by the database (e.g. an auto_increment primary key column). +In normal usage, the value of such columns should NOT be included at +all in the call to C, even when set to C. + +See also L, L and L. =cut @@ -2671,6 +2690,11 @@ sub current_source_alias { # in order to properly resolve prefetch aliases (any alias # with a relation_chain_depth less than the depth of the # current prefetch is not considered) +# +# The increments happen in 1/2s to make it easier to correlate the +# join depth with the join path. An integer means a relationship +# specified via a search_related, whereas a fraction means an added +# join/prefetch via attributes sub _chain_relationship { my ($self, $rel) = @_; my $source = $self->result_source; @@ -2687,16 +2711,25 @@ sub _chain_relationship { }]; my $seen = { %{$attrs->{seen_join} || {} } }; + my $jpath = ($attrs->{seen_join} && keys %{$attrs->{seen_join}}) + ? 
$from->[-1][0]{-join_path} + : []; + # we need to take the prefetch the attrs into account before we # ->_resolve_join as otherwise they get lost - captainL my $merged = $self->_merge_attr( $attrs->{join}, $attrs->{prefetch} ); - my @requested_joins = $source->_resolve_join($merged, $attrs->{alias}, $seen); + my @requested_joins = $source->_resolve_join( + $merged, + $attrs->{alias}, + $seen, + $jpath, + ); push @$from, @requested_joins; - ++$seen->{-relation_chain_depth}; + $seen->{-relation_chain_depth} += 0.5; # if $self already had a join/prefetch specified on it, the requested # $rel might very well be already included. What we do in this case @@ -2704,19 +2737,36 @@ sub _chain_relationship { # the join in question so we could tell it *is* the search_related) my $already_joined; + # we consider the last one thus reverse for my $j (reverse @requested_joins) { if ($rel eq $j->[0]{-join_path}[-1]) { - $j->[0]{-relation_chain_depth}++; + $j->[0]{-relation_chain_depth} += 0.5; $already_joined++; last; } } + +# alternative way to scan the entire chain - not backwards compatible +# for my $j (reverse @$from) { +# next unless ref $j eq 'ARRAY'; +# if ($j->[0]{-join_path} && $j->[0]{-join_path}[-1] eq $rel) { +# $j->[0]{-relation_chain_depth} += 0.5; +# $already_joined++; +# last; +# } +# } + unless ($already_joined) { - push @$from, $source->_resolve_join($rel, $attrs->{alias}, $seen); + push @$from, $source->_resolve_join( + $rel, + $attrs->{alias}, + $seen, + $jpath, + ); } - ++$seen->{-relation_chain_depth}; + $seen->{-relation_chain_depth} += 0.5; return ($from,$seen); } @@ -2740,24 +2790,38 @@ sub _resolved_attrs { # build columns (as long as select isn't set) into a set of as/select hashes unless ( $attrs->{select} ) { - @colbits = map { - ( ref($_) eq 'HASH' ) - ? $_ - : { - ( - /^\Q${alias}.\E(.+)$/ - ? "$1" - : "$_" - ) - => - ( - /\./ - ? "$_" - : "${alias}.$_" - ) - } - } ( ref($attrs->{columns}) eq 'ARRAY' ) ? @{ delete $attrs->{columns}} : (delete $attrs->{columns} || $source->columns ); + + my @cols = ( ref($attrs->{columns}) eq 'ARRAY' ) + ? @{ delete $attrs->{columns}} + : ( + ( delete $attrs->{columns} ) + || + $source->storage->_order_select_columns( + $source, + [ $source->columns ], + ) + ) + ; + + @colbits = map { + ( ref($_) eq 'HASH' ) + ? $_ + : { + ( + /^\Q${alias}.\E(.+)$/ + ? "$1" + : "$_" + ) + => + ( + /\./ + ? "$_" + : "${alias}.$_" + ) + } + } @cols; } + # add the additional columns on foreach ( 'include_columns', '+columns' ) { push @colbits, map { @@ -2815,7 +2879,7 @@ sub _resolved_attrs { if ( $attrs->{join} || $attrs->{prefetch} ) { - $self->throw_exception ('join/prefetch can not be used with a literal scalarref {from}') + $self->throw_exception ('join/prefetch can not be used with a custom {from}') if ref $attrs->{from} ne 'ARRAY'; my $join = delete $attrs->{join} || {}; @@ -2828,30 +2892,39 @@ sub _resolved_attrs { [ @{ $attrs->{from} }, $source->_resolve_join( - $join, $alias, { %{ $attrs->{seen_join} || {} } } + $join, + $alias, + { %{ $attrs->{seen_join} || {} } }, + ($attrs->{seen_join} && keys %{$attrs->{seen_join}}) + ? $attrs->{from}[-1][0]{-join_path} + : [] + , ) ]; } - if ( $attrs->{order_by} ) { + if ( defined $attrs->{order_by} ) { $attrs->{order_by} = ( ref( $attrs->{order_by} ) eq 'ARRAY' ? [ @{ $attrs->{order_by} } ] - : [ $attrs->{order_by} ] + : [ $attrs->{order_by} || () ] ); } - if ($attrs->{group_by} and ! 
ref $attrs->{group_by}) { + if ($attrs->{group_by} and ref $attrs->{group_by} ne 'ARRAY') { $attrs->{group_by} = [ $attrs->{group_by} ]; } - # If the order_by is otherwise empty - we will use this for TOP limit - # emulation and the like. - # Although this is needed only if the order_by is not defined, it is - # actually cheaper to just populate this rather than properly examining - # order_by (stuf like [ {} ] and the like) - $attrs->{_virtual_order_by} = [ $self->result_source->primary_columns ]; - + # generate the distinct induced group_by early, as prefetch will be carried via a + # subquery (since a group_by is present) + if (delete $attrs->{distinct}) { + if ($attrs->{group_by}) { + carp ("Useless use of distinct on a grouped resultset ('distinct' is ignored when a 'group_by' is present)"); + } + else { + $attrs->{group_by} = [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ]; + } + } $attrs->{collapse} ||= {}; if ( my $prefetch = delete $attrs->{prefetch} ) { @@ -2864,25 +2937,25 @@ sub _resolved_attrs { my @prefetch = $source->_resolve_prefetch( $prefetch, $alias, $join_map, $prefetch_ordering, $attrs->{collapse} ); - $attrs->{prefetch_select} = [ map { $_->[0] } @prefetch ]; - push @{ $attrs->{select} }, @{$attrs->{prefetch_select}}; + # we need to somehow mark which columns came from prefetch + $attrs->{_prefetch_select} = [ map { $_->[0] } @prefetch ]; + + push @{ $attrs->{select} }, @{$attrs->{_prefetch_select}}; push @{ $attrs->{as} }, (map { $_->[1] } @prefetch); - push( @{ $attrs->{order_by} }, @$prefetch_ordering ); + push( @{$attrs->{order_by}}, @$prefetch_ordering ); $attrs->{_collapse_order_by} = \@$prefetch_ordering; } - - if (delete $attrs->{distinct}) { - $attrs->{group_by} ||= [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ]; - } - # if both page and offset are specified, produce a combined offset # even though it doesn't make much sense, this is what pre 081xx has # been doing if (my $page = delete $attrs->{page}) { - $attrs->{offset} = ($attrs->{rows} * ($page - 1)) + - ($attrs->{offset} || 0); + $attrs->{offset} = + ($attrs->{rows} * ($page - 1)) + + + ($attrs->{offset} || 0) + ; } return $self->{_attrs} = $attrs; @@ -2894,13 +2967,21 @@ sub _joinpath_aliases { my $paths = {}; return $paths unless ref $fromspec eq 'ARRAY'; + my $cur_depth = $seen->{-relation_chain_depth} || 0; + + if (int ($cur_depth) != $cur_depth) { + $self->throw_exception ("-relation_chain_depth is not an integer, something went horribly wrong ($cur_depth)"); + } + for my $j (@$fromspec) { next if ref $j ne 'ARRAY'; - next if $j->[0]{-relation_chain_depth} < ( $seen->{-relation_chain_depth} || 0); + next if ($j->[0]{-relation_chain_depth} || 0) < $cur_depth; + + my $jpath = $j->[0]{-join_path}; my $p = $paths; - $p = $p->{$_} ||= {} for @{$j->[0]{-join_path}}; + $p = $p->{$_} ||= {} for @{$jpath}[$cur_depth .. $#$jpath]; push @{$p->{-join_aliases} }, $j->[0]{-alias}; } @@ -2949,6 +3030,13 @@ sub _rollout_hash { sub _calculate_score { my ($self, $a, $b) = @_; + if (defined $a xor defined $b) { + return 0; + } + elsif (not defined $a) { + return 1; + } + if (ref $b eq 'HASH') { my ($b_key) = keys %{$b}; if (ref $a eq 'HASH') { @@ -3030,12 +3118,13 @@ See L for details. 
 sub throw_exception {
   my $self=shift;
+
   if (ref $self && $self->_source_handle->schema) {
     $self->_source_handle->schema->throw_exception(@_)
-  } else {
-    croak(@_);
   }
-
+  else {
+    DBIx::Class::Exception->throw(@_);
+  }
 }
 
 # XXX: FIXME: Attributes docs need clearing up
@@ -3057,10 +3146,15 @@ These are in no particular order:
 
 =back
 
-Which column(s) to order the results by. If a single column name, or
-an arrayref of names is supplied, the argument is passed through
-directly to SQL. The hashref syntax allows for connection-agnostic
-specification of ordering direction:
+Which column(s) to order the results by.
+
+[The full list of suitable values is documented in
+L<SQL::Abstract/"ORDER BY CLAUSES">; the following is a summary of
+common options.]
+
+If a single column name, or an arrayref of names is supplied, the
+argument is passed through directly to SQL. The hashref syntax allows
+for connection-agnostic specification of ordering direction:
 
 For descending order:
 
@@ -3339,6 +3433,42 @@ with that artist is given below (assuming many-to-many from artists to tags):
 
 B<NOTE:> If you specify a C<prefetch> attribute, the C<join> and C<select>