X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FResultSet.pm;h=128b5546ec82bc8569d168b0859bc6810e751e67;hb=09d2e66a5d5558ef9a19dc2ec510d5dafd2fb7d8;hp=cc5b39818f5c5e9974d800dbe06e4789f1136d48;hpb=b3a400a044a5e4a768e26d450e3cce289481ee7a;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index cc5b398..128b554 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -2,19 +2,21 @@ package DBIx::Class::ResultSet; use strict; use warnings; -use base qw/DBIx::Class/; + +use base 'DBIx::Class'; + use DBIx::Class::Carp; use DBIx::Class::ResultSetColumn; use DBIx::Class::ResultClass::HashRefInflator; -use Scalar::Util qw/blessed weaken reftype/; +use Scalar::Util qw( blessed reftype ); +use SQL::Abstract 'is_literal_value'; use DBIx::Class::_Util qw( - dbic_internal_try dump_value - fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION + dbic_internal_try dbic_internal_catch dump_value emit_loud_diag + fail_on_internal_wantarray fail_on_internal_call + UNRESOLVABLE_CONDITION DUMMY_ALIASPAIR ); -use Try::Tiny; - -# not importing first() as it will clash with our own method -use List::Util (); +use DBIx::Class::SQLMaker::Util qw( normalize_sqla_condition extract_equality_conditions ); +use DBIx::Class::ResultSource::FromSpec::Util 'find_join_path_to_alias'; BEGIN { # De-duplication in _merge_attr() is disabled, but left in for reference @@ -466,7 +468,7 @@ sub search_rs { # see if we can keep the cache (no $rs changes) my $cache; my %safe = (alias => 1, cache => 1); - if ( ! List::Util::first { !$safe{$_} } keys %$call_attrs and ( + if ( ! grep { !$safe{$_} } keys %$call_attrs and ( ! defined $call_cond or ref $call_cond eq 'HASH' && ! keys %$call_cond @@ -490,9 +492,8 @@ sub search_rs { my @selector_attrs = qw/select as columns cols +select +as +columns include_columns/; # reset the current selector list if new selectors are supplied - if (List::Util::first { exists $call_attrs->{$_} } qw/columns cols select as/) { - delete @{$old_attrs}{(@selector_attrs, '_dark_selector')}; - } + delete @{$old_attrs}{(@selector_attrs, '_dark_selector')} + if grep { exists $call_attrs->{$_} } qw(columns cols select as); # Normalize the new selector list (operates on the passed-in attr structure) # Need to do it on every chain instead of only once on _resolved_attrs, in @@ -655,17 +656,15 @@ sub _stack_cond { (ref $_ eq 'HASH' and ! keys %$_) ) and $_ = undef for ($left, $right); - # either one of the two undef - if ( (defined $left) xor (defined $right) ) { - return defined $left ? $left : $right; - } - # both undef - elsif ( ! defined $left ) { - return undef - } - else { - return $self->result_source->schema->storage->_collapse_cond({ -and => [$left, $right] }); - } + return( + # either one of the two undef + ( (defined $left) xor (defined $right) ) ? ( defined $left ? $left : $right ) + + # both undef + : ( ! defined $left ) ? undef + + : { -and => [$left, $right] } + ); } =head2 search_literal @@ -778,9 +777,8 @@ See also L and L. sub find { my $self = shift; - my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + my $attrs = (@_ > 1 && ref $_[-1] eq 'HASH' ? 
pop(@_) : {}); - my $rsrc = $self->result_source; my $constraint_name; if (exists $attrs->{key}) { @@ -793,6 +791,8 @@ sub find { # Parse out the condition from input my $call_cond; + my $rsrc = $self->result_source; + if (ref $_[0] eq 'HASH') { $call_cond = { %{$_[0]} }; } @@ -815,25 +815,59 @@ sub find { } # process relationship data if any + my $rel_list; + for my $key (keys %$call_cond) { if ( + # either a structure or a result-ish object length ref($call_cond->{$key}) and - my $relinfo = $rsrc->relationship_info($key) + ( $rel_list ||= { map { $_ => 1 } $rsrc->relationships } ) + ->{$key} and - # implicitly skip has_many's (likely MC) - (ref (my $val = delete $call_cond->{$key}) ne 'ARRAY' ) + ! is_literal_value( $call_cond->{$key} ) + and + # implicitly skip has_many's (likely MC), via the delete() + ( ref( my $foreign_val = delete $call_cond->{$key} ) ne 'ARRAY' ) ) { - my ($rel_cond, $crosstable) = $rsrc->_resolve_condition( - $relinfo->{cond}, $val, $key, $key - ); - $self->throw_exception("Complex condition via relationship '$key' is unsupported in find()") - if $crosstable or ref($rel_cond) ne 'HASH'; + # FIXME: it seems wrong that relationship conditions take precedence...? + $call_cond = { + %$call_cond, + + %{ $rsrc->_resolve_relationship_condition( + rel_name => $key, + foreign_values => ( + (! defined blessed $foreign_val) ? $foreign_val : do { + + my $f_result_class = $rsrc->related_source($key)->result_class; + + unless( $foreign_val->isa($f_result_class) ) { + + $self->throw_exception( + 'Objects supplied to find() must inherit from ' + . "'$DBIx::Class::ResultSource::__expected_result_class_isa'" + ) unless $foreign_val->isa( + $DBIx::Class::ResultSource::__expected_result_class_isa + ); + + carp_unique( + "Objects supplied to find() via '$key' usually should inherit from " + . "the related ResultClass ('$f_result_class'), perhaps you've made " + . 'a mistake?' + ); + } + + +{ $foreign_val->get_columns }; + } + ), + infer_values_based_on => {}, - # supplement condition - # relationship conditions take precedence (?) - @{$call_cond}{keys %$rel_cond} = values %$rel_cond; + # an API where these are optional would be too cumbersome, + # instead always pass in some dummy values + DUMMY_ALIASPAIR, + )->{inferred_values} }, + }; } } @@ -842,7 +876,7 @@ sub find { if (defined $constraint_name) { $final_cond = $self->_qualify_cond_columns ( - $self->result_source->_minimal_valueset_satisfying_constraint( + $rsrc->_minimal_valueset_satisfying_constraint( constraint_name => $constraint_name, values => ($self->_merge_with_rscond($call_cond))[0], carp_on_nulls => 1, @@ -879,15 +913,15 @@ sub find { dbic_internal_try { push @unique_queries, $self->_qualify_cond_columns( - $self->result_source->_minimal_valueset_satisfying_constraint( + $rsrc->_minimal_valueset_satisfying_constraint( constraint_name => $c_name, values => ($self->_merge_with_rscond($call_cond))[0], - columns_info => ($ci ||= $self->result_source->columns_info), + columns_info => ($ci ||= $rsrc->columns_info), ), $alias ); } - catch { + dbic_internal_catch { push @fc_exceptions, $_ if $_ =~ /\bFilterColumn\b/; }; } @@ -988,7 +1022,8 @@ See also L. =cut -sub search_related { +sub search_related :DBIC_method_is_indirect_sugar { + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; return shift->related_resultset(shift)->search(@_); } @@ -999,7 +1034,8 @@ it guarantees a resultset, even in list context. 
=cut -sub search_related_rs { +sub search_related_rs :DBIC_method_is_indirect_sugar { + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; return shift->related_resultset(shift)->search_rs(@_); } @@ -1023,7 +1059,7 @@ sub cursor { return $self->{cursor} ||= do { my $attrs = $self->_resolved_attrs; - $self->result_source->storage->select( + $self->result_source->schema->storage->select( $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs ); }; @@ -1096,7 +1132,7 @@ sub single { } } - my $data = [ $self->result_source->storage->select_single( + my $data = [ $self->result_source->schema->storage->select_single( $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs )]; @@ -1123,9 +1159,7 @@ Returns a L instance for a column of the ResultSet =cut sub get_column { - my ($self, $column) = @_; - my $new = DBIx::Class::ResultSetColumn->new($self, $column); - return $new; + DBIx::Class::ResultSetColumn->new(@_); } =head2 search_like @@ -1165,7 +1199,7 @@ sub search_like { .' Instead use ->search({ x => { -like => "y%" } })' .' (note the outer pair of {}s - they are important!)' ); - my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + my $attrs = (@_ > 1 && ref $_[-1] eq 'HASH' ? pop(@_) : {}); my $query = ref $_[0] eq 'HASH' ? { %{shift()} }: {@_}; $query->{$_} = { 'like' => $query->{$_} } for keys %$query; return $class->search($query, { %$attrs }); @@ -1413,7 +1447,7 @@ sub _construct_results { : '@$rows = map { $inflator_cref->($res_class, $rsrc, { %s } ) } @$rows' ), ( join (', ', map { "\$infmap->[$_] => \$_->[$_]" } 0..$#$infmap ) ) - ); + ) . '; 1' or die; } } else { @@ -1423,17 +1457,27 @@ sub _construct_results { : 'classic_nonpruning' ; - # $args and $attrs to _mk_row_parser are separated to delineate what is - # core collapser stuff and what is dbic $rs specific - $self->{_row_parser}{$parser_type}{cref} = $rsrc->_mk_row_parser({ - eval => 1, - inflate_map => $infmap, - collapse => $attrs->{collapse}, - premultiplied => $attrs->{_main_source_premultiplied}, - hri_style => $self->{_result_inflator}{is_hri}, - prune_null_branches => $self->{_result_inflator}{is_hri} || $self->{_result_inflator}{is_core_row}, - }, $attrs) unless $self->{_row_parser}{$parser_type}{cref}; + unless( $self->{_row_parser}{$parser_type}{cref} ) { + + # $args and $attrs to _mk_row_parser are separated to delineate what is + # core collapser stuff and what is dbic $rs specific + $self->{_row_parser}{$parser_type}{src} = $rsrc->_mk_row_parser({ + inflate_map => $infmap, + collapse => $attrs->{collapse}, + premultiplied => $attrs->{_main_source_premultiplied}, + hri_style => $self->{_result_inflator}{is_hri}, + prune_null_branches => $self->{_result_inflator}{is_hri} || $self->{_result_inflator}{is_core_row}, + }, $attrs); + + $self->{_row_parser}{$parser_type}{cref} = do { + package # hide form PAUSE + DBIx::Class::__GENERATED_ROW_PARSER__; + + eval $self->{_row_parser}{$parser_type}{src}; + } || die $@; + } + # this needs to close over the *current* cursor, hence why it is not cached above my $next_cref = ($did_fetch_all or ! $attrs->{collapse}) ? 
undef : sub { @@ -1640,7 +1684,7 @@ sub _count_rs { # overwrite the selector (supplied by the storage) $rsrc->resultset_class->new($rsrc, { %$tmp_attrs, - select => $rsrc->storage->_count_select ($rsrc, $attrs), + select => $rsrc->schema->storage->_count_select ($rsrc, $attrs), as => 'count', })->get_column ('count'); } @@ -1671,7 +1715,7 @@ sub _count_subq_rs { # Calculate subquery selector if (my $g = $sub_attrs->{group_by}) { - my $sql_maker = $rsrc->storage->sql_maker; + my $sql_maker = $rsrc->schema->storage->sql_maker; # necessary as the group_by may refer to aliased functions my $sel_index; @@ -1738,7 +1782,7 @@ sub _count_subq_rs { return $rsrc->resultset_class ->new ($rsrc, $sub_attrs) ->as_subselect_rs - ->search ({}, { columns => { count => $rsrc->storage->_count_select ($rsrc, $attrs) } }) + ->search ({}, { columns => { count => $rsrc->schema->storage->_count_select ($rsrc, $attrs) } }) ->get_column ('count'); } @@ -1761,7 +1805,10 @@ with the passed arguments, then L. =cut -sub count_literal { shift->search_literal(@_)->count; } +sub count_literal :DBIC_method_is_indirect_sugar { + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; + shift->search_literal(@_)->count +} =head2 all @@ -1838,7 +1885,8 @@ an object for the first result (or C if the resultset is empty). =cut -sub first { +sub first :DBIC_method_is_indirect_sugar { + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; return $_[0]->reset->next; } @@ -1878,7 +1926,7 @@ sub _rs_update_delete { $storage->_prune_unused_joins ($attrs); # any non-pruneable non-local restricting joins imply subq - $needs_subq = defined List::Util::first { $_ ne $attrs->{alias} } keys %{ $join_classifications->{restricting} || {} }; + $needs_subq = grep { $_ ne $attrs->{alias} } keys %{ $join_classifications->{restricting} || {} }; } # check if the head is composite (by now all joins are thrown out unless $needs_subq) @@ -1895,7 +1943,7 @@ sub _rs_update_delete { # a condition containing 'me' or other table prefixes will not work # at all. Tell SQLMaker to dequalify idents via a gross hack. 
$cond = do { - my $sqla = $rsrc->storage->sql_maker; + my $sqla = $rsrc->schema->storage->sql_maker; local $sqla->{_dequalify_idents} = 1; \[ $sqla->_recurse_where($self->{cond}) ]; }; @@ -2209,6 +2257,9 @@ sub populate { # FIXME - no cref handling # At this point assume either hashes or arrays + my $rsrc = $self->result_source; + my $storage = $rsrc->schema->storage; + if(defined wantarray) { my (@results, $guard); @@ -2216,7 +2267,7 @@ sub populate { # column names only, nothing to do return if @$data == 1; - $guard = $self->result_source->schema->storage->txn_scope_guard + $guard = $storage->txn_scope_guard if @$data > 2; @results = map @@ -2226,7 +2277,7 @@ sub populate { } else { - $guard = $self->result_source->schema->storage->txn_scope_guard + $guard = $storage->txn_scope_guard if @$data > 1; @results = map { $self->new_result($_)->insert } @$data; @@ -2240,7 +2291,6 @@ sub populate { # this means we have to walk the data structure twice # whether we want this or not # jnap, I hate you ;) - my $rsrc = $self->result_source; my $rel_info = { map { $_ => $rsrc->relationship_info($_) } $rsrc->relationships }; my ($colinfo, $colnames, $slices_with_rels); @@ -2277,7 +2327,18 @@ sub populate { or ref $data->[$i][$_->{pos}] eq 'HASH' or - ( defined blessed $data->[$i][$_->{pos}] and $data->[$i][$_->{pos}]->isa('DBIx::Class::Row') ) + ( + defined blessed $data->[$i][$_->{pos}] + and + $data->[$i][$_->{pos}]->isa( + $DBIx::Class::ResultSource::__expected_result_class_isa + || + emit_loud_diag( + confess => 1, + msg => 'Global $DBIx::Class::ResultSource::__expected_result_class_isa unexpectedly unset...' + ) + ) + ) ) and 1 @@ -2285,7 +2346,18 @@ sub populate { # moar sanity check... sigh for ( ref $data->[$i][$_->{pos}] eq 'ARRAY' ? @{$data->[$i][$_->{pos}]} : $data->[$i][$_->{pos}] ) { - if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) { + if ( + defined blessed $_ + and + $_->isa( + $DBIx::Class::ResultSource::__expected_result_class_isa + || + emit_loud_diag( + confess => 1, + msg => 'Global $DBIx::Class::ResultSource::__expected_result_class_isa unexpectedly unset...' + ) + ) + ) { carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()"); return my $throwaway = $self->populate(@_); } @@ -2327,7 +2399,18 @@ sub populate { or ref $data->[$i]{$_} eq 'HASH' or - ( defined blessed $data->[$i]{$_} and $data->[$i]{$_}->isa('DBIx::Class::Row') ) + ( + defined blessed $data->[$i]{$_} + and + $data->[$i]{$_}->isa( + $DBIx::Class::ResultSource::__expected_result_class_isa + || + emit_loud_diag( + confess => 1, + msg => 'Global $DBIx::Class::ResultSource::__expected_result_class_isa unexpectedly unset...' + ) + ) + ) ) and 1 @@ -2335,7 +2418,18 @@ sub populate { # moar sanity check... sigh for ( ref $data->[$i]{$_} eq 'ARRAY' ? @{$data->[$i]{$_}} : $data->[$i]{$_} ) { - if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) { + if ( + defined blessed $_ + and + $_->isa( + $DBIx::Class::ResultSource::__expected_result_class_isa + || + emit_loud_diag( + confess => 1, + msg => 'Global $DBIx::Class::ResultSource::__expected_result_class_isa unexpectedly unset...' 
+ ) + ) + ) { carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()"); return my $throwaway = $self->populate(@_); } @@ -2397,13 +2491,13 @@ sub populate { ### start work my $guard; - $guard = $rsrc->schema->storage->txn_scope_guard + $guard = $storage->txn_scope_guard if $slices_with_rels; ### main source data # FIXME - need to switch entirely to a coderef-based thing, # so that large sets aren't copied several times... I think - $rsrc->storage->_insert_bulk( + $storage->_insert_bulk( $rsrc, [ @$colnames, sort keys %$rs_data ], [ map { @@ -2439,8 +2533,10 @@ sub populate { $colinfo->{$rel}{fk_map} = { reverse %{ $rsrc->_resolve_relationship_condition( rel_name => $rel, - self_alias => "\xFE", # irrelevant - foreign_alias => "\xFF", # irrelevant + + # an API where these are optional would be too cumbersome, + # instead always pass in some dummy values + DUMMY_ALIASPAIR, )->{identity_map} || {} } }; } @@ -2554,11 +2650,8 @@ Passes the hashref of input on to L. sub new_result { my ($self, $values) = @_; - $self->throw_exception( "new_result takes only one argument - a hashref of values" ) - if @_ > 2; - - $self->throw_exception( "Result object instantiation requires a hashref as argument" ) - unless (ref $values eq 'HASH'); + $self->throw_exception( "Result object instantiation requires a single hashref argument" ) + if @_ > 2 or ref $values ne 'HASH'; my ($merged_cond, $cols_from_relations) = $self->_merge_with_rscond($values); @@ -2608,7 +2701,7 @@ sub _merge_with_rscond { @cols_from_relations = keys %{ $implied_data || {} }; } else { - my $eqs = $self->result_source->schema->storage->_extract_fixed_condition_columns($self->{cond}, 'consider_nulls'); + my $eqs = extract_equality_conditions( $self->{cond}, 'consider_nulls' ); $implied_data = { map { ( ($eqs->{$_}||'') eq UNRESOLVABLE_CONDITION ) ? () : ( $_ => $eqs->{$_} ) } keys %$eqs }; @@ -2722,7 +2815,7 @@ sub as_query { my $attrs = { %{ $self->_resolved_attrs } }; - my $aq = $self->result_source->storage->_select_args_to_query ( + my $aq = $self->result_source->schema->storage->_select_args_to_query ( $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs ); @@ -2768,7 +2861,7 @@ all in the call to C, even when set to C. sub find_or_new { my $self = shift; - my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + my $attrs = (@_ > 1 && ref $_[-1] eq 'HASH' ? pop(@_) : {}); my $hash = ref $_[0] eq 'HASH' ? shift : {@_}; if (keys %$hash and my $row = $self->find($hash, $attrs) ) { return $row; @@ -2857,7 +2950,7 @@ L. =cut -sub create { +sub create :DBIC_method_is_indirect_sugar { #my ($self, $col_data) = @_; DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; return shift->new_result(shift)->insert; @@ -2937,7 +3030,7 @@ database! sub find_or_create { my $self = shift; - my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + my $attrs = (@_ > 1 && ref $_[-1] eq 'HASH' ? pop(@_) : {}); my $hash = ref $_[0] eq 'HASH' ? shift : {@_}; if (keys %$hash and my $row = $self->find($hash, $attrs) ) { return $row; @@ -3003,7 +3096,7 @@ database! sub update_or_create { my $self = shift; - my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); + my $attrs = (@_ > 1 && ref $_[-1] eq 'HASH' ? pop(@_) : {}); my $cond = ref $_[0] eq 'HASH' ? shift : {@_}; my $row = $self->find($cond, $attrs); @@ -3066,7 +3159,7 @@ See also L, L and L. sub update_or_new { my $self = shift; - my $attrs = ( @_ > 1 && ref $_[$#_] eq 'HASH' ? 
pop(@_) : {} ); + my $attrs = ( @_ > 1 && ref $_[-1] eq 'HASH' ? pop(@_) : {} ); my $cond = ref $_[0] eq 'HASH' ? shift : {@_}; my $row = $self->find( $cond, $attrs ); @@ -3175,7 +3268,7 @@ sub is_paged { sub is_ordered { my ($self) = @_; - return scalar $self->result_source->storage->_extract_order_criteria($self->{attrs}{order_by}); + return scalar $self->result_source->schema->storage->_extract_order_criteria($self->{attrs}{order_by}); } =head2 related_resultset @@ -3217,13 +3310,11 @@ sub related_resultset { my $attrs = $self->_chain_relationship($rel); - my $storage = $rsrc->schema->storage; - # Previously this atribute was deleted (instead of being set as it is now) # Doing so seems to be harmless in all available test permutations # See also 01d59a6a6 and mst's comment below # - $attrs->{alias} = $storage->relname_to_table_alias( + $attrs->{alias} = $rsrc->schema->storage->relname_to_table_alias( $rel, $attrs->{seen_join}{$rel} ); @@ -3231,8 +3322,55 @@ sub related_resultset { # since this is search_related, and we already slid the select window inwards # (the select/as attrs were deleted in the beginning), we need to flip all # left joins to inner, so we get the expected results - # read the comment on top of the actual function to see what this does - $attrs->{from} = $storage->_inner_join_to_node( $attrs->{from}, $attrs->{alias} ); + # + # The DBIC relationship chaining implementation is pretty simple - every + # new related_relationship is pushed onto the {from} stack, and the {select} + # window simply slides further in. This means that when we count somewhere + # in the middle, we got to make sure that everything in the join chain is an + # actual inner join, otherwise the count will come back with unpredictable + # results (a resultset may be generated with _some_ rows regardless of if + # the relation which the $rs currently selects has rows or not). E.g. + # $artist_rs->cds->count - normally generates: + # SELECT COUNT( * ) FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid + # which actually returns the number of artists * (number of cds || 1) + # + # So what we do here is crawl {from}, determine if the current alias is at + # the top of the stack, and if not - make sure the chain is inner-joined down + # to the root. + # + my $switch_branch = find_join_path_to_alias( + $attrs->{from}, + $attrs->{alias}, + ); + + if ( @{ $switch_branch || [] } ) { + + # So it looks like we will have to switch some stuff around. + # local() is useless here as we will be leaving the scope + # anyway, and deep cloning is just too fucking expensive + # So replace the first hashref in the node arrayref manually + my @new_from = $attrs->{from}[0]; + my $sw_idx = { map { (values %$_), 1 } @$switch_branch }; #there's one k/v per join-path + + for my $j ( @{$attrs->{from}}[ 1 .. $#{$attrs->{from}} ] ) { + my $jalias = $j->[0]{-alias}; + + if ($sw_idx->{$jalias}) { + my %attrs = %{$j->[0]}; + delete $attrs{-join_type}; + push @new_from, [ + \%attrs, + @{$j}[ 1 .. $#$j ], + ]; + } + else { + push @new_from, $j; + } + } + + $attrs->{from} = \@new_from; + } + #XXX - temp fix for result_class bug. 
There likely is a more elegant fix -groditi delete $attrs->{result_class}; @@ -3518,10 +3656,13 @@ sub _resolved_attrs { if ( $attrs->{rows} =~ /[^0-9]/ or $attrs->{rows} <= 0 ); } + # normalize where condition + $attrs->{where} = normalize_sqla_condition( $attrs->{where} ) + if $attrs->{where}; # default selection list $attrs->{columns} = [ $source->columns ] - unless List::Util::first { exists $attrs->{$_} } qw/columns cols select as/; + unless grep { exists $attrs->{$_} } qw/columns cols select as/; # merge selectors together for (qw/columns select as/) { @@ -3682,7 +3823,7 @@ sub _resolved_attrs { if ( ! $attrs->{_main_source_premultiplied} and - ! List::Util::first { ! $_->[0]{-is_single} } @fromlist + ! grep { ! $_->[0]{-is_single} } @fromlist ) { $attrs->{collapse} = 0; } @@ -3704,7 +3845,7 @@ sub _resolved_attrs { else { $attrs->{_grouped_by_distinct} = 1; # distinct affects only the main selection part, not what prefetch may add below - ($attrs->{group_by}, my $new_order) = $source->storage->_group_over_selection($attrs); + ($attrs->{group_by}, my $new_order) = $source->schema->storage->_group_over_selection($attrs); # FIXME possibly ignore a rewritten order_by (may turn out to be an issue) # The thinking is: if we are collapsing the subquerying prefetch engine will @@ -3912,7 +4053,7 @@ sub _merge_joinpref_attr { }, ARRAY => sub { return $_[1] if !defined $_[0]; - return $_[1] if __HM_DEDUP and List::Util::first { $_ eq $_[0] } @{$_[1]}; + return $_[1] if __HM_DEDUP and grep { $_ eq $_[0] } @{$_[1]}; return [$_[0], @{$_[1]}] }, HASH => sub { @@ -3925,7 +4066,7 @@ sub _merge_joinpref_attr { ARRAY => { SCALAR => sub { return $_[0] if !defined $_[1]; - return $_[0] if __HM_DEDUP and List::Util::first { $_ eq $_[1] } @{$_[0]}; + return $_[0] if __HM_DEDUP and grep { $_ eq $_[1] } @{$_[0]}; return [@{$_[0]}, $_[1]] }, ARRAY => sub { @@ -3938,7 +4079,7 @@ sub _merge_joinpref_attr { HASH => sub { return [ $_[1] ] if ! @{$_[0]}; return $_[0] if !keys %{$_[1]}; - return $_[0] if __HM_DEDUP and List::Util::first { $_ eq $_[1] } @{$_[0]}; + return $_[0] if __HM_DEDUP and grep { $_ eq $_[1] } @{$_[0]}; return [ @{$_[0]}, $_[1] ]; }, }, @@ -3953,7 +4094,7 @@ sub _merge_joinpref_attr { return [] if !keys %{$_[0]} and !@{$_[1]}; return [ $_[0] ] if !@{$_[1]}; return $_[1] if !keys %{$_[0]}; - return $_[1] if __HM_DEDUP and List::Util::first { $_ eq $_[0] } @{$_[1]}; + return $_[1] if __HM_DEDUP and grep { $_ eq $_[0] } @{$_[1]}; return [ $_[0], @{$_[1]} ]; }, HASH => sub {
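
A usage sketch, separate from the diff above. It assumes the conventional Artist/CD example schema (an 'Artist' source with a 'cds' has_many relationship, matching the SQL quoted in the new related_resultset() comment) and illustrates why that code flips LEFT JOINs to inner joins before a count is taken through a relationship chain:

    my $artist_rs = $schema->resultset('Artist');

    # Routed through related_resultset() above. With the original LEFT JOIN,
    #   SELECT COUNT( * ) FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid
    # every artist without a single CD would still contribute one row to the
    # count; after the switch to an inner join the result reflects only the
    # CD rows actually reachable through the 'cds' relationship.
    my $cd_count = $artist_rs->search_related_rs('cds')->count;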