From: Peter Rabbitson
Date: Sat, 21 Apr 2012 04:15:33 +0000 (+0200)
Subject: Fix lapse on limited incomplete has_many prefetches
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=27e0370da6cfee04a24491eaa4ce7aa5c878bafa;p=dbsrgits%2FDBIx-Class-Historic.git

Fix lapse on limited incomplete has_many prefetches
---
diff --git a/Changes b/Changes
index f3515dc..c62b1aa 100644
--- a/Changes
+++ b/Changes
@@ -18,6 +18,8 @@ Revision history for DBIx::Class
         - Nomalization of retrieved GUID values
 
     * Fixes
+        - Fix complex has_many prefetch with resultsets not selecting identity
+          columns from the root result source
         - Fix SkipFirst and FirstSkip limit dialects (Informix and Firebird)
         - Fix "Skimming limit" dialects (Top, FetchFirst) to properly check
           the order_by criteria for stability
diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm
index cb75f14..1f48bc6 100644
--- a/lib/DBIx/Class/Storage/DBIHacks.pm
+++ b/lib/DBIx/Class/Storage/DBIHacks.pm
@@ -88,6 +88,28 @@ sub _adjust_select_args_for_complex_prefetch {
   my $outer_select = [ @$select ];
   my $inner_select = [];
 
+  my ($root_source, $root_source_offset);
+
+  for my $i (0 .. $#$from) {
+    my $node = $from->[$i];
+    my $h = (ref $node eq 'HASH') ? $node
+      : (ref $node eq 'ARRAY' and ref $node->[0] eq 'HASH') ? $node->[0]
+      : next
+    ;
+
+    if ( ($h->{-alias}||'') eq $attrs->{alias} and $root_source = $h->{-rsrc} ) {
+      $root_source_offset = $i;
+      last;
+    }
+  }
+
+  $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute')
+    unless $root_source;
+
+  # use the heavy duty resolver to take care of aliased/nonaliased naming
+  my $colinfo = $self->_resolve_column_info($from);
+  my $selected_root_columns;
+
   my ($p_start, $p_end) = @{$outer_attrs->{_prefetch_selector_range}};
   for my $i (0 .. $p_start - 1, $p_end + 1 .. $#$outer_select) {
     my $sel = $outer_select->[$i];
@@ -96,12 +118,44 @@ sub _adjust_select_args_for_complex_prefetch {
       $sel->{-as} ||= $attrs->{as}[$i];
       $outer_select->[$i] = join ('.', $attrs->{alias}, ($sel->{-as} || "inner_column_$i") );
     }
+    elsif (! ref $sel and my $ci = $colinfo->{$sel}) {
+      $selected_root_columns->{$ci->{-colname}} = 1;
+    }
 
     push @$inner_select, $sel;
     push @{$inner_attrs->{as}}, $attrs->{as}[$i];
   }
 
+  # We will need to fetch all native columns in the inner subquery, which may be a part
+  # of an *outer* join condition. We cannot just fetch everything because a potential
+  # has_many restricting join collapse *will not work* on heavy data types.
+  # Time for more horrible SQL parsing, aughhhh
+
+  # MASSIVE FIXME - in fact when we are fully transitioned to DQ and the support
+  # is sane - we will need to trim the select list to *only* fetch stuff that is
+  # necessary to build joins. In the current implementation if I am selecting a blob
+  # and the group_by kicks in - we are fucked, and all the user can do is not select
+  # that column. This is silly!
+
+  my $retardo_sqla_cache = {};
+  for my $cond ( map { $_->[1] } @{$from}[$root_source_offset + 1 .. $#$from] ) {
+    for my $col (@{$self->_extract_condition_columns($cond, $retardo_sqla_cache)}) {
+      my $ci = $colinfo->{$col};
+      if (
+        $ci
+          and
+        $ci->{-source_alias} eq $attrs->{alias}
+          and
+        ! $selected_root_columns->{$ci->{-colname}}++
+      ) {
+        # adding it to both to keep limits not supporting dark selectors happy
+        push @$inner_select, $ci->{-fq_colname};
+        push @{$inner_attrs->{as}}, $ci->{-fq_colname};
+      }
+    }
+  }
+
   # construct the inner $from and lock it in a subquery
   # we need to prune first, because this will determine if we need a group_by below
   # the fake group_by is so that the pruner throws away all non-selecting, non-restricting
@@ -164,28 +218,35 @@ sub _adjust_select_args_for_complex_prefetch {
   # - it is part of the restrictions, in which case we need to collapse the outer
   # result by tackling yet another group_by to the outside of the query
 
+  # work on a shallow copy
   $from = [ @$from ];
 
-  # so first generate the outer_from, up to the substitution point
   my @outer_from;
-  while (my $j = shift @$from) {
-    $j = [ $j ] unless ref $j eq 'ARRAY'; # promote the head-from to an AoH
 
-    if ($j->[0]{-alias} eq $attrs->{alias}) { # time to swap
+  # we may not be the head
+  if ($root_source_offset) {
+    # first generate the outer_from, up to the substitution point
+    @outer_from = splice @$from, 0, $root_source_offset;
 
-      push @outer_from, [
-        {
-          -alias => $attrs->{alias},
-          -rsrc => $j->[0]{-rsrc},
-          $attrs->{alias} => $inner_subq,
-        },
-        @{$j}[1 .. $#$j],
-      ];
-      last; # we'll take care of what's left in $from below
-    }
-    else {
-      push @outer_from, $j;
-    }
+    my $root_node = shift @$from;
+
+    push @outer_from, [
+      {
+        -alias => $attrs->{alias},
+        -rsrc => $root_node->[0]{-rsrc},
+        $attrs->{alias} => $inner_subq,
+      },
+      @{$root_node}[1 .. $#$root_node],
+    ];
+  }
+  else {
+    my $root_node = shift @$from;
+
+    @outer_from = {
+      -alias => $attrs->{alias},
+      -rsrc => $root_node->{-rsrc},
+      $attrs->{alias} => $inner_subq,
+    };
   }
 
   # scan the *remaining* from spec against different attributes, and see which joins are needed
@@ -216,9 +277,6 @@ sub _adjust_select_args_for_complex_prefetch {
     }
   }
 
-  # demote the outer_from head
-  $outer_from[0] = $outer_from[0][0];
-
   if ($need_outer_group_by and ! $outer_attrs->{group_by}) {
 
     my $unprocessed_order_chunks;
@@ -591,11 +649,11 @@ sub _inner_join_to_node
 # yet another atrocity: attempt to extract all columns from a
 # where condition by hooking _quote
 sub _extract_condition_columns {
-  my ($self, $cond, $sql_maker) = @_;
+  my ($self, $cond, $sql_maker_cache) = @_;
 
   return [] unless $cond;
 
-  $sql_maker ||= $self->{_sql_ident_capturer} ||= do {
+  my $sm = $sql_maker_cache->{condparser} ||= $self->{_sql_ident_capturer} ||= do {
     # FIXME - replace with a Moo trait
     my $orig_sm_class = ref $self->sql_maker;
     my $smic_class = "${orig_sm_class}::_IdentCapture_";
@@ -638,9 +696,9 @@ sub _extract_condition_columns {
     $smic_class->new();
   };
 
-  $sql_maker->_recurse_where($cond);
+  $sm->_recurse_where($cond);
 
-  return [ sort keys %{$sql_maker->_get_captured_idents} ];
+  return [ sort keys %{$sm->_get_captured_idents} ];
 }
 
 sub _extract_order_criteria {
diff --git a/t/prefetch/incomplete.t b/t/prefetch/incomplete.t
index 781c1e1..4cfbdfc 100644
--- a/t/prefetch/incomplete.t
+++ b/t/prefetch/incomplete.t
@@ -109,4 +109,19 @@ throws_ok(
   'Sensible error message on mis-specified "as"',
 );
 
+# check complex limiting prefetch without the join-able columns
+{
+  my $pref_rs = $schema->resultset('Owners')->search({}, {
+    rows => 3,
+    offset => 1,
+    columns => 'name',  # only the owner name, still prefetch all the books
+    prefetch => 'books',
+  });
+
+  lives_ok {
+    is ($pref_rs->all, 1, 'Expected count of objects on limited prefetch')
+  } "Complex limited prefetch works with non-selected join condition";
+}
+
+
 done_testing;
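
For illustration, a minimal sketch of the usage pattern this commit addresses, assuming the bundled DBICTest schema from the test suite (an Owners source with a has_many 'books' relationship) and a run from a DBIx::Class checkout, mirroring the new test case above:

use strict;
use warnings;

use lib 't/lib';
use DBICTest;    # test-suite helper, assumed available in a checkout

my $schema = DBICTest->init_schema;

# A limited (rows/offset) prefetch where the root resultset does not select the
# column its has_many join condition needs. Before this commit the generated
# inner subquery omitted the owner identity column, so the prefetched books
# could not be matched back to their owners.
my $owners = $schema->resultset('Owners')->search({}, {
  rows     => 3,
  offset   => 1,
  columns  => 'name',    # only the owner name is selected explicitly
  prefetch => 'books',   # joining books still requires the owner key column
});

for my $owner ($owners->all) {
  my @books = $owner->books;    # served from the prefetch cache, no extra query
  printf "%s owns %d book(s)\n", $owner->name, scalar @books;
}

The patch achieves this by having _adjust_select_args_for_complex_prefetch scan the join conditions to the right of the root source and push any root-source columns they reference onto the inner subquery's select list, even when the user-supplied columns list leaves them out.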