1 package #hide from PAUSE
2 DBIx::Class::Storage::DBIHacks;
5 # This module contains code supporting a battery of special cases and tests for
6 # many corner cases pushing the envelope of what DBIC can do. When work on
7 # these utilities began in mid 2009 (51a296b402c) it wasn't immediately obvious
8 # that these pieces, despite their misleading on-first-sight-flakiness, will
9 # become part of the generic query rewriting machinery of DBIC, allowing it to
10 # both generate and process queries representing incredibly complex sets with
11 # reasonable efficiency.
13 # Now (end of 2015), more than 6 years later the routines in this class have
14 # stabilized enough, and are meticulously covered with tests, to a point where
15 # an effort to formalize them into user-facing APIs might be worthwhile.
17 # An implementor working on publicizing and/or replacing the routines with a
18 # more modern SQL generation framework should keep in mind that pretty much all
19 # existing tests are constructed on the basis of real-world code used in
20 # production somewhere.
22 # Please hack on this responsibly ;)
28 use base 'DBIx::Class::Storage';
31 use Scalar::Util 'blessed';
32 use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize dump_value);
33 use SQL::Abstract qw(is_plain_value is_literal_value);
34 use DBIx::Class::Carp;
38 # This code will remove non-selecting/non-restricting joins from
39 # {from} specs, aiding the RDBMS query optimizer
41 sub _prune_unused_joins {
42 my ($self, $attrs) = @_;
44 # only standard {from} specs are supported, and we could be disabled in general
45 return ($attrs->{from}, {}) unless (
46 ref $attrs->{from} eq 'ARRAY'
50 ref $attrs->{from}[0] eq 'HASH'
52 ref $attrs->{from}[1] eq 'ARRAY'
54 $self->_use_join_optimizer
# NOTE(review): a precalculated alias classification supplied by the caller
# takes precedence over a fresh resolution. The assignment target
# ($orig_aliastypes) is on a line elided from this dump - confirm in full file.
58 $attrs->{_precalculated_aliastypes}
60 $self->_resolve_aliastypes_from_select_args($attrs)
# work on a shallow copy of the classification - the original is not mutated
63 my $new_aliastypes = { %$orig_aliastypes };
65 # we will be recreating this entirely
66 my @reclassify = 'joining';
68 # a grouped set will not be affected by amount of rows. Thus any
69 # purely multiplicator classifications can go
70 # (will be reintroduced below if needed by something else)
71 push @reclassify, qw(multiplying premultiplied)
72 if $attrs->{_force_prune_multiplying_joins} or $attrs->{group_by};
74 # nuke what will be recalculated
75 delete @{$new_aliastypes}{@reclassify};
77 my @newfrom = $attrs->{from}[0]; # FROM head is always present
79 # recalculate what we need once the multipliers are potentially gone
80 # ignore premultiplies, since they do not add any value to anything
# NOTE(review): %need_joins is declared on a line elided from this dump
82 for ( @{$new_aliastypes}{grep { $_ ne 'premultiplied' } keys %$new_aliastypes }) {
83 # add all requested aliases
84 $need_joins{$_} = 1 for keys %$_;
86 # add all their parents (as per joinpath which is an AoH { table => alias })
87 $need_joins{$_} = 1 for map { values %$_ } map { @{$_->{-parents}} } values %$_;
# keep only the joins that something above declared a need for
90 for my $j (@{$attrs->{from}}[1..$#{$attrs->{from}}]) {
91 push @newfrom, $j if (
92 (! defined $j->[0]{-alias}) # legacy crap
94 $need_joins{$j->[0]{-alias}}
98 # we have a new set of joiners - for everything we nuked pull the classification
99 # off the original stack
100 for my $ctype (@reclassify) {
101 $new_aliastypes->{$ctype} = { map
102 { $need_joins{$_} ? ( $_ => $orig_aliastypes->{$ctype}{$_} ) : () }
103 keys %{$orig_aliastypes->{$ctype}}
# returns the pruned {from} arrayref plus the recalculated aliastype map
107 return ( \@newfrom, $new_aliastypes );
111 # This is the code producing joined subqueries like:
112 # SELECT me.*, other.* FROM ( SELECT me.* FROM ... ) JOIN other ON ...
114 sub _adjust_select_args_for_complex_prefetch {
115 my ($self, $attrs) = @_;
117 $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute') unless (
118 ref $attrs->{from} eq 'ARRAY'
120 @{$attrs->{from}} > 1
122 ref $attrs->{from}[0] eq 'HASH'
124 ref $attrs->{from}[1] eq 'ARRAY'
127 my $root_alias = $attrs->{alias};
129 # generate inner/outer attribute lists, remove stuff that doesn't apply
130 my $outer_attrs = { %$attrs };
131 delete @{$outer_attrs}{qw(from bind rows offset group_by _grouped_by_distinct having)};
133 my $inner_attrs = { %$attrs, _simple_passthrough_construction => 1 };
134 delete @{$inner_attrs}{qw(for collapse select as)};
136 # there is no point of ordering the insides if there is no limit
137 delete $inner_attrs->{order_by} if (
138 delete $inner_attrs->{_order_is_artificial}
140 ! $inner_attrs->{rows}
143 # generate the inner/outer select lists
144 # for inside we consider only stuff *not* brought in by the prefetch
145 # on the outside we substitute any function for its alias
146 $outer_attrs->{select} = [ @{$attrs->{select}} ];
# locate the root node (the one carrying $root_alias) inside {from}
148 my ($root_node, $root_node_offset);
150 for my $i (0 .. $#{$inner_attrs->{from}}) {
151 my $node = $inner_attrs->{from}[$i];
152 my $h = (ref $node eq 'HASH') ? $node
153 : (ref $node eq 'ARRAY' and ref $node->[0] eq 'HASH') ? $node->[0]
157 if ( ($h->{-alias}||'') eq $root_alias and $h->{-rsrc} ) {
159 $root_node_offset = $i;
164 $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute')
167 # use the heavy duty resolver to take care of aliased/nonaliased naming
168 my $colinfo = $self->_resolve_column_info($inner_attrs->{from});
169 my $selected_root_columns;
171 for my $i (0 .. $#{$outer_attrs->{select}}) {
172 my $sel = $outer_attrs->{select}->[$i];
175 $colinfo->{$sel} and $colinfo->{$sel}{-source_alias} ne $root_alias
178 if (ref $sel eq 'HASH' ) {
179 $sel->{-as} ||= $attrs->{as}[$i];
180 $outer_attrs->{select}->[$i] = join ('.', $root_alias, ($sel->{-as} || "inner_column_$i") );
182 elsif (! ref $sel and my $ci = $colinfo->{$sel}) {
183 $selected_root_columns->{$ci->{-colname}} = 1;
186 push @{$inner_attrs->{select}}, $sel;
188 push @{$inner_attrs->{as}}, $attrs->{as}[$i];
191 my $inner_aliastypes = $self->_resolve_aliastypes_from_select_args($inner_attrs);
193 # In the inner subq we will need to fetch *only* native columns which may
194 # be a part of an *outer* join condition, or an order_by (which needs to be
195 # preserved outside), or wheres. In other words everything but the inner
197 # We can not just fetch everything because a potential has_many restricting
198 # join collapse *will not work* on heavy data types.
200 # essentially a map of all non-selecting seen columns
201 # the sort is there for a nicer select list
205 { keys %{$_->{-seen_columns}||{}} }
207 { values %{$inner_aliastypes->{$_}} }
209 { $_ ne 'selecting' }
210 keys %$inner_aliastypes
212 my $ci = $colinfo->{$_} or next;
214 $ci->{-source_alias} eq $root_alias
216 ! $selected_root_columns->{$ci->{-colname}}++
218 # adding it to both to keep limits not supporting dark selectors happy
219 push @{$inner_attrs->{select}}, $ci->{-fq_colname};
220 push @{$inner_attrs->{as}}, $ci->{-fq_colname};
224 # construct the inner {from} and lock it in a subquery
225 # we need to prune first, because this will determine if we need a group_by below
226 # throw away all non-selecting, non-restricting multijoins
227 # (since we def. do not care about multiplication of the contents of the subquery)
228 my $inner_subq = do {
230 # must use it here regardless of user requests (vastly gentler on optimizer)
231 local $self->{_use_join_optimizer} = 1;
233 # throw away multijoins since we def. do not care about those inside the subquery
234 # $inner_aliastypes *will* be redefined at this point
235 ($inner_attrs->{from}, $inner_aliastypes ) = $self->_prune_unused_joins ({
237 _force_prune_multiplying_joins => 1,
238 _precalculated_aliastypes => $inner_aliastypes,
241 # uh-oh a multiplier (which is not us) left in, this is a problem for limits
242 # we will need to add a group_by to collapse the resultset for proper counts
244 grep { $_ ne $root_alias } keys %{ $inner_aliastypes->{multiplying} || {} }
246 # if there are user-supplied groups - assume user knows wtf they are up to
247 ( ! $inner_aliastypes->{grouping} or $inner_attrs->{_grouped_by_distinct} )
250 my $cur_sel = { map { $_ => 1 } @{$inner_attrs->{select}} };
252 # *possibly* supplement the main selection with pks if not already
253 # there, as they will have to be a part of the group_by to collapse
255 my $inner_select_with_extras;
256 my @pks = map { "$root_alias.$_" } $root_node->{-rsrc}->primary_columns
257 or $self->throw_exception( sprintf
258 'Unable to perform complex limited prefetch off %s without declared primary key',
259 $root_node->{-rsrc}->source_name,
262 push @{ $inner_select_with_extras ||= [ @{$inner_attrs->{select}} ] }, $col
263 unless $cur_sel->{$col}++;
266 ($inner_attrs->{group_by}, $inner_attrs->{order_by}) = $self->_group_over_selection({
268 $inner_select_with_extras ? ( select => $inner_select_with_extras ) : (),
269 _aliastypes => $inner_aliastypes,
273 # we already optimized $inner_attrs->{from} above
274 # and already local()ized
275 $self->{_use_join_optimizer} = 0;
277 # generate the subquery
278 $self->_select_args_to_query (
279 @{$inner_attrs}{qw(from select where)},
284 # Generate the outer from - this is relatively easy (really just replace
285 # the join slot with the subquery), with a major caveat - we can not
286 # join anything that is non-selecting (not part of the prefetch), but at
287 # the same time is a multi-type relationship, as it will explode the result.
289 # There are two possibilities here
290 # - either the join is non-restricting, in which case we simply throw it away
291 # - it is part of the restrictions, in which case we need to collapse the outer
292 # result by tackling yet another group_by to the outside of the query
294 # work on a shallow copy
295 my @orig_from = @{$attrs->{from}};
298 $outer_attrs->{from} = \ my @outer_from;
300 # we may not be the head
301 if ($root_node_offset) {
302 # first generate the outer_from, up to the substitution point
303 @outer_from = splice @orig_from, 0, $root_node_offset;
305 # substitute the subq at the right spot
308 -alias => $root_alias,
309 -rsrc => $root_node->{-rsrc},
310 $root_alias => $inner_subq,
312 # preserve attrs from what is now the head of the from after the splice
313 @{$orig_from[0]}[1 .. $#{$orig_from[0]}],
# else-branch: the root node *is* the head - replace it directly
318 -alias => $root_alias,
319 -rsrc => $root_node->{-rsrc},
320 $root_alias => $inner_subq,
324 shift @orig_from; # what we just replaced above
326 # scan the *remaining* from spec against different attributes, and see which joins are needed
328 my $outer_aliastypes = $outer_attrs->{_aliastypes} =
329 $self->_resolve_aliastypes_from_select_args({ %$outer_attrs, from => \@orig_from });
332 my ($outer_select_chain, @outer_nonselecting_chains) = map { +{
333 map { $_ => 1 } map { values %$_} map { @{$_->{-parents}} } values %{ $outer_aliastypes->{$_} || {} }
334 } } qw/selecting restricting grouping ordering/;
336 # see what's left - throw away if not selecting/restricting
337 my $may_need_outer_group_by;
338 while (my $j = shift @orig_from) {
339 my $alias = $j->[0]{-alias};
342 $outer_select_chain->{$alias}
346 elsif (grep { $_->{$alias} } @outer_nonselecting_chains ) {
347 push @outer_from, $j;
348 $may_need_outer_group_by ||= $outer_aliastypes->{multiplying}{$alias} ? 1 : 0;
352 # also throw in a synthetic group_by if a non-selecting multiplier,
353 # to guard against cross-join explosions
354 # the logic is somewhat fragile, but relies on the idea that if a user supplied
355 # a group by on their own - they know what they were doing
356 if ( $may_need_outer_group_by and $attrs->{_grouped_by_distinct} ) {
357 ($outer_attrs->{group_by}, $outer_attrs->{order_by}) = $self->_group_over_selection ({
359 from => \@outer_from,
363 # FIXME: The {where} ends up in both the inner and outer query, i.e. *twice*
365 # This is rather horrific, and while we currently *do* have enough
366 # introspection tooling available to attempt a stab at properly deciding
367 # whether or not to include the where condition on the outside, the
368 # machinery is still too slow to apply it here.
369 # Thus for the time being we do not attempt any sanitation of the where
370 # clause and just pass it through on both sides of the subquery. This *will*
371 # be addressed at a later stage, most likely after folding the SQL generator
372 # into SQLMaker proper
374 # OTOH it can be seen as a plus: <ash> (notes that this query would make a DBA cry ;)
379 # This is probably the ickiest, yet most relied upon part of the codebase:
380 # this is the place where we take arbitrary SQL input and break it into its
381 # constituent parts, making sure we know which *sources* are used in what
382 # *capacity* ( selecting / restricting / grouping / ordering / joining, etc )
383 # Although the method is pretty horrific, the worst thing that can happen is
384 # for a classification failure, which in turn will result in a vocal exception,
385 # and will lead to a relatively prompt fix.
386 # The code has been slowly improving and is covered with a formidable battery
387 # of tests, so can be considered "reliably stable" at this point (Oct 2015).
389 # A note to implementors attempting to "replace" this - keep in mind that while
390 # there are multiple optimization avenues, the actual "scan literal elements"
391 # part *MAY NEVER BE REMOVED*, even if it is limited only to the (future) AST
392 # nodes that are deemed opaque (i.e. contain literal expressions). The use of
393 # blackbox literals is at this point firmly a user-facing API, and is one of
394 # *the* reasons DBIC remains as flexible as it is. In other words, when working
395 # on this keep in mind that the following is widespread and *encouraged* way
396 # of using DBIC in the wild when push comes to shove:
399 # select => \[ $random, @stuff],
400 # from => \[ $random, @stuff ],
401 # where => \[ $random, @stuff ],
402 # group_by => \[ $random, @stuff ],
403 # order_by => \[ $random, @stuff ],
406 # Various incarnations of the above are reflected in many of the tests. If one
407 # gets to fail, you get to fix it. A "this is crazy, nobody does that" is not
408 # acceptable going forward.
410 sub _resolve_aliastypes_from_select_args {
411 my ( $self, $attrs ) = @_;
413 $self->throw_exception ('Unable to analyze custom {from}')
414 if ref $attrs->{from} ne 'ARRAY';
416 # what we will return
# NOTE(review): declarations of $aliases_by_type / $alias_list are on lines
# elided from this dump - confirm in full file
419 # see what aliases are there to work with
420 # and record who is a multiplier and who is premultiplied
422 for my $node (@{$attrs->{from}}) {
425 $j = $j->[0] if ref $j eq 'ARRAY';
426 my $al = $j->{-alias}
429 $alias_list->{$al} = $j;
431 $aliases_by_type->{multiplying}{$al} ||= { -parents => $j->{-join_path}||[] }
432 # not array == {from} head == can't be multiplying
433 if ref($node) eq 'ARRAY' and ! $j->{-is_single};
435 $aliases_by_type->{premultiplied}{$al} ||= { -parents => $j->{-join_path}||[] }
436 # parts of the path that are not us but are multiplying
437 if grep { $aliases_by_type->{multiplying}{$_} }
440 @{ $j->{-join_path}||[] }
443 # get a column to source/alias map (including unambiguous unqualified ones)
444 my $colinfo = $self->_resolve_column_info ($attrs->{from});
446 # set up a botched SQLA
447 my $sql_maker = $self->sql_maker;
449 # these are throw away results, do not pollute the bind stack
450 local $sql_maker->{where_bind};
451 local $sql_maker->{group_bind};
452 local $sql_maker->{having_bind};
453 local $sql_maker->{from_bind};
455 # we can't scan properly without any quoting (\b doesn't cut it
456 # everywhere), so unless there is proper quoting set - use our
457 # own weird impossible character.
458 # Also in the case of no quoting, we need to explicitly disable
459 # name_sep, otherwise sorry nasty legacy syntax like
460 # { 'count(foo.id)' => { '>' => 3 } } will stop working >:(
461 local $sql_maker->{quote_char} = $sql_maker->{quote_char};
462 local $sql_maker->{name_sep} = $sql_maker->{name_sep};
464 unless (defined $sql_maker->{quote_char} and length $sql_maker->{quote_char}) {
465 $sql_maker->{quote_char} = ["\x00", "\xFF"];
466 # if we don't unset it we screw up retarded but unfortunately working
467 # 'MAX(foo.bar)' => { '>', 3 }
468 $sql_maker->{name_sep} = '';
471 my ($lquote, $rquote, $sep) = map { quotemeta $_ } ($sql_maker->_quote_chars, $sql_maker->name_sep);
473 # generate sql chunks
# NOTE(review): the $to_scan hash constructor is partially elided here;
# keys appear to be capacity names (restricting/grouping/joining/...) mapped
# to arrays of SQL text rendered per attribute - confirm in full file
476 ($sql_maker->_recurse_where ($attrs->{where}))[0],
477 $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} }),
480 $sql_maker->_parse_rs_attrs ({ group_by => $attrs->{group_by} }),
483 $sql_maker->_recurse_from (
484 ref $attrs->{from}[0] eq 'ARRAY' ? $attrs->{from}[0][0] : $attrs->{from}[0],
485 @{$attrs->{from}}[1 .. $#{$attrs->{from}}],
489 # kill all selectors which look like a proper subquery
490 # this is a sucky heuristic *BUT* - if we get it wrong the query will simply
491 # fail to run, so we are relatively safe
493 { $_ !~ / \A \s* \( \s* SELECT \s+ .+? \s+ FROM \s+ .+? \) \s* \z /xsi }
495 { ($sql_maker->_recurse_fields($_))[0] }
# strip trailing ASC/DESC from each order chunk before scanning
500 ( my $sql = (ref $_ ? $_->[0] : $_) ) =~ s/ \s+ (?: ASC | DESC ) \s* \z //xi;
503 $sql_maker->_order_by_chunks( $attrs->{order_by} ),
507 # we will be bulk-scanning anyway - pieces will not matter in that case,
508 # thus join everything up
509 # throw away empty-string chunks, and make sure no binds snuck in
510 # note that we operate over @{$to_scan->{$type}}, hence the
511 # semi-mindbending ... map ... for values ...
512 ( $_ = join ' ', map {
514 ( ! defined $_ ) ? ()
515 : ( length ref $_ ) ? $self->throw_exception(
516 "Unexpected ref in scan-plan: " . dump_value $_
518 : ( $_ =~ /^\s*$/ ) ? ()
521 } @$_ ) for values %$to_scan;
523 # throw away empty to-scan's
525 length $to_scan->{$_}
527 delete $to_scan->{$_}
528 ) for keys %$to_scan;
532 # these will be used for matching in the loop below
533 my $all_aliases = join ' | ', map { quotemeta $_ } keys %$alias_list;
# fully-qualified column regex: quoted "alias"."col" or bare alias.col forms
535 $lquote ( $all_aliases ) $rquote $sep (?: $lquote ([^$rquote]+) $rquote )?
537 \b ( $all_aliases ) \. ( [^\s\)\($rquote]+ )?
541 my $all_unq_columns = join ' | ',
545 # using a regex here shows up on profiles, boggle
546 { index( $_, '.') < 0 }
549 my $unq_col_re = $all_unq_columns
551 $lquote ( $all_unq_columns ) $rquote
553 (?: \A | \s ) ( $all_unq_columns ) (?: \s | \z )
559 # the actual scan, per type
560 for my $type (keys %$to_scan) {
563 # now loop through all fully qualified columns and get the corresponding
564 # alias (should work even if they are in scalarrefs)
566 # The regex captures in multiples of 4, with one of the two pairs being
567 # undef. There may be a *lot* of matches, hence the convoluted loop
568 my @matches = $to_scan->{$type} =~ /$fq_col_re/g;
570 while( $i < $#matches ) {
575 $aliases_by_type->{$type}{$matches[$i]} ||= { -parents => $alias_list->{$matches[$i]}{-join_path}||[] };
577 $aliases_by_type->{$type}{$matches[$i]}{-seen_columns}{"$matches[$i].$matches[$i+1]"} = "$matches[$i].$matches[$i+1]"
578 if defined $matches[$i+1];
587 # now loop through unqualified column names, and try to locate them within
588 # the chunks, if there are any unqualified columns in the 1st place
589 next unless $unq_col_re;
591 # The regex captures in multiples of 2, one of the two being undef
592 for ( $to_scan->{$type} =~ /$unq_col_re/g ) {
594 my $alias = $colinfo->{$_}{-source_alias} or next;
595 $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] };
596 $aliases_by_type->{$type}{$alias}{-seen_columns}{"$alias.$_"} = $_
601 # Add any non-left joins to the restriction list (such joins are indeed restrictions)
605 ! $aliases_by_type->{restricting}{ $_->{-alias} }
610 $_->{-join_type} !~ /^left (?: \s+ outer)? $/xi
613 $aliases_by_type->{restricting}{ $_->{-alias} } = { -parents => $_->{-join_path}||[] }
614 ) for values %$alias_list;
# drop classification buckets that ended up empty before returning
619 keys %{$aliases_by_type->{$_}}
621 delete $aliases_by_type->{$_}
622 ) for keys %$aliases_by_type;
628 # This is the engine behind { distinct => 1 } and the general
629 # complex prefetch grouper
630 sub _group_over_selection {
631 my ($self, $attrs) = @_;
633 my $colinfos = $self->_resolve_column_info ($attrs->{from});
635 my (@group_by, %group_index);
637 # the logic is: if it is a { func => val } we assume an aggregate,
638 # otherwise if \'...' or \[...] we assume the user knows what is
639 # going on thus group over it
640 for (@{$attrs->{select}}) {
641 if (! ref($_) or ref ($_) ne 'HASH' ) {
644 if ($colinfos->{$_} and $_ !~ /\./ ) {
645 # add a fully qualified version as well
646 $group_index{"$colinfos->{$_}{-source_alias}.$_"}++;
# bail out early: with no order_by there is nothing further to reconcile
651 my @order_by = $self->_extract_order_criteria($attrs->{order_by})
652 or return (\@group_by, $attrs->{order_by});
654 # add any order_by parts that are not already present in the group_by
655 # to maintain SQL cross-compatibility and general sanity
657 # also in case the original selection is *not* unique, or in case part
658 # of the ORDER BY refers to a multiplier - we will need to replace the
659 # skipped order_by elements with their MIN/MAX equivalents as to maintain
660 # the proper overall order without polluting the group criteria (and
661 # possibly changing the outcome entirely)
663 my ($leftovers, $sql_maker, @new_order_by, $order_chunks, $aliastypes);
665 my $group_already_unique = $self->_columns_comprise_identifying_set($colinfos, \@group_by);
667 for my $o_idx (0 .. $#order_by) {
669 # if the chunk is already a min/max function - there is nothing left to touch
670 next if $order_by[$o_idx][0] =~ /^ (?: min | max ) \s* \( .+ \) $/ix;
672 # only consider real columns (for functions the user got to do an explicit group_by)
675 @{$order_by[$o_idx]} != 1
677 # only declare an unknown *plain* identifier as "leftover" if we are called with
678 # aliastypes to examine. If there are none - we are still in _resolve_attrs, and
679 # can just assume the user knows what they want
680 ( ! ( $chunk_ci = $colinfos->{$order_by[$o_idx][0]} ) and $attrs->{_aliastypes} )
682 push @$leftovers, $order_by[$o_idx][0];
685 next unless $chunk_ci;
687 # no duplication of group criteria
688 next if $group_index{$chunk_ci->{-fq_colname}};
# lazily resolve aliastypes only when the group is already unique
691 $attrs->{_aliastypes}
693 $self->_resolve_aliastypes_from_select_args({
694 from => $attrs->{from},
695 order_by => $attrs->{order_by},
697 ) if $group_already_unique;
699 # check that we are not ordering by a multiplier (if a check is requested at all)
701 $group_already_unique
703 ! $aliastypes->{multiplying}{$chunk_ci->{-source_alias}}
705 ! $aliastypes->{premultiplied}{$chunk_ci->{-source_alias}}
707 push @group_by, $chunk_ci->{-fq_colname};
708 $group_index{$chunk_ci->{-fq_colname}}++
711 # We need to order by external columns without adding them to the group
712 # (either a non-unique selection, or a multi-external)
714 # This doesn't really make sense in SQL, however from DBICs point
715 # of view is rather valid (e.g. order the leftmost objects by whatever
716 # criteria and get the offset/rows many). There is a way around
717 # this however in SQL - we simply take the direction of each piece
718 # of the external order and convert them to MIN(X) for ASC or MAX(X)
719 # for DESC, and group_by the root columns. The end result should be
720 # exactly what we expect
723 # both populated on the first loop over $o_idx
724 $sql_maker ||= $self->sql_maker;
726 map { ref $_ eq 'ARRAY' ? $_ : [ $_ ] } $sql_maker->_order_by_chunks($attrs->{order_by})
729 my ($chunk, $is_desc) = $sql_maker->_split_order_chunk($order_chunks->[$o_idx][0]);
731 # we reached that far - wrap any part of the order_by that "responded"
732 # to an ordering alias into a MIN/MAX
733 $new_order_by[$o_idx] = \[
734 sprintf( '%s( %s )%s',
735 $self->_minmax_operator_for_datatype($chunk_ci->{data_type}, $is_desc),
737 ($is_desc ? ' DESC' : ''),
739 @ {$order_chunks->[$o_idx]} [ 1 .. $#{$order_chunks->[$o_idx]} ]
# any order criteria we could not classify is a hard error
744 $self->throw_exception ( sprintf
745 'Unable to programatically derive a required group_by from the supplied '
746 . 'order_by criteria. To proceed either add an explicit group_by, or '
747 . 'simplify your order_by to only include plain columns '
748 . '(supplied order_by: %s)',
749 join ', ', map { "'$_'" } @$leftovers,
752 # recreate the untouched order parts
754 $new_order_by[$_] ||= \ $order_chunks->[$_] for ( 0 .. $#$order_chunks );
# returns ( \@group_by, $order_by ) - the order is the original ref when untouched
759 (@new_order_by ? \@new_order_by : $attrs->{order_by} ), # same ref as original == unchanged
# Selects the aggregate used when folding an ORDER BY column into the group:
# MAX for descending order, MIN for ascending. The datatype argument is
# currently unused (kept for future datatype-specific overrides).
763 sub _minmax_operator_for_datatype {
764 #my ($self, $datatype, $want_max) = @_;
766 $_[2] ? 'MAX' : 'MIN';
# Maps each alias in $ident to its ResultSource object.
# $ident is either a ResultSource (compat mode, keyed as 'me')
# or a {from}-style arrayref of join nodes.
769 sub _resolve_ident_sources {
770 my ($self, $ident) = @_;
772 my $alias2source = {};
774 # the reason this is so contrived is that $ident may be a {from}
775 # structure, specifying multiple tables to join
776 if ( blessed $ident && $ident->isa("DBIx::Class::ResultSource") ) {
777 # this is compat mode for insert/update/delete which do not deal with aliases
778 $alias2source->{me} = $ident;
780 elsif (ref $ident eq 'ARRAY') {
784 if (ref $_ eq 'HASH') {
787 if (ref $_ eq 'ARRAY' and ref $_->[0] eq 'HASH') {
# record only nodes that actually carry a result source
791 $alias2source->{$tabinfo->{-alias}} = $tabinfo->{-rsrc}
792 if ($tabinfo->{-rsrc});
796 return $alias2source;
799 # Takes $ident, \@column_names
801 # returns { $column_name => \%column_info, ... }
802 # also note: this adds -result_source => $rsrc to the column info
804 # If no column_names are supplied returns info about *all* columns
806 sub _resolve_column_info {
807 my ($self, $ident, $colnames) = @_;
# an explicitly empty column list short-circuits to an empty result
809 return {} if $colnames and ! @$colnames;
811 my $sources = $self->_resolve_ident_sources($ident);
# inflate each source entry into { rsrc => ..., colinfos => ... }
813 $_ = { rsrc => $_, colinfos => $_->columns_info }
814 for values %$sources;
816 my (%seen_cols, @auto_colnames);
818 # compile a global list of column names, to be able to properly
819 # disambiguate unqualified column names (if at all possible)
820 for my $alias (keys %$sources) {
822 ++$seen_cols{$_}{$alias}
826 push @auto_colnames, "$alias.$_"
827 ) for keys %{ $sources->{$alias}{colinfos} };
832 ( grep { keys %{$seen_cols{$_}} == 1 } keys %seen_cols ),
# split "alias.colname" - a bare colname leaves $source_alias undef
837 my ($colname, $source_alias) = reverse split /\./, $_;
842 # if the column was seen exactly once - we know which rsrc it came from
846 keys %{$seen_cols{$colname}} == 1
848 ( %{$seen_cols{$colname}} )[0]
854 $self->throw_exception(
855 "No such column '$colname' on source " . $sources->{$assumed_alias}{rsrc}->source_name
856 ) unless $seen_cols{$colname}{$assumed_alias};
# NOTE(review): the %return entry construction wrapper is partially elided
# from this dump; each entry augments colinfo with the -keys below
859 %{ $sources->{$assumed_alias}{colinfos}{$colname} },
860 -result_source => $sources->{$assumed_alias}{rsrc},
861 -source_alias => $assumed_alias,
862 -fq_colname => "$assumed_alias.$colname",
863 -colname => $colname,
# unqualified names get an extra fully-qualified key pointing at the same info
866 $return{"$assumed_alias.$colname"} = $return{$_}
867 unless $source_alias;
873 # The DBIC relationship chaining implementation is pretty simple - every
874 # new related_relationship is pushed onto the {from} stack, and the {select}
875 # window simply slides further in. This means that when we count somewhere
876 # in the middle, we got to make sure that everything in the join chain is an
877 # actual inner join, otherwise the count will come back with unpredictable
878 # results (a resultset may be generated with _some_ rows regardless of if
879 # the relation which the $rs currently selects has rows or not). E.g.
880 # $artist_rs->cds->count - normally generates:
881 # SELECT COUNT( * ) FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid
882 # which actually returns the number of artists * (number of cds || 1)
884 # So what we do here is crawl {from}, determine if the current alias is at
885 # the top of the stack, and if not - make sure the chain is inner-joined down
888 sub _inner_join_to_node {
889 my ($self, $from, $alias) = @_;
# find the chain of joins leading to $alias; nothing to do if empty/undef
891 my $switch_branch = $self->_find_join_path_to_node($from, $alias);
893 return $from unless @{$switch_branch||[]};
895 # So it looks like we will have to switch some stuff around.
896 # local() is useless here as we will be leaving the scope
897 # anyway, and deep cloning is just too fucking expensive
898 # So replace the first hashref in the node arrayref manually
899 my @new_from = ($from->[0]);
900 my $sw_idx = { map { (values %$_), 1 } @$switch_branch }; #there's one k/v per join-path
902 for my $j (@{$from}[1 .. $#$from]) {
903 my $jalias = $j->[0]{-alias};
905 if ($sw_idx->{$jalias}) {
906 my %attrs = %{$j->[0]};
# dropping -join_type forces the default (inner) join for this node
907 delete $attrs{-join_type};
# Returns the -join_path leading to $target_alias within a {from} spec:
# [] when the head itself is the target, the node's join path when found,
# and (per the trailing comment) falls through when nothing matches.
921 sub _find_join_path_to_node {
922 my ($self, $from, $target_alias) = @_;
924 # subqueries and other oddness are naturally not supported
928 ref $from->[0] ne 'HASH'
930 ! defined $from->[0]{-alias}
933 # no path - the head is the alias
934 return [] if $from->[0]{-alias} eq $target_alias;
936 for my $i (1 .. $#$from) {
937 return $from->[$i][0]{-join_path} if ( ($from->[$i][0]{-alias}||'') eq $target_alias );
940 # something else went quite wrong
# Normalizes an order_by spec into a list of [ column, @rest ] chunks with
# directions stripped and quoting undone, via an inner $parser closure.
944 sub _extract_order_criteria {
945 my ($self, $order_by, $sql_maker) = @_;
# inner parser closure - note it receives its own argument list
948 my ($sql_maker, $order_by, $orig_quote_chars) = @_;
950 return scalar $sql_maker->_order_by_chunks ($order_by)
953 my ($lq, $rq, $sep) = map { quotemeta($_) } (
954 ($orig_quote_chars ? @$orig_quote_chars : $sql_maker->_quote_chars),
959 for ($sql_maker->_order_by_chunks ($order_by) ) {
960 my $chunk = ref $_ ? [ @$_ ] : [ $_ ];
961 ($chunk->[0]) = $sql_maker->_split_order_chunk($chunk->[0]);
963 # order criteria may have come back pre-quoted (literals and whatnot)
964 # this is fragile, but the best we can currently do
965 $chunk->[0] =~ s/^ $lq (.+?) $rq $sep $lq (.+?) $rq $/"$1.$2"/xe
966 or $chunk->[0] =~ s/^ $lq (.+) $rq $/$1/x;
968 push @chunks, $chunk;
# a caller-supplied $sql_maker is used as-is, without quote_char games
975 return $parser->($sql_maker, $order_by);
978 $sql_maker = $self->sql_maker;
980 # pass these in to deal with literals coming from
981 # the user or the deep guts of prefetch
982 my $orig_quote_chars = [$sql_maker->_quote_chars];
# temporarily disable quoting on our own sql_maker for the parse
984 local $sql_maker->{quote_char};
985 return $parser->($sql_maker, $order_by, $orig_quote_chars);
# Determines whether the order_by (optionally aided by equality conditions
# in $where) pins down an identifying column set, i.e. yields a stable order.
989 sub _order_by_is_stable {
990 my ($self, $ident, $order_by, $where) = @_;
# collect candidate columns from the order criteria and fixed where-columns
993 ( map { $_->[0] } $self->_extract_order_criteria($order_by) ),
994 ( $where ? keys %{ $self->_extract_fixed_condition_columns($where) } : () ),
997 my $colinfo = $self->_resolve_column_info($ident, \@cols);
999 return keys %$colinfo
1000 ? $self->_columns_comprise_identifying_set( $colinfo, \@cols )
# True (1) if, for at least one involved source, the supplied columns cover
# an identifying column set (e.g. a primary or unique key) of that source.
1005 sub _columns_comprise_identifying_set {
1006 my ($self, $colinfo, $columns) = @_;
# bucket the resolvable columns by their source alias
1009 $cols_per_src -> {$_->{-source_alias}} -> {$_->{-colname}} = $_
1010 for grep { defined $_ } @{$colinfo}{@$columns};
1012 for (values %$cols_per_src) {
1013 my $src = (values %$_)[0]->{-result_source};
1014 return 1 if $src->_identifying_column_set($_);
1020 # this is almost similar to _order_by_is_stable, except it takes
1021 # a single rsrc, and will succeed only if the first portion of the order
1023 # returns that portion as a colinfo hashref on success
1024 sub _extract_colinfo_of_stable_main_source_order_by_portion {
1025 my ($self, $attrs) = @_;
1027 my $nodes = $self->_find_join_path_to_node($attrs->{from}, $attrs->{alias});
1029 return unless defined $nodes;
# collect the leading order_by columns (list construction partially elided)
1033 ( $self->_extract_order_criteria($attrs->{order_by}) )
1035 return unless @ord_cols;
1037 my $valid_aliases = { map { $_ => 1 } (
1038 $attrs->{from}[0]{-alias},
1039 map { values %$_ } @$nodes,
1042 my $colinfos = $self->_resolve_column_info($attrs->{from});
1044 my ($colinfos_to_return, $seen_main_src_cols);
1046 for my $col (@ord_cols) {
1047 # if order criteria is unresolvable - there is nothing we can do
1048 my $colinfo = $colinfos->{$col} or last;
1050 # if we reached the end of the allowed aliases - also nothing we can do
1051 last unless $valid_aliases->{$colinfo->{-source_alias}};
1053 $colinfos_to_return->{$col} = $colinfo;
1055 $seen_main_src_cols->{$colinfo->{-colname}} = 1
1056 if $colinfo->{-source_alias} eq $attrs->{alias};
1059 # FIXME the condition may be singling out things on its own, so we
1060 # conceivably could come back with "stable-ordered by nothing"
1061 # not confident enough in the parser yet, so punt for the time being
1062 return unless $seen_main_src_cols;
# columns fixed to constants by {where} also contribute to uniqueness
1064 my $main_src_fixed_cols_from_cond = [ $attrs->{where}
1068 ( $colinfos->{$_} and $colinfos->{$_}{-source_alias} eq $attrs->{alias} )
1069 ? $colinfos->{$_}{-colname}
1072 keys %{ $self->_extract_fixed_condition_columns($attrs->{where}) }
# success: return the colinfo map only if the combination identifies a row
1077 return $attrs->{result_source}->_identifying_column_set([
1078 keys %$seen_main_src_cols,
1079 @$main_src_fixed_cols_from_cond,
1080 ]) ? $colinfos_to_return : ();
1083 # Attempts to flatten a passed in SQLA condition as much as possible towards
1084 # a plain hashref, *without* altering its semantics. Required by
1085 # create/populate being able to extract definitive conditions from preexisting
1086 # resultset {where} stacks
1088 # FIXME - while relatively robust, this is still imperfect, one of the first
1089 # things to tackle when we get access to a formalized AST. Note that this code
1090 # is covered by a *ridiculous* amount of tests, so starting with porting this
1091 # code would be a rather good exercise
# Args: ($self, $where, $where_is_anded_array)
#   $where - an SQLA-style condition (hashref / arrayref / scalar / literal)
#   $where_is_anded_array - true when $where is an arrayref whose elements
#     are to be AND-ed together (instead of the default OR semantics of a
#     top-level arrayref)
# Returns the flattened condition hashref, or () when nothing remains.
1092 sub _collapse_cond {
1093 my ($self, $where, $where_is_anded_array) = @_;
# hashrefs and AND-ed arrays are decomposed into a flat lhs => rhs
# @pairs list, later normalized by _collapse_cond_unroll_pairs
1100 elsif ($where_is_anded_array or ref $where eq 'HASH') {
1104 my @pieces = $where_is_anded_array ? @$where : $where;
1106 my $chunk = shift @pieces;
1108 if (ref $chunk eq 'HASH') {
1109 for (sort keys %$chunk) {
1111 # Match SQLA 1.79 behavior
1112 unless( length $_ ) {
1113 is_literal_value($chunk->{$_})
1114 ? carp 'Hash-pairs consisting of an empty string with a literal are deprecated, use -and => [ $literal ] instead'
1115 : $self->throw_exception("Supplying an empty left hand side argument is not supported in hash-pairs")
1119 push @pairs, $_ => $chunk->{$_};
# a nested bare arrayref carries OR semantics
1122 elsif (ref $chunk eq 'ARRAY') {
1123 push @pairs, -or => $chunk
1126 elsif ( ! length ref $chunk) {
1128 # Match SQLA 1.79 behavior
1129 $self->throw_exception("Supplying an empty left hand side argument is not supported in array-pairs")
1130 if $where_is_anded_array and (! defined $chunk or ! length $chunk);
# a plain scalar is the lhs of a pair - consume the next piece as rhs
1132 push @pairs, $chunk, shift @pieces;
# anything else (e.g. a literal) gets an empty-string lhs marker
1135 push @pairs, '', $chunk;
1139 return unless @pairs;
1141 my @conds = $self->_collapse_cond_unroll_pairs(\@pairs)
1144 # Consolidate various @conds back into something more compact
1145 for my $c (@conds) {
1146 if (ref $c ne 'HASH') {
1147 push @{$fin->{-and}}, $c;
1150 for my $col (sort keys %$c) {
1152 # consolidate all -and nodes
1153 if ($col =~ /^\-and$/i) {
1154 push @{$fin->{-and}},
1155 ref $c->{$col} eq 'ARRAY' ? @{$c->{$col}}
1156 : ref $c->{$col} eq 'HASH' ? %{$c->{$col}}
1157 : { $col => $c->{$col} }
# any other operator node goes onto the -and pile verbatim
1160 elsif ($col =~ /^\-/) {
1161 push @{$fin->{-and}}, { $col => $c->{$col} };
# same column constrained more than once - merge into an explicit -and list
1163 elsif (exists $fin->{$col}) {
1164 $fin->{$col} = [ -and => map {
1165 (ref $_ eq 'ARRAY' and ($_->[0]||'') =~ /^\-and$/i )
1169 } ($fin->{$col}, $c->{$col}) ];
1172 $fin->{$col} = $c->{$col};
# a top-level arrayref is a list of OR-ed conditions: collapse each
# element and deduplicate via a serialization-keyed index ($fin_idx)
1178 elsif (ref $where eq 'ARRAY') {
1179 # we are always at top-level here, it is safe to dump empty *standalone* pieces
1182 for (my $i = 0; $i <= $#$where; $i++ ) {
1184 # Match SQLA 1.79 behavior
1185 $self->throw_exception(
1186 "Supplying an empty left hand side argument is not supported in array-pairs"
1187 ) if (! defined $where->[$i] or ! length $where->[$i]);
1189 my $logic_mod = lc ( ($where->[$i] =~ /^(\-(?:and|or))$/i)[0] || '' );
1193 $self->throw_exception("Unsupported top-level op/arg pair: [ $logic_mod => $where->[$i] ]")
1194 unless ref $where->[$i] eq 'HASH' or ref $where->[$i] eq 'ARRAY';
1196 my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] })
# single-column conditions are keyed by column name so that identical
# restrictions on the same column dedupe naturally
1199 my @keys = keys %$sub_elt;
1200 if ( @keys == 1 and $keys[0] !~ /^\-/ ) {
1201 $fin_idx->{ "COL_$keys[0]_" . serialize $sub_elt } = $sub_elt;
1204 $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt;
# a plain scalar element is the lhs of a pair spanning two slots
1207 elsif (! length ref $where->[$i] ) {
1208 my $sub_elt = $self->_collapse_cond({ @{$where}[$i, $i+1] })
1211 $fin_idx->{ "COL_$where->[$i]_" . serialize $sub_elt } = $sub_elt;
1215 $fin_idx->{ "SER_" . serialize $where->[$i] } = $self->_collapse_cond( $where->[$i] ) || next;
# a single surviving OR branch is no longer an OR
1222 elsif ( keys %$fin_idx == 1 ) {
1223 $fin = (values %$fin_idx)[0];
1228 # at this point everything is at most one level deep - unroll if needed
1229 for (sort keys %$fin_idx) {
1230 if ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) {
1231 my ($l, $r) = %{$fin_idx->{$_}};
1237 ( @$r == 1 and $l =~ /^\-and$/i )
1250 $l =~ /^\-(?:and|or)$/i
1260 push @or, $fin_idx->{$_};
1268 # not a hash not an array
1269 $fin = { -and => [ $where ] };
1272 # unroll single-element -and's
1278 my $and = delete $fin->{-and};
1281 # at this point we have @$and == 1
# fold the lone -and hash member into $fin, but only when none of its
# keys would clobber an existing $fin entry
1283 ref $and->[0] eq 'HASH'
1285 ! grep { exists $fin->{$_} } keys %{$and->[0]}
1292 $fin->{-and} = $and;
1297 # compress same-column conds found in $fin
1298 for my $col ( grep { $_ !~ /^\-/ } keys %$fin ) {
1299 next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') =~ /^\-and$/i;
# key each value by identity (undef / plain value / serialized ref) so
# duplicate constraints collapse into a single bag entry
1300 my $val_bag = { map {
1301 (! defined $_ ) ? ( UNDEF => undef )
1302 : ( ! length ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ )
1303 : ( ( 'SER_' . serialize $_ ) => $_ )
1304 } @{$fin->{$col}}[1 .. $#{$fin->{$col}}] };
# a bag of one means every constraint was identical - flatten back down
1306 if (keys %$val_bag == 1 ) {
1307 ($fin->{$col}) = values %$val_bag;
1310 $fin->{$col} = [ -and => map { $val_bag->{$_} } sort keys %$val_bag ];
1314 return keys %$fin ? $fin : ();
# Worker for _collapse_cond: consumes the flat lhs => rhs @$pairs list two
# elements at a time, normalizing each pair into one or more plain condition
# hashrefs (@conds). Multi-value rhs forms are re-queued onto the front of
# @$pairs via unshift rather than recursing.
1317 sub _collapse_cond_unroll_pairs {
1318 my ($self, $pairs) = @_;
1323 my ($lhs, $rhs) = splice @$pairs, 0, 2;
# an empty lhs marks a standalone chunk queued as '' => $chunk by
# _collapse_cond - collapse the rhs on its own
1325 if (! length $lhs) {
1326 push @conds, $self->_collapse_cond($rhs);
# -and: re-collapse, flagging arrayrefs as AND-ed arrays
1328 elsif ( $lhs =~ /^\-and$/i ) {
1329 push @conds, $self->_collapse_cond($rhs, (ref $rhs eq 'ARRAY'));
# -or: hashrefs are re-shaped into a flat k/v list (OR semantics)
1331 elsif ( $lhs =~ /^\-or$/i ) {
1332 push @conds, $self->_collapse_cond(
1333 (ref $rhs eq 'HASH') ? [ map { $_ => $rhs->{$_} } sort keys %$rhs ] : $rhs
1337 if (ref $rhs eq 'HASH' and ! keys %$rhs) {
1338 # FIXME - SQLA seems to be doing... nothing...?
1340 # normalize top level -ident, for saner extract_fixed_condition_columns code
1341 elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-ident}) {
1342 push @conds, { $lhs => { '=', $rhs } };
# a { -value => $plain } wrapper is equivalent to the plain value itself
1344 elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-value} and is_plain_value $rhs->{-value}) {
1345 push @conds, { $lhs => $rhs->{-value} };
1347 elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) {
# a literal rhs of '=' is passed through untouched
1348 if ( length ref $rhs->{'='} and is_literal_value $rhs->{'='} ) {
1349 push @conds, { $lhs => $rhs };
# otherwise re-unroll with the '=' stripped, re-prepending it below
1352 for my $p ($self->_collapse_cond_unroll_pairs([ $lhs => $rhs->{'='} ])) {
1354 # extra sanity check
1356 local $Data::Dumper::Deepcopy = 1;
1357 $self->throw_exception(
1358 "Internal error: unexpected collapse unroll:"
1359 . dump_value { in => { $lhs => $rhs }, out => $p }
1368 # the unroller recursion may return a '=' prepended value already
# NOTE(review): this condition checks %$rhs / $rhs->{'='}, yet the
# comment above refers to the recursion result ($r) - it looks like it
# should read: keys %$r == 1 and exists $r->{'='}. Verify against
# upstream before touching (code left byte-identical here).
1369 ref $r eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}
1374 : { $l => { '=' => $r } }
1379 elsif (ref $rhs eq 'ARRAY') {
1380 # some of these conditionals encounter multi-values - roll them out using
1381 # an unshift, which will cause extra looping in the while{} above
1383 push @conds, { $lhs => [] };
1385 elsif ( ($rhs->[0]||'') =~ /^\-(?:and|or)$/i ) {
1386 $self->throw_exception("Value modifier not followed by any values: $lhs => [ $rhs->[0] ] ")
1389 if( $rhs->[0] =~ /^\-and$/i ) {
# AND-ed values: queue one lhs => value pair per element
1390 unshift @$pairs, map { $lhs => $_ } @{$rhs}[1..$#$rhs];
1392 # if not an AND then it's an OR
1394 unshift @$pairs, $lhs => $rhs->[1];
1397 push @conds, { $lhs => [ @{$rhs}[1..$#$rhs] ] };
# a single-element list degenerates to a plain pair
1400 elsif (@$rhs == 1) {
1401 unshift @$pairs, $lhs => $rhs->[0];
1404 push @conds, { $lhs => $rhs };
1407 # unroll func + { -value => ... }
1411 ( my ($subop) = keys %$rhs ) == 1
1413 length ref ((values %$rhs)[0])
1415 my $vref = is_plain_value( (values %$rhs)[0] )
1417 push @conds, { $lhs => { $subop => $$vref } }
1420 push @conds, { $lhs => $rhs };
1428 # Analyzes a given condition and attempts to extract all columns
1429 # with a definitive fixed-condition criteria. Returns a hashref
1430 # of k/v pairs suitable to be passed to set_columns(), with a
1431 # MAJOR CAVEAT - multi-value (contradictory) equalities are still
1432 # represented as a reference to the UNRESOLVABLE_CONDITION constant
1433 # The reason we do this is that some codepaths only care about the
1434 # condition being stable, as opposed to actually making sense
1436 # The normal mode is used to figure out if a resultset is constrained
1437 # to a column which is part of a unique constraint, which in turn
1438 # allows us to better predict how ordering will behave etc.
1440 # With the optional "consider_nulls" boolean argument, the function
1441 # is instead used to infer unambiguous values from conditions
1442 # (e.g. the inheritance of resultset conditions on new_result)
1444 sub _extract_fixed_condition_columns {
1445 my ($self, $where, $consider_nulls) = @_;
1446 my $where_hash = $self->_collapse_cond($_[1]);
1450 for $c (keys %$where_hash) {
1453 if (!defined ($v = $where_hash->{$c}) ) {
1454 $vals->{UNDEF} = $v if $consider_nulls
1461 if (exists $v->{-value}) {
1462 if (defined $v->{-value}) {
1463 $vals->{"VAL_$v->{-value}"} = $v->{-value}
1465 elsif( $consider_nulls ) {
1466 $vals->{UNDEF} = $v->{-value};
1469 # do not need to check for plain values - _collapse_cond did it for us
1471 length ref $v->{'='}
1474 ( ref $v->{'='} eq 'HASH' and keys %{$v->{'='}} == 1 and exists $v->{'='}{-ident} )
1476 is_literal_value($v->{'='})
1479 $vals->{ 'SER_' . serialize $v->{'='} } = $v->{'='};
1487 $vals->{"VAL_$v"} = $v;
1489 elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') {
1490 for ( @{$v}[1..$#$v] ) {
1491 my $subval = $self->_extract_fixed_condition_columns({ $c => $_ }, 'consider nulls'); # always fish nulls out on recursion
1492 next unless exists $subval->{$c}; # didn't find anything
1494 ! defined $subval->{$c} ? 'UNDEF'
1495 : ( ! length ref $subval->{$c} or is_plain_value $subval->{$c} ) ? "VAL_$subval->{$c}"
1496 : ( 'SER_' . serialize $subval->{$c} )
1501 if (keys %$vals == 1) {
1502 ($res->{$c}) = (values %$vals)
1503 unless !$consider_nulls and exists $vals->{UNDEF};
1505 elsif (keys %$vals > 1) {
1506 $res->{$c} = UNRESOLVABLE_CONDITION;