X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBIHacks.pm;h=7da10cc4da2502b9088af8326429bfbf09b8176b;hb=229401a04e99e27e256fdcd24b3c34087c9c2bc1;hp=4c07f5e28e3f9a8ff52da691990e4e6eab5653ba;hpb=c1f3f2e8a79c6a4081c9949fa30da09d18d64d3b;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 4c07f5e..7da10cc 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -17,6 +17,7 @@ use List::Util 'first'; use Scalar::Util 'blessed'; use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize); use SQL::Abstract qw(is_plain_value is_literal_value); +use DBIx::Class::Carp; use namespace::clean; # @@ -416,7 +417,7 @@ sub _resolve_aliastypes_from_select_args { # generate sql chunks my $to_scan = { restricting => [ - $sql_maker->_recurse_where ($attrs->{where}), + ($sql_maker->_recurse_where ($attrs->{where}))[0], $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} }), ], grouping => [ @@ -429,111 +430,115 @@ sub _resolve_aliastypes_from_select_args { ), ], selecting => [ - map { ($sql_maker->_recurse_fields($_))[0] } @{$attrs->{select}}, + # kill all selectors which look like a proper subquery + # this is a sucky heuristic *BUT* - if we get it wrong the query will simply + # fail to run, so we are relatively safe + grep + { $_ !~ / \A \s* \( \s* SELECT \s+ .+? \s+ FROM \s+ .+? \) \s* \z /xsi } + map + { ($sql_maker->_recurse_fields($_))[0] } + @{$attrs->{select}} ], - ordering => [ - map { $_->[0] } $self->_extract_order_criteria ($attrs->{order_by}, $sql_maker), + ordering => [ map + { + ( my $sql = (ref $_ ? $_->[0] : $_) ) =~ s/ \s+ (?: ASC | DESC ) \s* \z //xi; + $sql; + } + $sql_maker->_order_by_chunks( $attrs->{order_by} ), ], }; - # throw away empty chunks and all 2-value arrayrefs: the thinking is that these are - # bind value specs left in by the sloppy renderer above. It is ok to do this - # at this point, since we are going to end up rewriting this crap anyway - for my $v (values %$to_scan) { - my @nv; - for (@$v) { - next if ( - ! defined $_ - or - ( - ref $_ eq 'ARRAY' - and - ( @$_ == 0 or @$_ == 2 ) - ) - ); + # throw away empty-string chunks, and make sure no binds snuck in + # note that we operate over @{$to_scan->{$type}}, hence the + # semi-mindbending ... map ... for values ... + ( $_ = [ map { - if (ref $_) { - require Data::Dumper::Concise; - $self->throw_exception("Unexpected ref in scan-plan: " . Data::Dumper::Concise::Dumper($v) ); - } + (not $_) ? () + : (length ref $_) ? (require Data::Dumper::Concise && $self->throw_exception( + "Unexpected ref in scan-plan: " . Data::Dumper::Concise::Dumper($_) + )) + : $_ - push @nv, $_; - } + } @$_ ] ) for values %$to_scan; - $v = \@nv; - } + # throw away empty to-scan's + ( + @{$to_scan->{$_}} + or + delete $to_scan->{$_} + ) for keys %$to_scan; - # kill all selectors which look like a proper subquery - # this is a sucky heuristic *BUT* - if we get it wrong the query will simply - # fail to run, so we are relatively safe - $to_scan->{selecting} = [ grep { - $_ !~ / \A \s* \( \s* SELECT \s+ .+? \s+ FROM \s+ .+? 
\) \s* \z /xsi - } @{ $to_scan->{selecting} || [] } ]; - # first see if we have any exact matches (qualified or unqualified) + # the actual scan, per type for my $type (keys %$to_scan) { + + # first see if we have any exact matches (qualified or unqualified) for my $piece (@{$to_scan->{$type}}) { if ($colinfo->{$piece} and my $alias = $colinfo->{$piece}{-source_alias}) { $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] }; $aliases_by_type->{$type}{$alias}{-seen_columns}{$colinfo->{$piece}{-fq_colname}} = $piece; } } - } - # now loop through all fully qualified columns and get the corresponding - # alias (should work even if they are in scalarrefs) - for my $alias (keys %$alias_list) { - my $al_re = qr/ - $lquote $alias $rquote $sep (?: $lquote ([^$rquote]+) $rquote )? - | - \b $alias \. ([^\s\)\($rquote]+)? - /x; - - for my $type (keys %$to_scan) { - for my $piece (@{$to_scan->{$type}}) { - if (my @matches = $piece =~ /$al_re/g) { - $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] }; - $aliases_by_type->{$type}{$alias}{-seen_columns}{"$alias.$_"} = "$alias.$_" - for grep { defined $_ } @matches; - } + # we will be bulk-scanning anyway - pieces will not matter in that case + # (unlike in the direct-equivalence above) + my $scan_string = join ' ', @{$to_scan->{$type}}; + + # now loop through all fully qualified columns and get the corresponding + # alias (should work even if they are in scalarrefs) + for my $alias (keys %$alias_list) { + my $al_re = qr/ + $lquote $alias $rquote $sep (?: $lquote ([^$rquote]+) $rquote )? + | + \b $alias \. ([^\s\)\($rquote]+)? + /x; + + if (my @matches = $scan_string =~ /$al_re/g) { + $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] }; + $aliases_by_type->{$type}{$alias}{-seen_columns}{"$alias.$_"} = "$alias.$_" + for grep { defined $_ } @matches; } } - } - # now loop through unqualified column names, and try to locate them within - # the chunks - for my $col (keys %$colinfo) { - next if $col =~ / \. /x; # if column is qualified it was caught by the above + # now loop through unqualified column names, and try to locate them within + # the chunks + for my $col (keys %$colinfo) { + next if $col =~ / \. /x; # if column is qualified it was caught by the above - my $col_re = qr/ $lquote ($col) $rquote /x; + my $col_re = qr/ $lquote ($col) $rquote /x; - for my $type (keys %$to_scan) { - for my $piece (@{$to_scan->{$type}}) { - if ( my @matches = $piece =~ /$col_re/g) { - my $alias = $colinfo->{$col}{-source_alias}; - $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] }; - $aliases_by_type->{$type}{$alias}{-seen_columns}{"$alias.$_"} = $_ - for grep { defined $_ } @matches; - } + if ( my @matches = $scan_string =~ /$col_re/g) { + my $alias = $colinfo->{$col}{-source_alias}; + $aliases_by_type->{$type}{$alias} ||= { -parents => $alias_list->{$alias}{-join_path}||[] }; + $aliases_by_type->{$type}{$alias}{-seen_columns}{"$alias.$_"} = $_ + for grep { defined $_ } @matches; } } } # Add any non-left joins to the restriction list (such joins are indeed restrictions) - for my $j (values %$alias_list) { - my $alias = $j->{-alias} or next; - $aliases_by_type->{restricting}{$alias} ||= { -parents => $j->{-join_path}||[] } if ( - (not $j->{-join_type}) + ( + $_->{-alias} + and + ! $aliases_by_type->{restricting}{ $_->{-alias} } + and + ( + not $_->{-join_type} or - ($j->{-join_type} !~ /^left (?: \s+ outer)? 
$/xi) - ); - } + $_->{-join_type} !~ /^left (?: \s+ outer)? $/xi + ) + and + $aliases_by_type->{restricting}{ $_->{-alias} } = { -parents => $_->{-join_path}||[] } + ) for values %$alias_list; - for (keys %$aliases_by_type) { - delete $aliases_by_type->{$_} unless keys %{$aliases_by_type->{$_}}; - } + # final cleanup + ( + keys %{$aliases_by_type->{$_}} + or + delete $aliases_by_type->{$_} + ) for keys %$aliases_by_type; - return $aliases_by_type; + $aliases_by_type; } # This is the engine behind { distinct => 1 } and the general @@ -714,53 +719,63 @@ sub _resolve_column_info { return {} if $colnames and ! @$colnames; - my $alias2src = $self->_resolve_ident_sources($ident); + my $sources = $self->_resolve_ident_sources($ident); + + $_ = { rsrc => $_, colinfos => $_->columns_info } + for values %$sources; my (%seen_cols, @auto_colnames); # compile a global list of column names, to be able to properly # disambiguate unqualified column names (if at all possible) - for my $alias (keys %$alias2src) { - my $rsrc = $alias2src->{$alias}; - for my $colname ($rsrc->columns) { - push @{$seen_cols{$colname}}, $alias; - push @auto_colnames, "$alias.$colname" unless $colnames; - } + for my $alias (keys %$sources) { + ( + ++$seen_cols{$_}{$alias} + and + ! $colnames + and + push @auto_colnames, "$alias.$_" + ) for keys %{ $sources->{$alias}{colinfos} }; } $colnames ||= [ @auto_colnames, - grep { @{$seen_cols{$_}} == 1 } (keys %seen_cols), + ( grep { keys %{$seen_cols{$_}} == 1 } keys %seen_cols ), ]; - my (%return, $colinfos); - foreach my $col (@$colnames) { - my ($source_alias, $colname) = $col =~ m/^ (?: ([^\.]+) \. )? (.+) $/x; - - # if the column was seen exactly once - we know which rsrc it came from - $source_alias ||= $seen_cols{$colname}[0] - if ($seen_cols{$colname} and @{$seen_cols{$colname}} == 1); + my %return; + for (@$colnames) { + my ($colname, $source_alias) = reverse split /\./, $_; - next unless $source_alias; + my $assumed_alias = + $source_alias + || + # if the column was seen exactly once - we know which rsrc it came from + ( + $seen_cols{$colname} + and + keys %{$seen_cols{$colname}} == 1 + and + ( %{$seen_cols{$colname}} )[0] + ) + || + next + ; - my $rsrc = $alias2src->{$source_alias} - or next; + $self->throw_exception( + "No such column '$colname' on source " . $sources->{$assumed_alias}{rsrc}->source_name + ) unless $seen_cols{$colname}{$assumed_alias}; - $return{$col} = { - %{ - ( $colinfos->{$source_alias} ||= $rsrc->columns_info )->{$colname} - || - $self->throw_exception( - "No such column '$colname' on source " . $rsrc->source_name - ); - }, - -result_source => $rsrc, - -source_alias => $source_alias, - -fq_colname => $col eq $colname ? "$source_alias.$col" : $col, + $return{$_} = { + %{ $sources->{$assumed_alias}{colinfos}{$colname} }, + -result_source => $sources->{$assumed_alias}{rsrc}, + -source_alias => $assumed_alias, + -fq_colname => "$assumed_alias.$colname", -colname => $colname, }; - $return{"$source_alias.$colname"} = $return{$col} if $col eq $colname; + $return{"$assumed_alias.$colname"} = $return{$_} + unless $source_alias; } return \%return; @@ -1000,13 +1015,29 @@ sub _collapse_cond { my $chunk = shift @pieces; if (ref $chunk eq 'HASH') { - push @pairs, map { $_ => $chunk->{$_} } sort keys %$chunk; + for (sort keys %$chunk) { + + # Match SQLA 1.79 behavior + if ($_ eq '') { + is_literal_value($chunk->{$_}) + ? 
carp 'Hash-pairs consisting of an empty string with a literal are deprecated, use -and => [ $literal ] instead' + : $self->throw_exception("Supplying an empty left hand side argument is not supported in hash-pairs") + ; + } + + push @pairs, $_ => $chunk->{$_}; + } } elsif (ref $chunk eq 'ARRAY') { push @pairs, -or => $chunk if @$chunk; } elsif ( ! length ref $chunk) { + + # Match SQLA 1.79 behavior + $self->throw_exception("Supplying an empty left hand side argument is not supported in array-pairs") + if $where_is_anded_array and (! defined $chunk or $chunk eq ''); + push @pairs, $chunk, shift @pieces; } else { @@ -1059,6 +1090,11 @@ sub _collapse_cond { for (my $i = 0; $i <= $#$where; $i++ ) { + # Match SQLA 1.79 behavior + $self->throw_exception( + "Supplying an empty left hand side argument is not supported in array-pairs" + ) if (! defined $where->[$i] or ! length $where->[$i]); + my $logic_mod = lc ( ($where->[$i] =~ /^(\-(?:and|or))$/i)[0] || '' ); if ($logic_mod) { @@ -1069,7 +1105,13 @@ sub _collapse_cond { my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] }) or next; - $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt; + my @keys = keys %$sub_elt; + if ( @keys == 1 and $keys[0] !~ /^\-/ ) { + $fin_idx->{ "COL_$keys[0]_" . serialize $sub_elt } = $sub_elt; + } + else { + $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt; + } } elsif (! length ref $where->[$i] ) { my $sub_elt = $self->_collapse_cond({ @{$where}[$i, $i+1] }) @@ -1083,20 +1125,57 @@ sub _collapse_cond { } } - return unless $fin_idx; + if (! $fin_idx) { + return; + } + elsif ( keys %$fin_idx == 1 ) { + $fin = (values %$fin_idx)[0]; + } + else { + my @or; + + # at this point everything is at most one level deep - unroll if needed + for (sort keys %$fin_idx) { + if ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) { + my ($l, $r) = %{$fin_idx->{$_}}; + + if ( + ref $r eq 'ARRAY' + and + ( + ( @$r == 1 and $l =~ /^\-and$/i ) + or + $l =~ /^\-or$/i + ) + ) { + push @or, @$r + } - $fin = ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { - -or => [ map { - # unroll single-element hashes - ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) - ? %{$fin_idx->{$_}} - : $fin_idx->{$_} - } sort keys %$fin_idx ] - }; + elsif ( + ref $r eq 'HASH' + and + keys %$r == 1 + and + $l =~ /^\-(?:and|or)$/i + ) { + push @or, %$r; + } + + else { + push @or, $l, $r; + } + } + else { + push @or, $fin_idx->{$_}; + } + } + + $fin->{-or} = \@or; + } } else { # not a hash not an array - $fin = { '' => $where }; + $fin = { -and => [ $where ] }; } # unroll single-element -and's @@ -1118,14 +1197,18 @@ sub _collapse_cond { %$fin, %{$and->[0]} }; } + else { + $fin->{-and} = $and; + last; + } } # compress same-column conds found in $fin for my $col ( grep { $_ !~ /^\-/ } keys %$fin ) { next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') =~ /^\-and$/i; my $val_bag = { map { - (! defined $_ ) ? ( UNDEF => undef ) - : ( ! ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) + (! defined $_ ) ? ( UNDEF => undef ) + : ( ! length ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) : ( ( 'SER_' . serialize $_ ) => $_ ) } @{$fin->{$col}}[1 .. $#{$fin->{$col}}] }; @@ -1163,6 +1246,7 @@ sub _collapse_cond_unroll_pairs { if (ref $rhs eq 'HASH' and ! keys %$rhs) { # FIXME - SQLA seems to be doing... nothing...? 
} + # normalize top level -ident, for saner extract_fixed_condition_columns code elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-ident}) { push @conds, { $lhs => { '=', $rhs } }; } @@ -1170,7 +1254,7 @@ sub _collapse_cond_unroll_pairs { push @conds, { $lhs => $rhs->{-value} }; } elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) { - if( is_literal_value $rhs->{'='}) { + if ( length ref $rhs->{'='} and is_literal_value $rhs->{'='} ) { push @conds, { $lhs => $rhs }; } else { @@ -1188,7 +1272,14 @@ sub _collapse_cond_unroll_pairs { my ($l, $r) = %$p; - push @conds, ( ! length ref $r or is_plain_value($r) ) + push @conds, ( + ! length ref $r + or + # the unroller recursion may return a '=' prepended value already + ref $r eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='} + or + is_plain_value($r) + ) ? { $l => $r } : { $l => { '=' => $r } } ; @@ -1233,7 +1324,7 @@ sub _collapse_cond_unroll_pairs { and my $vref = is_plain_value( (values %$rhs)[0] ) ) { - push @conds, { $lhs => { $subop => @$vref } } + push @conds, { $lhs => { $subop => $$vref } } } else { push @conds, { $lhs => $rhs }; @@ -1286,7 +1377,15 @@ sub _extract_fixed_condition_columns { } } # do not need to check for plain values - _collapse_cond did it for us - elsif(length ref $v->{'='} and is_literal_value($v->{'='}) ) { + elsif( + length ref $v->{'='} + and + ( + ( ref $v->{'='} eq 'HASH' and keys %{$v->{'='}} == 1 and exists $v->{'='}{-ident} ) + or + is_literal_value($v->{'='}) + ) + ) { $vals->{ 'SER_' . serialize $v->{'='} } = $v->{'='}; } }
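
Not part of the patch above: a minimal Perl sketch of the "Match SQLA 1.79 behavior" checks that the _collapse_cond hunks introduce, assuming the conditions are fed straight to the storage object's private collapser (the calling convention shown here is illustrative only; the warning and exception texts are quoted from the patch). $schema stands for any DBIx::Class::Schema instance.

  # empty-string key pointing at a literal - carps the deprecation warning
  # "Hash-pairs consisting of an empty string with a literal are deprecated,
  #  use -and => [ $literal ] instead"
  $schema->storage->_collapse_cond( { '' => \'1 = 1' } );

  # the replacement form suggested by that warning
  $schema->storage->_collapse_cond( { -and => [ \'1 = 1' ] } );

  # empty-string key with a plain value - throws
  # "Supplying an empty left hand side argument is not supported in hash-pairs"
  $schema->storage->_collapse_cond( { '' => 42 } );

  # empty element in an array-form condition - throws
  # "Supplying an empty left hand side argument is not supported in array-pairs"
  $schema->storage->_collapse_cond( [ '' => 42 ] );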