X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=b3eda3e01a9a7dc7c042d98f9446ea2fc2bdb0a5;hb=bbd9b939803656cf1a2e30d3fbf5314c67210d5b;hp=9678c28646d0406e44ae36acdeff096966bf74bd;hpb=eb58c082cf9c35760d8fc199483d38c1d926b2e4;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 9678c28..b3eda3e 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -10,11 +10,11 @@ use mro 'c3'; use DBIx::Class::Carp; use Scalar::Util qw/refaddr weaken reftype blessed/; use List::Util qw/first/; -use Sub::Name 'subname'; use Context::Preserve 'preserve_context'; use Try::Tiny; -use overload (); use Data::Compare (); # no imports!!! guard against insane architecture +use SQL::Abstract qw(is_plain_value is_literal_value); +use DBIx::Class::_Util qw(quote_sub perlstring); use namespace::clean; # default cursor class, overridable in connect_info attributes @@ -32,7 +32,7 @@ __PACKAGE__->datetime_parser_type('DateTime::Format::MySQL'); # historic default __PACKAGE__->sql_name_sep('.'); __PACKAGE__->mk_group_accessors('simple' => qw/ - _connect_info _dbi_connect_info _dbic_connect_attributes _driver_determined + _connect_info _dbic_connect_attributes _driver_determined _dbh _dbh_details _conn_pid _sql_maker _sql_maker_opts _dbh_autocommit _perform_autoinc_retrieval _autoinc_supplied_for_op /); @@ -79,20 +79,24 @@ __PACKAGE__->_use_join_optimizer (1); sub _determine_supports_join_optimizer { 1 }; # Each of these methods need _determine_driver called before itself -# in order to function reliably. This is a purely DRY optimization +# in order to function reliably. We also need to separate accessors +# from plain old method calls, since an accessor called as a setter +# does *not* need the driver determination loop fired (and in fact +# can produce hard to find bugs, like e.g. losing on_connect_* +# semantics on fresh connections) # -# get_(use)_dbms_capability need to be called on the correct Storage -# class, as _use_X may be hardcoded class-wide, and _supports_X calls -# _determine_supports_X which obv. needs a correct driver as well -my @rdbms_specific_methods = qw/ +# The construct below is simply a parameterized around() +my $storage_accessor_idx = { map { $_ => 1 } qw( sqlt_type - deployment_statements + datetime_parser_type sql_maker cursor_class +)}; +for my $meth (keys %$storage_accessor_idx, qw( + deployment_statements build_datetime_parser - datetime_parser_type txn_begin @@ -110,16 +114,16 @@ my @rdbms_specific_methods = qw/ _server_info _get_server_version -/; - -for my $meth (@rdbms_specific_methods) { +)) { my $orig = __PACKAGE__->can ($meth) or die "$meth is not a ::Storage::DBI method!"; - no strict qw/refs/; - no warnings qw/redefine/; - *{__PACKAGE__ ."::$meth"} = subname $meth => sub { + my $is_getter = $storage_accessor_idx->{$meth} ? 0 : 1; + + quote_sub + __PACKAGE__ ."::$meth", sprintf( <<'EOC', $is_getter, perlstring $meth ), { '$orig' => \$orig }; + if ( # only fire when invoked on an instance, a valid class-based invocation # would e.g. be setting a default for an inherited accessor @@ -129,20 +133,26 @@ for my $meth (@rdbms_specific_methods) { and ! $_[0]->{_in_determine_driver} and - ($_[0]->_dbi_connect_info||[])->[0] + # if this is a known *setter* - just set it, no need to connect + # and determine the driver + ( %1$s or @_ <= 1 ) + and + # Only try to determine stuff if we have *something* that either is or can + # provide a DSN. 
Allows for bare $schema's generated with a plain ->connect() + # to still be marginally useful + $_[0]->_dbi_connect_info->[0] ) { $_[0]->_determine_driver; - # This for some reason crashes and burns on perl 5.8.1 - # IFF the method ends up throwing an exception - #goto $_[0]->can ($meth); + # work around http://rt.perl.org/rt3/Public/Bug/Display.html?id=35878 + goto $_[0]->can(%2$s) unless DBIx::Class::_ENV_::BROKEN_GOTO; - my $cref = $_[0]->can ($meth); + my $cref = $_[0]->can(%2$s); goto $cref; } goto $orig; - }; +EOC } =head1 NAME @@ -202,6 +212,12 @@ sub new { my %seek_and_destroy; sub _arm_global_destructor { + + # quick "garbage collection" pass - prevents the registry + # from slowly growing with a bunch of undef-valued keys + defined $seek_and_destroy{$_} or delete $seek_and_destroy{$_} + for keys %seek_and_destroy; + weaken ( $seek_and_destroy{ refaddr($_[0]) } = $_[0] ); @@ -608,23 +624,6 @@ sub connect_info { $info = $self->_normalize_connect_info($info) if ref $info eq 'ARRAY'; - for my $storage_opt (keys %{ $info->{storage_options} }) { - my $value = $info->{storage_options}{$storage_opt}; - - $self->$storage_opt($value); - } - - # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only - # the new set of options - $self->_sql_maker(undef); - $self->_sql_maker_opts({}); - - for my $sql_maker_opt (keys %{ $info->{sql_maker_options} }) { - my $value = $info->{sql_maker_options}{$sql_maker_opt}; - - $self->_sql_maker_opts->{$sql_maker_opt} = $value; - } - my %attrs = ( %{ $self->_default_dbi_connect_attributes || {} }, %{ $info->{attributes} || {} }, @@ -643,16 +642,58 @@ sub connect_info { push @args, \%attrs if keys %attrs; } + + # this is the authoritative "always an arrayref" thing fed to DBI->connect + # OR a single-element coderef-based $dbh factory $self->_dbi_connect_info(\@args); + # extract the individual storage options + for my $storage_opt (keys %{ $info->{storage_options} }) { + my $value = $info->{storage_options}{$storage_opt}; + + $self->$storage_opt($value); + } + + # Extract the individual sqlmaker options + # + # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only + # the new set of options + $self->_sql_maker(undef); + $self->_sql_maker_opts({}); + + for my $sql_maker_opt (keys %{ $info->{sql_maker_options} }) { + my $value = $info->{sql_maker_options}{$sql_maker_opt}; + + $self->_sql_maker_opts->{$sql_maker_opt} = $value; + } + # FIXME - dirty: - # save attributes them in a separate accessor so they are always + # save attributes in a separate accessor so they are always # introspectable, even in case of a CODE $dbhmaker $self->_dbic_connect_attributes (\%attrs); return $self->_connect_info; } +sub _dbi_connect_info { + my $self = shift; + + return $self->{_dbi_connect_info} = $_[0] + if @_; + + my $conninfo = $self->{_dbi_connect_info} || []; + + # last ditch effort to grab a DSN + if ( ! defined $conninfo->[0] and $ENV{DBI_DSN} ) { + my @new_conninfo = @$conninfo; + $new_conninfo[0] = $ENV{DBI_DSN}; + $conninfo = \@new_conninfo; + } + + return $conninfo; +} + + sub _normalize_connect_info { my ($self, $info_arg) = @_; my %info; @@ -788,7 +829,7 @@ Example: sub dbh_do { my $self = shift; - my $run_target = shift; + my $run_target = shift; # either a coderef or a method name # short circuit when we know there is no need for a runner # @@ -805,10 +846,15 @@ sub dbh_do { DBIx::Class::Storage::BlockRunner->new( storage => $self, - run_code => sub { $self->$run_target ($self->_get_dbh, @$args ) }, wrap_txn => 0, - retry_handler => sub { ! 
( $_[0]->retried_count or $_[0]->storage->connected ) }, - )->run; + retry_handler => sub { + $_[0]->failed_attempt_count == 1 + and + ! $_[0]->storage->connected + }, + )->run(sub { + $self->$run_target ($self->_get_dbh, @$args ) + }); } sub txn_do { @@ -946,8 +992,13 @@ sub _get_dbh { return $self->_dbh; } +# *DELIBERATELY* not a setter (for the time being) +# Too intertwined with everything else for any kind of sanity sub sql_maker { - my ($self) = @_; + my $self = shift; + + $self->throw_exception('sql_maker() is not a setter method') if @_; + unless ($self->_sql_maker) { my $sql_maker_class = $self->sql_maker_class; @@ -1009,11 +1060,11 @@ sub _init {} sub _populate_dbh { my ($self) = @_; - my @info = @{$self->_dbi_connect_info || []}; $self->_dbh(undef); # in case ->connected failed we might get sent here $self->_dbh_details({}); # reset everything we know + $self->_sql_maker(undef); # this may also end up being different - $self->_dbh($self->_connect(@info)); + $self->_dbh($self->_connect); $self->_conn_pid($$) unless DBIx::Class::_ENV_::BROKEN_FORK; # on win32 these are in fact threads @@ -1144,15 +1195,28 @@ sub _describe_connection { require DBI::Const::GetInfoReturn; my $self = shift; - $self->ensure_connected; + + my $drv; + try { + $drv = $self->_extract_driver_from_connect_info; + $self->ensure_connected; + }; + + $drv = "DBD::$drv" if $drv; my $res = { DBIC_DSN => $self->_dbi_connect_info->[0], DBI_VER => DBI->VERSION, DBIC_VER => DBIx::Class->VERSION, DBIC_DRIVER => ref $self, + $drv ? ( + DBD => $drv, + DBD_VER => try { $drv->VERSION }, + ) : (), }; + # try to grab data even if we never managed to connect + # will cover us in cases of an oddly broken half-connect for my $inf ( #keys %DBI::Const::GetInfoType::GetInfoType, qw/ @@ -1213,20 +1277,7 @@ sub _determine_driver { $started_connected = 1; } else { - # if connect_info is a CODEREF, we have no choice but to connect - if (ref $self->_dbi_connect_info->[0] && - reftype $self->_dbi_connect_info->[0] eq 'CODE') { - $self->_populate_dbh; - $driver = $self->_dbh->{Driver}{Name}; - } - else { - # try to use dsn to not require being connected, the driver may still - # force a connection in _rebless to determine version - # (dsn may not be supplied at all if all we do is make a mock-schema) - my $dsn = $self->_dbi_connect_info->[0] || $ENV{DBI_DSN} || ''; - ($driver) = $dsn =~ /dbi:([^:]+):/i; - $driver ||= $ENV{DBI_DRIVER}; - } + $driver = $self->_extract_driver_from_connect_info; } if ($driver) { @@ -1260,7 +1311,7 @@ sub _determine_driver { "Your storage subclass @{[ ref $self ]} provides (or inherits) the method " . 'source_bind_attributes() for which support has been removed as of Jan 2013. ' . 'If you are not sure how to proceed please contact the development team via ' - . 'http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT' + . 
DBIx::Class::_ENV_::HELP_URL ); } @@ -1271,6 +1322,31 @@ sub _determine_driver { } } +sub _extract_driver_from_connect_info { + my $self = shift; + + my $drv; + + # if connect_info is a CODEREF, we have no choice but to connect + if ( + ref $self->_dbi_connect_info->[0] + and + reftype $self->_dbi_connect_info->[0] eq 'CODE' + ) { + $self->_populate_dbh; + $drv = $self->_dbh->{Driver}{Name}; + } + else { + # try to use dsn to not require being connected, the driver may still + # force a connection later in _rebless to determine version + # (dsn may not be supplied at all if all we do is make a mock-schema) + ($drv) = ($self->_dbi_connect_info->[0] || '') =~ /^dbi:([^:]+):/i; + $drv ||= $ENV{DBI_DRIVER}; + } + + return $drv; +} + sub _determine_connector_driver { my ($self, $conn) = @_; @@ -1377,10 +1453,12 @@ sub _do_query { } sub _connect { - my ($self, @info) = @_; + my $self = shift; + + my $info = $self->_dbi_connect_info; $self->throw_exception("You did not provide any connection_info") - if ( ! defined $info[0] and ! $ENV{DBI_DSN} and ! $ENV{DBI_DRIVER} ); + unless defined $info->[0]; my ($old_connect_via, $dbh); @@ -1410,12 +1488,12 @@ sub _connect { }; try { - if(ref $info[0] eq 'CODE') { - $dbh = $info[0]->(); + if(ref $info->[0] eq 'CODE') { + $dbh = $info->[0]->(); } else { require DBI; - $dbh = DBI->connect(@info); + $dbh = DBI->connect(@$info); } die $DBI::errstr unless $dbh; @@ -1423,8 +1501,8 @@ sub _connect { die sprintf ("%s fresh DBI handle with a *false* 'Active' attribute. " . 'This handle is disconnected as far as DBIC is concerned, and we can ' . 'not continue', - ref $info[0] eq 'CODE' - ? "Connection coderef $info[0] returned a" + ref $info->[0] eq 'CODE' + ? "Connection coderef $info->[0] returned a" : 'DBI->connect($schema->storage->connect_info) resulted in a' ) unless $dbh->FETCH('Active'); @@ -1439,7 +1517,7 @@ sub _connect { # Default via _default_dbi_connect_attributes is 1, hence it was an explicit # request, or an external handle. Complain and set anyway unless ($dbh->{RaiseError}) { - carp( ref $info[0] eq 'CODE' + carp( ref $info->[0] eq 'CODE' ? "The 'RaiseError' of the externally supplied DBI handle is set to false. " ."DBIx::Class will toggle it back to true, unless the 'unsafe' connect " @@ -1460,7 +1538,7 @@ sub _connect { }; $self->_dbh_autocommit($dbh->{AutoCommit}); - $dbh; + return $dbh; } sub txn_begin { @@ -1556,17 +1634,13 @@ sub _exec_txn_rollback { shift->_dbh->rollback; } -# generate some identical methods -for my $meth (qw/svp_begin svp_release svp_rollback/) { - no strict qw/refs/; - *{__PACKAGE__ ."::$meth"} = subname $meth => sub { - my $self = shift; - $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK; - $self->throw_exception("Unable to $meth() on a disconnected storage") - unless $self->_dbh; - $self->next::method(@_); - }; -} +# generate the DBI-specific stubs, which then fallback to ::Storage proper +quote_sub __PACKAGE__ . "::$_" => sprintf (<<'EOS', $_) for qw(svp_begin svp_release svp_rollback); + $_[0]->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK; + $_[0]->throw_exception('Unable to %s() on a disconnected storage') + unless $_[0]->_dbh; + shift->next::method(@_); +EOS # This used to be the top-half of _execute. It was split out to make it # easier to override in NoBindVars without duping the rest. 
It takes up @@ -1618,13 +1692,10 @@ sub _gen_sql_bind { sub _resolve_bindattrs { my ($self, $ident, $bind, $colinfos) = @_; - $colinfos ||= {}; - my $resolve_bindinfo = sub { #my $infohash = shift; - %$colinfos = %{ $self->_resolve_column_info($ident) } - unless keys %$colinfos; + $colinfos ||= { %{ $self->_resolve_column_info($ident) } }; my $ret; if (my $col = $_[0]->{dbic_colname}) { @@ -1644,10 +1715,16 @@ sub _resolve_bindattrs { my $resolved = ( ref $_ ne 'ARRAY' or @$_ != 2 ) ? [ {}, $_ ] : ( ! defined $_->[0] ) ? [ {}, $_->[1] ] - : (ref $_->[0] eq 'HASH') ? [ (exists $_->[0]{dbd_attrs} or $_->[0]{sqlt_datatype}) - ? $_->[0] - : $resolve_bindinfo->($_->[0]) - , $_->[1] ] + : (ref $_->[0] eq 'HASH') ? [( + ! keys %{$_->[0]} + or + exists $_->[0]{dbd_attrs} + or + $_->[0]{sqlt_datatype} + ) ? $_->[0] + : $resolve_bindinfo->($_->[0]) + , $_->[1] + ] : (ref $_->[0] eq 'SCALAR') ? [ { sqlt_datatype => ${$_->[0]} }, $_->[1] ] : [ $resolve_bindinfo->( { dbic_colname => $_->[0] } @@ -1661,7 +1738,7 @@ sub _resolve_bindattrs { and length ref $resolved->[1] and - ! overload::Method($resolved->[1], '""') + ! is_plain_value $resolved->[1] ) { require Data::Dumper; local $Data::Dumper::Maxdepth = 1; @@ -1815,14 +1892,15 @@ sub _bind_sth_params { ); } else { - # FIXME SUBOPTIMAL - most likely this is not necessary at all - # confirm with dbi-dev whether explicit stringification is needed - my $v = ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') ) + # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD + my $v = ( length ref $bind->[$i][1] and is_plain_value $bind->[$i][1] ) ? "$bind->[$i][1]" : $bind->[$i][1] ; + $sth->bind_param( $i + 1, + # The temp-var is CRUCIAL - DO NOT REMOVE IT, breaks older DBD::SQLite RT#79576 $v, $bind_attrs->[$i], ); @@ -1843,9 +1921,7 @@ sub _prefetch_autovalues { ( ! exists $to_insert->{$col} or - ref $to_insert->{$col} eq 'SCALAR' - or - (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY') + is_literal_value($to_insert->{$col}) ) ) { $values{$col} = $self->_sequence_fetch( @@ -1882,11 +1958,9 @@ sub insert { } # nothing to retrieve when explicit values are supplied - next if (defined $to_insert->{$col} and ! ( - ref $to_insert->{$col} eq 'SCALAR' - or - (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY') - )); + next if ( + defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col}) + ); # the 'scalar keys' is a trick to preserve the ->columns declaration order $retrieve_cols{$col} = scalar keys %retrieve_cols if ( @@ -1964,24 +2038,20 @@ sub insert { sub insert_bulk { my ($self, $source, $cols, $data) = @_; - my @col_range = (0..$#$cols); - - # FIXME SUBOPTIMAL - most likely this is not necessary at all - # confirm with dbi-dev whether explicit stringification is needed - # - # forcibly stringify whatever is stringifiable + # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD + # For the time being forcibly stringify whatever is stringifiable # ResultSet::populate() hands us a copy - safe to mangle for my $r (0 .. $#$data) { for my $c (0 .. 
$#{$data->[$r]}) { $data->[$r][$c] = "$data->[$r][$c]" - if ( length ref $data->[$r][$c] and overload::Method($data->[$r][$c], '""') ); + if ( length ref $data->[$r][$c] and is_plain_value $data->[$r][$c] ); } } my $colinfos = $source->columns_info($cols); local $self->{_autoinc_supplied_for_op} = - (first { $_->{is_auto_increment} } values %$colinfos) + (grep { $_->{is_auto_increment} } values %$colinfos) ? 1 : 0 ; @@ -2008,16 +2078,16 @@ sub insert_bulk { # can be later matched up by address), because we want to supply a real # value on which perhaps e.g. datatype checks will be performed my ($proto_data, $value_type_by_col_idx); - for my $i (@col_range) { - my $colname = $cols->[$i]; - if (ref $data->[0][$i] eq 'SCALAR') { + for my $col_idx (0..$#$cols) { + my $colname = $cols->[$col_idx]; + if (ref $data->[0][$col_idx] eq 'SCALAR') { # no bind value at all - no type - $proto_data->{$colname} = $data->[0][$i]; + $proto_data->{$colname} = $data->[0][$col_idx]; } - elsif (ref $data->[0][$i] eq 'REF' and ref ${$data->[0][$i]} eq 'ARRAY' ) { + elsif (ref $data->[0][$col_idx] eq 'REF' and ref ${$data->[0][$col_idx]} eq 'ARRAY' ) { # repack, so we don't end up mangling the original \[] - my ($sql, @bind) = @${$data->[0][$i]}; + my ($sql, @bind) = @${$data->[0][$col_idx]}; # normalization of user supplied stuff my $resolved_bind = $self->_resolve_bindattrs( @@ -2026,23 +2096,23 @@ sub insert_bulk { # store value-less (attrs only) bind info - we will be comparing all # supplied binds against this for sanity - $value_type_by_col_idx->{$i} = [ map { $_->[0] } @$resolved_bind ]; + $value_type_by_col_idx->{$col_idx} = [ map { $_->[0] } @$resolved_bind ]; $proto_data->{$colname} = \[ $sql, map { [ # inject slice order to use for $proto_bind construction - { %{$resolved_bind->[$_][0]}, _bind_data_slice_idx => $i, _literal_bind_subindex => $_+1 } + { %{$resolved_bind->[$_][0]}, _bind_data_slice_idx => $col_idx, _literal_bind_subindex => $_+1 } => $resolved_bind->[$_][1] ] } (0 .. $#bind) ]; } else { - $value_type_by_col_idx->{$i} = undef; + $value_type_by_col_idx->{$col_idx} = undef; $proto_data->{$colname} = \[ '?', [ - { dbic_colname => $colname, _bind_data_slice_idx => $i } + { dbic_colname => $colname, _bind_data_slice_idx => $col_idx } => - $data->[0][$i] + $data->[0][$col_idx] ] ]; } } @@ -2075,13 +2145,13 @@ sub insert_bulk { Data::Dumper::Concise::Dumper ({ map { $cols->[$_] => $data->[$r_idx][$_] - } @col_range + } 0..$#$cols }), } ); }; - for my $col_idx (@col_range) { + for my $col_idx (0..$#$cols) { my $reference_val = $data->[0][$col_idx]; for my $row_idx (1..$#$data) { # we are comparing against what we got from [0] above, hence start from 1 @@ -2104,7 +2174,7 @@ sub insert_bulk { } } elsif (! defined $value_type_by_col_idx->{$col_idx} ) { # regular non-literal value - if (ref $val eq 'SCALAR' or (ref $val eq 'REF' and ref $$val eq 'ARRAY') ) { + if (is_literal_value($val)) { $bad_slice_report_cref->("Literal SQL found where a plain bind value is expected", $row_idx, $col_idx); } } @@ -2185,16 +2255,13 @@ sub insert_bulk { sub _dbh_execute_for_fetch { my ($self, $source, $sth, $proto_bind, $cols, $data) = @_; - my @idx_range = ( 0 .. 
$#$proto_bind ); - # If we have any bind attributes to take care of, we will bind the # proto-bind data (which will never be used by execute_for_fetch) # However since column bindtypes are "sticky", this is sufficient # to get the DBD to apply the bindtype to all values later on - my $bind_attrs = $self->_dbi_attrs_for_bind($source, $proto_bind); - for my $i (@idx_range) { + for my $i (0 .. $#$proto_bind) { $sth->bind_param ( $i+1, # DBI bind indexes are 1-based $proto_bind->[$i][1], @@ -2214,12 +2281,21 @@ sub _dbh_execute_for_fetch { my $fetch_tuple = sub { return undef if ++$fetch_row_idx > $#$data; - return [ map { defined $_->{_literal_bind_subindex} - ? ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]} - ->[ $_->{_literal_bind_subindex} ] - ->[1] - : $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ] - } map { $_->[0] } @$proto_bind]; + return [ map { + ! defined $_->{_literal_bind_subindex} + + ? $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ] + + # There are no attributes to resolve here - we already did everything + # when we constructed proto_bind. However we still want to sanity-check + # what the user supplied, so pass stuff through to the resolver *anyway* + : $self->_resolve_bindattrs ( + undef, # a fake rsrc + [ ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}->[ $_->{_literal_bind_subindex} ] ], + {}, # a fake column_info bag + )->[0][1] + + } map { $_->[0] } @$proto_bind ]; }; my $tuple_status = []; @@ -2342,24 +2418,16 @@ sub _select_args { # soooooo much better now. But that is also another # battle... #return ( - # 'select', @{$orig_attrs->{_sqlmaker_select_args}} - #) if $orig_attrs->{_sqlmaker_select_args}; + # 'select', $orig_attrs->{!args_as_stored_at_the_end_of_this_method!} + #) if $orig_attrs->{!args_as_stored_at_the_end_of_this_method!}; my $sql_maker = $self->sql_maker; - my $alias2source = $self->_resolve_ident_sources ($ident); my $attrs = { %$orig_attrs, select => $select, from => $ident, where => $where, - - # limit dialects use this stuff - # yes, some CDBICompat crap does not supply an {alias} >.< - ( $orig_attrs->{alias} and $alias2source->{$orig_attrs->{alias}} ) - ? ( _rsroot_rsrc => $alias2source->{$orig_attrs->{alias}} ) - : () - , }; # Sanity check the attributes (SQLMaker does it too, but @@ -2396,7 +2464,7 @@ sub _select_args { # are happy (this includes MySQL in strict_mode) # If any of the other joined tables are referenced in the group_by # however - the user is on their own - ( $prefetch_needs_subquery or $attrs->{_related_results_construction} ) + ( $prefetch_needs_subquery or ! $attrs->{_simple_passthrough_construction} ) and $attrs->{group_by} and @@ -2439,7 +2507,19 @@ sub _select_args { ($attrs->{from}, $attrs->{_aliastypes}) = $self->_prune_unused_joins ($attrs); } + # FIXME this is a gross, inefficient, largely incorrect and fragile hack + # during the result inflation stage we *need* to know what was the aliastype + # map as sqla saw it when the final pieces of SQL were being assembled + # Originally we simply carried around the entirety of $attrs, but this + # resulted in resultsets that are being reused growing continuously, as + # the hash in question grew deeper and deeper. 
+ # Instead hand-pick what to take with us here (we actually don't need much + # at this point just the map itself) + $orig_attrs->{_last_sqlmaker_alias_map} = $attrs->{_aliastypes}; + ### + # my $alias2source = $self->_resolve_ident_sources ($ident); + # # This would be the point to deflate anything found in $attrs->{where} # (and leave $attrs->{bind} intact). Problem is - inflators historically # expect a result object. And all we have is a resultsource (it is trivial @@ -2449,9 +2529,7 @@ sub _select_args { # invoked, and that's just bad... ### - return ( 'select', @{ $orig_attrs->{_sqlmaker_select_args} = [ - @{$attrs}{qw(from select where)}, $attrs, @limit_args - ]} ); + return ( 'select', @{$attrs}{qw(from select where)}, $attrs, @limit_args ); } # Returns a counting SELECT for a simple count @@ -2789,6 +2867,7 @@ sub create_ddl_dir { add_drop_table => 1, ignore_constraint_names => 1, ignore_index_names => 1, + quote_identifiers => $self->sql_maker->_quoting_enabled, %{$sqltargs || {}} }; @@ -2883,10 +2962,21 @@ sub create_ddl_dir { unless $dest_schema->name; } - my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db, - $dest_schema, $db, - $sqltargs - ); + my $diff = do { + # FIXME - this is a terrible workaround for + # https://github.com/dbsrgits/sql-translator/commit/2d23c1e + # Fixing it in this sloppy manner so that we don't hve to + # lockstep an SQLT release as well. Needs to be removed at + # some point, and SQLT dep bumped + local $SQL::Translator::Producer::SQLite::NO_QUOTES + if $SQL::Translator::Producer::SQLite::NO_QUOTES; + + SQL::Translator::Diff::schema_diff($source_schema, $db, + $dest_schema, $db, + $sqltargs + ); + }; + if(!open $file, ">$difffile") { $self->throw_exception("Can't write to $difffile ($!)"); next; @@ -2946,6 +3036,9 @@ sub deployment_statements { $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources} if exists $sqltargs->{sources}; + $sqltargs->{quote_identifiers} = $self->sql_maker->_quoting_enabled + unless exists $sqltargs->{quote_identifiers}; + my $tr = SQL::Translator->new( producer => "SQL::Translator::Producer::${type}", %$sqltargs,
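
The bind-handling hunks above replace the ad-hoc `ref ... eq 'SCALAR'` / `REF`-to-`ARRAY` and `overload::Method($val, '""')` checks with SQL::Abstract's `is_literal_value` and `is_plain_value` helpers, which this commit newly imports. A minimal sketch of what the two helpers classify, assuming an SQL::Abstract release recent enough to export them:

  use strict;
  use warnings;
  use SQL::Abstract qw(is_plain_value is_literal_value);

  # literal SQL, with or without attached bind values, is what the old
  # SCALAR / REF-to-ARRAY ref checks were trying to detect
  is_literal_value( \'NOW()' );                       # arrayref     => literal
  is_literal_value( \[ 'created_on < ?', '2013' ] );  # arrayref     => literal
  is_literal_value( 'plain string' );                 # undef        => not literal

  # plain (bindable) values are what the old '""' overload check was
  # approximating: ordinary scalars and stringifiable objects
  is_plain_value( 42 );                               # ref to value => plain
  is_plain_value( \'NOW()' );                         # undef        => not plain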
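
Likewise, the `create_ddl_dir` and `deployment_statements` hunks derive SQL::Translator's `quote_identifiers` flag from the storage's own SQL maker unless the caller supplies one, so generated DDL quotes identifiers consistently with runtime SQL. A usage sketch; the `My::Schema` class and the SQLite DSN are hypothetical placeholders:

  use strict;
  use warnings;
  use My::Schema;   # hypothetical DBIx::Class schema

  my $schema = My::Schema->connect(
    'dbi:SQLite:dbname=example.db', undef, undef,
    { quote_names => 1 },   # enables identifier quoting in the SQL maker
  );

  # DDL is now emitted with quoted identifiers as well
  print $schema->deployment_statements('SQLite');

  # an explicit setting still overrides the derived default
  print $schema->deployment_statements(
    'SQLite', undef, undef, { quote_identifiers => 0 }
  );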