X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=a213c9588ca03d10aa7cdc6f13335018bab55d3d;hb=3705e3b2801ea6a8f770b6f0c528b119bea92fe9;hp=23a7f71e30002658cd4496e8bb34fa1e9d363fbf;hpb=7d534e689b7f9820dda4272bf6702fc3e9e86f0d;p=dbsrgits%2FDBIx-Class.git

diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index 23a7f71..a213c95 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -13,8 +13,8 @@ use List::Util qw/first/;
 use Sub::Name 'subname';
 use Context::Preserve 'preserve_context';
 use Try::Tiny;
-use overload ();
 use Data::Compare (); # no imports!!! guard against insane architecture
+use DBIx::Class::_Util qw(is_plain_value is_literal_value);
 use namespace::clean;
 
 # default cursor class, overridable in connect_info attributes
@@ -79,20 +79,24 @@ __PACKAGE__->_use_join_optimizer (1);
 sub _determine_supports_join_optimizer { 1 };
 
 # Each of these methods need _determine_driver called before itself
-# in order to function reliably. This is a purely DRY optimization
+# in order to function reliably. We also need to separate accessors
+# from plain old method calls, since an accessor called as a setter
+# does *not* need the driver determination loop fired (and in fact
+# can produce hard to find bugs, like e.g. losing on_connect_*
+# semantics on fresh connections)
 #
-# get_(use)_dbms_capability need to be called on the correct Storage
-# class, as _use_X may be hardcoded class-wide, and _supports_X calls
-# _determine_supports_X which obv. needs a correct driver as well
-my @rdbms_specific_methods = qw/
+# The construct below is simply a parameterized around()
+my $storage_accessor_idx = { map { $_ => 1 } qw(
   sqlt_type
-  deployment_statements
+  datetime_parser_type
 
   sql_maker
   cursor_class
+)};
+for my $meth (keys %$storage_accessor_idx, qw(
+  deployment_statements
 
   build_datetime_parser
-  datetime_parser_type
 
   txn_begin
 
@@ -110,15 +114,13 @@ my @rdbms_specific_methods = qw/
 
   _server_info
   _get_server_version
-/;
-
-for my $meth (@rdbms_specific_methods) {
+)) {
 
   my $orig = __PACKAGE__->can ($meth)
     or die "$meth is not a ::Storage::DBI method!";
 
-  no strict qw/refs/;
-  no warnings qw/redefine/;
+  no strict 'refs';
+  no warnings 'redefine';
   *{__PACKAGE__ ."::$meth"} = subname $meth => sub {
     if (
       # only fire when invoked on an instance, a valid class-based invocation
@@ -129,6 +131,10 @@ for my $meth (@rdbms_specific_methods) {
         and
       ! $_[0]->{_in_determine_driver}
         and
+      # if this is a known *setter* - just set it, no need to connect
+      # and determine the driver
+      ! ( $storage_accessor_idx->{$meth} and @_ > 1 )
+        and
       # Only try to determine stuff if we have *something* that either is or can
       # provide a DSN. Allows for bare $schema's generated with a plain ->connect()
       # to still be marginally useful
@@ -985,8 +991,13 @@ sub _get_dbh {
   return $self->_dbh;
 }
 
+# *DELIBERATELY* not a setter (for the time being)
+# Too intertwined with everything else for any kind of sanity
 sub sql_maker {
-  my ($self) = @_;
+  my $self = shift;
+
+  $self->throw_exception('sql_maker() is not a setter method') if @_;
+
   unless ($self->_sql_maker) {
     my $sql_maker_class = $self->sql_maker_class;
 
@@ -1050,6 +1061,7 @@ sub _populate_dbh {
 
   $self->_dbh(undef); # in case ->connected failed we might get sent here
   $self->_dbh_details({}); # reset everything we know
+  $self->_sql_maker(undef); # this may also end up being different
 
   $self->_dbh($self->_connect);
 
@@ -1683,13 +1695,10 @@ sub _gen_sql_bind {
 sub _resolve_bindattrs {
   my ($self, $ident, $bind, $colinfos) = @_;
 
-  $colinfos ||= {};
-
   my $resolve_bindinfo = sub {
     #my $infohash = shift;
 
-    %$colinfos = %{ $self->_resolve_column_info($ident) }
-      unless keys %$colinfos;
+    $colinfos ||= { %{ $self->_resolve_column_info($ident) } };
 
     my $ret;
     if (my $col = $_[0]->{dbic_colname}) {
@@ -1709,10 +1718,16 @@ sub _resolve_bindattrs {
     my $resolved =
       ( ref $_ ne 'ARRAY' or @$_ != 2 ) ? [ {}, $_ ]
     : ( ! defined $_->[0] )             ? [ {}, $_->[1] ]
-    : (ref $_->[0] eq 'HASH')           ? [ (exists $_->[0]{dbd_attrs} or $_->[0]{sqlt_datatype})
-                                              ? $_->[0]
-                                              : $resolve_bindinfo->($_->[0])
-                                            , $_->[1] ]
+    : (ref $_->[0] eq 'HASH')           ? [(
+                                            ! keys %{$_->[0]}
+                                              or
+                                            exists $_->[0]{dbd_attrs}
+                                              or
+                                            $_->[0]{sqlt_datatype}
+                                           ) ? $_->[0]
+                                             : $resolve_bindinfo->($_->[0])
+                                           , $_->[1]
+                                          ]
     : (ref $_->[0] eq 'SCALAR')         ? [ { sqlt_datatype => ${$_->[0]} }, $_->[1] ]
     :                                     [ $resolve_bindinfo->(
                                               { dbic_colname => $_->[0] }
@@ -1726,7 +1741,7 @@ sub _resolve_bindattrs {
         and
       length ref $resolved->[1]
         and
-      ! overload::Method($resolved->[1], '""')
+      ! is_plain_value $resolved->[1]
     ) {
       require Data::Dumper;
       local $Data::Dumper::Maxdepth = 1;
@@ -1880,15 +1895,9 @@ sub _bind_sth_params {
       );
     }
     else {
-      # FIXME SUBOPTIMAL - most likely this is not necessary at all
-      # confirm with dbi-dev whether explicit stringification is needed
-      my $v = ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') )
-        ? "$bind->[$i][1]"
-        : $bind->[$i][1]
-      ;
       $sth->bind_param(
         $i + 1,
-        $v,
+        $bind->[$i][1],
         $bind_attrs->[$i],
       );
     }
@@ -1908,9 +1917,7 @@ sub _prefetch_autovalues {
      (
        ! exists $to_insert->{$col}
          or
-       ref $to_insert->{$col} eq 'SCALAR'
-         or
-       (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY')
+       is_literal_value($to_insert->{$col})
      )
    ) {
      $values{$col} = $self->_sequence_fetch(
@@ -1947,11 +1954,9 @@ sub insert {
    }
 
    # nothing to retrieve when explicit values are supplied
-   next if (defined $to_insert->{$col} and ! (
-     ref $to_insert->{$col} eq 'SCALAR'
-       or
-     (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY')
-   ));
+   next if (
+     defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col})
+   );
 
    # the 'scalar keys' is a trick to preserve the ->columns declaration order
    $retrieve_cols{$col} = scalar keys %retrieve_cols if (
@@ -2031,18 +2036,6 @@ sub insert_bulk {
 
   my @col_range = (0..$#$cols);
 
-  # FIXME SUBOPTIMAL - most likely this is not necessary at all
-  # confirm with dbi-dev whether explicit stringification is needed
-  #
-  # forcibly stringify whatever is stringifiable
-  # ResultSet::populate() hands us a copy - safe to mangle
-  for my $r (0 .. $#$data) {
-    for my $c (0 .. $#{$data->[$r]}) {
-      $data->[$r][$c] = "$data->[$r][$c]"
-        if ( length ref $data->[$r][$c] and overload::Method($data->[$r][$c], '""') );
-    }
-  }
-
   my $colinfos = $source->columns_info($cols);
 
   local $self->{_autoinc_supplied_for_op} =
@@ -2169,7 +2162,7 @@ sub insert_bulk {
        }
      }
      elsif (! defined $value_type_by_col_idx->{$col_idx} ) {
        # regular non-literal value
-       if (ref $val eq 'SCALAR' or (ref $val eq 'REF' and ref $$val eq 'ARRAY') ) {
+       if (is_literal_value($val)) {
          $bad_slice_report_cref->("Literal SQL found where a plain bind value is expected", $row_idx, $col_idx);
        }
      }
@@ -2279,12 +2272,21 @@ sub _dbh_execute_for_fetch {
   my $fetch_tuple = sub {
     return undef if ++$fetch_row_idx > $#$data;
 
-    return [ map { defined $_->{_literal_bind_subindex}
-      ? ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}
-         ->[ $_->{_literal_bind_subindex} ]
-          ->[1]
-      : $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]
-    } map { $_->[0] } @$proto_bind];
+    return [ map {
+      ! defined $_->{_literal_bind_subindex}
+
+        ? $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]
+
+        # There are no attributes to resolve here - we already did everything
+        # when we constructed proto_bind. However we still want to sanity-check
+        # what the user supplied, so pass stuff through to the resolver *anyway*
+        : $self->_resolve_bindattrs (
+            undef,  # a fake rsrc
+            [ ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}->[ $_->{_literal_bind_subindex} ] ],
+            {},  # a fake column_info bag
+          )->[0][1]
+
+    } map { $_->[0] } @$proto_bind ];
   };
 
   my $tuple_status = [];
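
Reviewer note, not part of the patch: the first hunks replace the flat @rdbms_specific_methods wrapping loop with what the new comment calls a "parameterized around()", plus an index of accessors whose setter calls must not trigger driver determination. The following is a self-contained toy sketch of that dispatch guard under invented names (My::Toy::Storage, the stub methods, and the _driver_determined flag are all made up for illustration); the real wrapper in ::Storage::DBI checks several more conditions (class-based invocation, presence of connect_info) and re-dispatches so that a driver-specific override of the method gets a chance to run.

package My::Toy::Storage;

use strict;
use warnings;
use Sub::Name 'subname';

sub new { bless {}, shift }

# pretend this is the expensive driver-detection step
sub _determine_driver { print "determining driver\n"; $_[0]{_driver_determined} = 1 }

# toy stand-ins for the wrapped storage methods
sub cursor_class { $_[0]{cursor_class} = $_[1] if @_ > 1; $_[0]{cursor_class} }
sub sql_maker    { $_[0]{sql_maker} ||= 'Toy::SQLMaker' }
sub deployment_statements { 'CREATE TABLE toy ( id INTEGER )' }

my $storage_accessor_idx = { map { $_ => 1 } qw( cursor_class sql_maker ) };

for my $meth ( keys %$storage_accessor_idx, 'deployment_statements' ) {
  my $orig = __PACKAGE__->can($meth)
    or die "$meth is not a My::Toy::Storage method!";

  no strict 'refs';
  no warnings 'redefine';
  *{__PACKAGE__ . "::$meth"} = subname $meth => sub {
    $_[0]->_determine_driver if (
      ref $_[0]                                        # instance calls only
        and
      ! $_[0]->{_driver_determined}
        and
      # a known accessor invoked as a *setter*: just set it, no need
      # to connect and determine the driver
      ! ( $storage_accessor_idx->{$meth} and @_ > 1 )
    );
    goto $orig;
  };
}

package main;

my $storage = My::Toy::Storage->new;
$storage->cursor_class('Toy::Cursor');   # setter: wrapper stays quiet
$storage->deployment_statements;         # plain method: fires detection exactly once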
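
Also not part of the patch: a minimal approximation of what is_literal_value() treats as literal SQL, reconstructed from the inline ref-checks the patch removes from _prefetch_autovalues(), insert() and insert_bulk(). The helper name below (my_is_literal_value) is invented, and the real routine in DBIx::Class::_Util may return richer information than a plain boolean.

use strict;
use warnings;

sub my_is_literal_value {
  my ($v) = @_;

  # literal SQL travels either as a scalar ref ( \'NOW()' ) or as a
  # ref to an arrayref carrying bind values ( \[ 'id + ?', 42 ] )
  return 1 if ref $v eq 'SCALAR';
  return 1 if ref $v eq 'REF' and ref $$v eq 'ARRAY';
  return 0;
}

printf "%-20s -> %d\n", $_->[0], my_is_literal_value( $_->[1] ) for (
  [ q{\'NOW()'}          => \'NOW()'          ],
  [ q{\[ 'id + ?', 42 ]} => \[ 'id + ?', 42 ] ],
  [ q{plain value 42}    => 42                ],
);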