X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=99a895e8e92527ac2d2babde5336a08ac202718a;hb=c6ec79000b160e7491d9ab9d95d6e69c473b0862;hp=d390dc6858158b7745ac03a86a7457aac0b69608;hpb=86cdddbe2781f77d81d27cdd83910543c313a8f0;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index d390dc6..99a895e 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -9,14 +9,13 @@ use mro 'c3'; use DBIx::Class::Carp; use Scalar::Util qw/refaddr weaken reftype blessed/; -use List::Util qw/first/; use Context::Preserve 'preserve_context'; -use Try::Tiny; use SQL::Abstract qw(is_plain_value is_literal_value); use DBIx::Class::_Util qw( - quote_sub perlstring serialize - dbic_internal_try + quote_sub perlstring serialize dump_value + dbic_internal_try dbic_internal_catch detected_reinvoked_destructor scope_guard + mkdir_p ); use namespace::clean; @@ -225,6 +224,11 @@ sub new { weaken ( $seek_and_destroy{ refaddr($_[0]) } = $_[0] ); + + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } END { @@ -239,9 +243,14 @@ sub new { # disarm the handle if not native to this process (see comment on top) $_->_verify_pid for @instances; } + + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } - sub CLONE { + sub DBIx::Class::__DBI_Storage_iThreads_handler__::CLONE { # As per DBI's recommendation, DBIC disconnects all handles as # soon as possible (DBIC will reconnect only on demand from within # the thread) @@ -255,6 +264,11 @@ sub new { # properly renumber existing refs $_->_arm_global_destructor } + + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } } @@ -270,11 +284,10 @@ sub DESTROY { $_[0]->_dbh(undef); # not calling ->disconnect here - we are being destroyed - nothing to reset - # this op is necessary, since the very last perl runtime statement - # triggers a global destruction shootout, and the $SIG localization - # may very well be destroyed before perl actually gets to do the - # $dbh undef - 1; + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } # handle pid changes correctly - do not destroy parent's connection @@ -288,7 +301,10 @@ sub _verify_pid { $_[0]->disconnect; } - return; + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. 
Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } =head2 connect_info @@ -886,10 +902,8 @@ sub disconnect { my $g = scope_guard { - { - local $@ if DBIx::Class::_ENV_::UNSTABLE_DOLLARAT; - eval { $self->_dbh->disconnect }; - } + defined( $self->_dbh ) + and dbic_internal_try { $self->_dbh->disconnect }; $self->_dbh(undef); $self->_dbh_details({}); @@ -911,6 +925,11 @@ sub disconnect { # stops the "implicit rollback on disconnect" warning $self->_exec_txn_rollback unless $self->_dbh_autocommit; } + + # Dummy NEXTSTATE ensuring the all temporaries on the stack are garbage + # collected before leaving this scope. Depending on the code above, this + # may very well be just a preventive measure guarding future modifications + undef; } =head2 with_deferred_fk_checks @@ -1154,7 +1173,7 @@ sub _server_info { my $server_version = dbic_internal_try { $self->_get_server_version - } catch { + } dbic_internal_catch { # driver determination *may* use this codepath # in which case we must rethrow $self->throw_exception($_) if $self->{_in_determine_driver}; @@ -1284,7 +1303,9 @@ sub _determine_driver { if ((not $self->_driver_determined) && (not $self->{_in_determine_driver})) { my $started_connected = 0; - local $self->{_in_determine_driver} = 1; + + local $self->{_in_determine_driver} = 1 + unless $self->{_in_determine_driver}; if (ref($self) eq __PACKAGE__) { my $driver; @@ -1299,7 +1320,17 @@ sub _determine_driver { if ($driver) { my $storage_class = "DBIx::Class::Storage::DBI::${driver}"; if ($self->load_optional_class($storage_class)) { - mro::set_mro($storage_class, 'c3'); + + no strict 'refs'; + mro::set_mro($storage_class, 'c3') if + ( + ${"${storage_class}::__INITIAL_MRO_UPON_DBIC_LOAD__"} + ||= mro::get_mro($storage_class) + ) + ne + 'c3' + ; + bless $self, $storage_class; $self->_rebless(); } @@ -1356,7 +1387,16 @@ sub _extract_driver_from_connect_info { # try to use dsn to not require being connected, the driver may still # force a connection later in _rebless to determine version # (dsn may not be supplied at all if all we do is make a mock-schema) - ($drv) = ($self->_dbi_connect_info->[0] || '') =~ /^dbi:([^:]+):/i; + # + # Use the same regex as the one used by DBI itself (even if the use of + # \w is odd given unicode): + # https://metacpan.org/source/TIMB/DBI-1.634/DBI.pm#L621 + # + # DO NOT use https://metacpan.org/source/TIMB/DBI-1.634/DBI.pm#L559-566 + # as there is a long-standing precedent of not loading DBI.pm until the + # very moment we are actually connecting + # + ($drv) = ($self->_dbi_connect_info->[0] || '') =~ /^dbi:(\w*)/i; $drv ||= $ENV{DBI_DRIVER}; } @@ -1398,12 +1438,10 @@ sub _get_rdbms_name { shift->_dbh_get_info('SQL_DBMS_NAME') } sub _warn_undetermined_driver { my ($self, $msg) = @_; - require Data::Dumper::Concise; - carp_once ($msg . ' While we will attempt to continue anyway, the results ' . 'are likely to be underwhelming. Please upgrade DBIC, and if this message ' . "does not go away, file a bugreport including the following info:\n" - . Data::Dumper::Concise::Dumper($self->_describe_connection) + . 
dump_value $self->_describe_connection ); } @@ -1430,7 +1468,7 @@ sub _do_connection_actions { $self->throw_exception (sprintf ("Don't know how to process conection actions of type '%s'", ref($call)) ); } } - catch { + dbic_internal_catch { if ( $method_prefix =~ /^connect/ ) { # this is an on_connect cycle - we can't just throw while leaving # a handle in an undefined state in our storage object @@ -1580,7 +1618,7 @@ sub _connect { $dbh_error_handler_installer->($self, $dbh); } } - catch { + dbic_internal_catch { $self->throw_exception("DBI Connection failed: $_") }; @@ -1715,10 +1753,8 @@ sub _gen_sql_bind { and $op eq 'select' and - first { - length ref $_->[1] - and - blessed($_->[1]) + grep { + defined blessed($_->[1]) and $_->[1]->isa('DateTime') } @$bind @@ -1785,7 +1821,7 @@ sub _format_for_trace { map { defined( $_ && $_->[1] ) - ? qq{'$_->[1]'} + ? sprintf( "'%s'", "$_->[1]" ) # because overload : q{NULL} } @{$_[1] || []}; } @@ -1962,19 +1998,43 @@ sub insert { # they can be fused once again with the final return $to_insert = { %$to_insert, %$prefetched_values }; - # FIXME - we seem to assume undef values as non-supplied. This is wrong. - # Investigate what does it take to s/defined/exists/ my %pcols = map { $_ => 1 } $source->primary_columns; + my (%retrieve_cols, $autoinc_supplied, $retrieve_autoinc_col); + for my $col ($source->columns) { + + # first autoinc wins - this is why ->columns() in-order iteration is important + # + # FIXME - there ought to be a sanity-check for multiple is_auto_increment settings + # or something... + # if ($col_infos->{$col}{is_auto_increment}) { + + # FIXME - we seem to assume undef values as non-supplied. + # This is wrong. + # Investigate what does it take to s/defined/exists/ + # ( fails t/cdbi/copy.t amoong other things ) $autoinc_supplied ||= 1 if defined $to_insert->{$col}; + $retrieve_autoinc_col ||= $col unless $autoinc_supplied; } # nothing to retrieve when explicit values are supplied next if ( - defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col}) + # FIXME - we seem to assume undef values as non-supplied. + # This is wrong. + # Investigate what does it take to s/defined/exists/ + # ( fails t/cdbi/copy.t amoong other things ) + defined $to_insert->{$col} + and + ( + # not a ref - cheaper to check before a call to is_literal_value() + ! length ref $to_insert->{$col} + or + # not a literal we *MAY* need to pull out ( see check below ) + ! is_literal_value( $to_insert->{$col} ) + ) ); # the 'scalar keys' is a trick to preserve the ->columns declaration order @@ -1985,6 +2045,35 @@ sub insert { ); }; + # corner case of a non-supplied PK which is *not* declared as autoinc + if ( + ! $autoinc_supplied + and + ! defined $retrieve_autoinc_col + and + # FIXME - first come-first serve, suboptimal... + ($retrieve_autoinc_col) = ( grep + { + $pcols{$_} + and + ! $col_infos->{$_}{retrieve_on_insert} + and + ! defined $col_infos->{$_}{is_auto_increment} + } + sort + { $retrieve_cols{$a} <=> $retrieve_cols{$b} } + keys %retrieve_cols + ) + ) { + carp_unique( + "Missing value for primary key column '$retrieve_autoinc_col' on " + . "@{[ $source->source_name ]} - perhaps you forgot to set its " + . "'is_auto_increment' attribute during add_columns()? Treating " + . "'$retrieve_autoinc_col' implicitly as an autoinc, and attempting " + . 
'value retrieval' + ); + } + local $self->{_autoinc_supplied_for_op} = $autoinc_supplied; local $self->{_perform_autoinc_retrieval} = $retrieve_autoinc_col; @@ -2012,7 +2101,7 @@ sub insert { @ir_container = $sth->fetchrow_array; $sth->finish; - } catch { + } dbic_internal_catch { # Evict the $sth from the cache in case we got here, since the finish() # is crucial, at least on older Firebirds, possibly on other engines too # @@ -2179,13 +2268,12 @@ sub _insert_bulk { $msg, $cols->[$c_idx], do { - require Data::Dumper::Concise; local $Data::Dumper::Maxdepth = 5; - Data::Dumper::Concise::Dumper ({ + dump_value { map { $cols->[$_] => $data->[$r_idx][$_] } 0..$#$cols - }), + }; } ); }; @@ -2355,7 +2443,7 @@ sub _dbh_execute_for_fetch { $tuple_status, ); } - catch { + dbic_internal_catch { $err = shift; }; @@ -2371,7 +2459,7 @@ sub _dbh_execute_for_fetch { dbic_internal_try { $sth->finish } - catch { + dbic_internal_catch { $err = shift unless defined $err }; @@ -2382,10 +2470,9 @@ sub _dbh_execute_for_fetch { $self->throw_exception("Unexpected populate error: $err") if ($i > $#$tuple_status); - require Data::Dumper::Concise; $self->throw_exception(sprintf "execute_for_fetch() aborted with '%s' at populate slice:\n%s", ($tuple_status->[$i][1] || $err), - Data::Dumper::Concise::Dumper( { map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols) } ), + dump_value { map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols) }, ); } @@ -2403,7 +2490,7 @@ sub _dbh_execute_inserts_with_no_binds { $sth->execute foreach 1..$count; } - catch { + dbic_internal_catch { $err = shift; }; @@ -2411,7 +2498,7 @@ sub _dbh_execute_inserts_with_no_binds { dbic_internal_try { $sth->finish } - catch { + dbic_internal_catch { $err = shift unless defined $err; }; @@ -2639,7 +2726,7 @@ sub _dbh_columns_info_for { $result{$col_name} = \%column_info; } - } catch { + } dbic_internal_catch { %result = (); }; @@ -2915,20 +3002,18 @@ them. sub create_ddl_dir { my ($self, $schema, $databases, $version, $dir, $preversion, $sqltargs) = @_; - unless ($dir) { + require DBIx::Class::Optional::Dependencies; + if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')) { + $self->throw_exception("Can't create a ddl file without $missing"); + } + + if (!$dir) { carp "No directory given, using ./\n"; $dir = './'; - } else { - -d $dir - or - (require File::Path and File::Path::mkpath (["$dir"])) # mkpath does not like objects (i.e. Path::Class::Dir) - or - $self->throw_exception( - "Failed to create '$dir': " . ($! || $@ || 'error unknown') - ); } - - $self->throw_exception ("Directory '$dir' does not exist\n") unless(-d $dir); + else { + mkdir_p( $dir ) unless -d $dir; + } $databases ||= ['MySQL', 'SQLite', 'PostgreSQL']; $databases = [ $databases ] if(ref($databases) ne 'ARRAY'); @@ -2944,10 +3029,6 @@ sub create_ddl_dir { %{$sqltargs || {}} }; - if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')) { - $self->throw_exception("Can't create a ddl file without $missing"); - } - my $sqlt = SQL::Translator->new( $sqltargs ); $sqlt->parser('SQL::Translator::Parser::DBIx::Class'); @@ -3086,6 +3167,11 @@ See L for a list of values for C<$sqlt_args>. 
sub deployment_statements { my ($self, $schema, $type, $version, $dir, $sqltargs) = @_; + + $self->throw_exception( + 'Calling deployment_statements() in void context makes no sense' + ) unless defined wantarray; + $type ||= $self->sqlt_type; $version ||= $schema->schema_version || '1.x'; $dir ||= './'; @@ -3101,6 +3187,7 @@ sub deployment_statements { return join('', @rows); } + require DBIx::Class::Optional::Dependencies; if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('deploy') ) { $self->throw_exception("Can't deploy without a pregenerated 'ddl_dir' directory or $missing"); } @@ -3145,7 +3232,7 @@ sub deploy { # do a dbh_do cycle here, as we need some error checking in # place (even though we will ignore errors) $self->dbh_do (sub { $_[1]->do($line) }); - } catch { + } dbic_internal_catch { carp qq{$_ (running "${line}")}; }; $self->_query_end($line);
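
The most self-contained behavioural change in this diff is the driver-name extraction in _extract_driver_from_connect_info(), where /^dbi:([^:]+):/i becomes /^dbi:(\w*)/i to mirror how DBI itself parses a DSN. The difference is easiest to see on concrete connection strings. The following standalone Perl sketch is not part of the patch; the DSN strings and the expected output are illustrative assumptions, chosen only to contrast the two patterns.

#!/usr/bin/env perl
# Standalone sketch: contrasts the pre-patch and post-patch regexes used to
# guess the DBD driver name from a DSN, on a few made-up example DSNs.
use strict;
use warnings;

my @example_dsns = (
  'dbi:SQLite:dbname=test.db',          # plain driver name
  'dbi:Pg(RaiseError=>1):dbname=test',  # driver attributes in parentheses
  'dbi:ODBC:',                          # nothing after the driver name
);

for my $dsn (@example_dsns) {
  # pre-patch pattern: requires a second ':' and swallows everything up to it,
  # including any parenthesized attribute list
  my ($old) = $dsn =~ /^dbi:([^:]+):/i;

  # post-patch pattern: stops at the first non-word character, the same way
  # DBI determines the driver name
  my ($new) = $dsn =~ /^dbi:(\w*)/i;

  printf "%-38s old=%-22s new=%s\n",
    $dsn,
    defined $old ? "'$old'" : 'undef',
    defined $new ? "'$new'" : 'undef';
}

# Expected output (illustrative):
#   dbi:SQLite:dbname=test.db              old='SQLite'               new='SQLite'
#   dbi:Pg(RaiseError=>1):dbname=test      old='Pg(RaiseError=>1)'    new='Pg'
#   dbi:ODBC:                              old='ODBC'                 new='ODBC'

For the common DSN shapes both patterns agree; the change matters when the DSN carries DBI's "dbi:Driver(attributes):..." form, where the old pattern over-captures the attribute list as part of the driver name, while the new pattern yields just the bare driver, after which the existing $drv ||= $ENV{DBI_DRIVER} fallback still applies.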