X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=9c622f8428dabf5f69a5bd3807ecfe48bae0f8e1;hb=1e4f9fb3b8bd1f54518bc2942554099356fa6524;hp=a9a83819864b2feb5022c006b1dec34b909d4376;hpb=1f9712ad52d92dd562fcda6e1846370d45342a13;p=dbsrgits%2FDBIx-Class.git

diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index a9a8381..9c622f8 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -8,7 +8,6 @@ use base qw/DBIx::Class::Storage::DBIHacks DBIx::Class::Storage/;
 use mro 'c3';
 
 use DBIx::Class::Carp;
-use DBIx::Class::Exception;
 use Scalar::Util qw/refaddr weaken reftype blessed/;
 use List::Util qw/first/;
 use Sub::Name 'subname';
@@ -177,7 +176,6 @@ sub new {
   $new->_sql_maker_opts({});
   $new->_dbh_details({});
   $new->{_in_do_block} = 0;
-  $new->{_dbh_gen} = 0;
 
   # read below to see what this does
   $new->_arm_global_destructor;
@@ -217,17 +215,17 @@ sub new {
     # soon as possible (DBIC will reconnect only on demand from within
     # the thread)
     my @instances = grep { defined $_ } values %seek_and_destroy;
+    %seek_and_destroy = ();
+
     for (@instances) {
-      $_->{_dbh_gen}++; # so that existing cursors will drop as well
       $_->_dbh(undef);
 
       $_->transaction_depth(0);
       $_->savepoints([]);
-    }
 
-    # properly renumber all existing refs
-    %seek_and_destroy = ();
-    $_->_arm_global_destructor for @instances;
+      # properly renumber existing refs
+      $_->_arm_global_destructor
+    }
   }
 }
 
@@ -253,7 +251,6 @@ sub _verify_pid {
   my $pid = $self->_conn_pid;
   if( defined $pid and $pid != $$ and my $dbh = $self->_dbh ) {
     $dbh->{InactiveDestroy} = 1;
-    $self->{_dbh_gen}++;
     $self->_dbh(undef);
     $self->transaction_depth(0);
     $self->savepoints([]);
@@ -794,7 +791,10 @@ sub dbh_do {
   return $self->$run_target($self->_get_dbh, @_)
     if $self->{_in_do_block} or $self->transaction_depth;
 
-  my $args = \@_;
+  # take a ref instead of a copy, to preserve @_ aliasing
+  # semantics within the coderef, but only if needed
+  # (pseudoforking doesn't like this trick much)
+  my $args = @_ ? \@_ : [];
 
   DBIx::Class::Storage::BlockRunner->new(
     storage => $self,
@@ -833,7 +833,6 @@ sub disconnect {
     %{ $self->_dbh->{CachedKids} } = ();
     $self->_dbh->disconnect;
     $self->_dbh(undef);
-    $self->{_dbh_gen}++;
   }
 }
 
@@ -1179,7 +1178,9 @@ sub _describe_connection {
       SQL_TXN_ISOLATION_OPTION
     /
   ) {
-    my $v = $self->_dbh_get_info($inf);
+    # some drivers barf on things they do not know about instead
+    # of returning undef
+    my $v = try { $self->_dbh_get_info($inf) };
     next unless defined $v;
 
     #my $key = sprintf( '%s(%s)', $inf, $DBI::Const::GetInfoType::GetInfoType{$inf} );
@@ -1386,10 +1387,17 @@ sub _connect {
       $dbh = DBI->connect(@info);
     }
 
-    if (!$dbh) {
-      die $DBI::errstr;
-    }
+    die $DBI::errstr unless $dbh;
+
+    die sprintf ("%s fresh DBI handle with a *false* 'Active' attribute. "
+      . 'This handle is disconnected as far as DBIC is concerned, and we can '
+      . 'not continue',
+      ref $info[0] eq 'CODE'
+        ? "Connection coderef $info[0] returned a"
+        : 'DBI->connect($schema->storage->connect_info) resulted in a'
+    ) unless $dbh->FETCH('Active');
+
     # sanity checks unless asked otherwise
     unless ($self->unsafe) {
       $self->throw_exception(
@@ -2277,24 +2285,33 @@ sub _select_args {
     $attrs->{rows} = $sql_maker->__max_int;
   }
 
-  my @limit;
+  my ($complex_prefetch, @limit);
 
-  # see if we need to tear the prefetch apart otherwise delegate the limiting to the
-  # storage, unless software limit was requested
+  # see if we will need to tear the prefetch apart to satisfy group_by == select
+  # this is *extremely tricky* to get right
+  #
+  # Follows heavy but necessary analyzis of the group_by - if it refers to any
+  # sort of non-root column assume the user knows what they are doing and do
+  # not try to be clever
   if (
-    #limited has_many
-    ( $attrs->{rows} && keys %{$attrs->{collapse}} )
-      ||
-    # grouped prefetch (to satisfy group_by == select)
-    ( $attrs->{group_by}
-        &&
-      @{$attrs->{group_by}}
-        &&
-      $attrs->{_prefetch_selector_range}
-    )
+    $attrs->{_related_results_construction}
+      and
+    $attrs->{group_by}
+      and
+    @{$attrs->{group_by}}
+      and
+    my $grp_aliases = try {
+      $self->_resolve_aliastypes_from_select_args( $attrs->{from}, undef, undef, { group_by => $attrs->{group_by} } )
+    }
   ) {
-    ($ident, $select, $where, $attrs)
-      = $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
+    $complex_prefetch = ! defined first { $_ ne $rs_alias } keys %{ $grp_aliases->{grouping} || {} };
+  }
+
+  $complex_prefetch ||= ( $attrs->{rows} && $attrs->{collapse} );
+
+  if ($complex_prefetch) {
+    ($ident, $select, $where, $attrs) =
+      $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
   }
   elsif (! $attrs->{software_limit} ) {
     push @limit, (
@@ -2305,6 +2322,8 @@ sub _select_args {
 
   # try to simplify the joinmap further (prune unreferenced type-single joins)
   if (
+    ! $complex_prefetch
+      and
     ref $ident
       and
     reftype $ident eq 'ARRAY'