X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=b107d2467b6aee7c35a91ca87e908cad7ed17974;hb=038b8126a538e130bd8799b365a682a2e4d77347;hp=b7e969fad43920da4360821fce703177f60bb708;hpb=9345b14c6c86aa8888bf5d47a569ee9bbde24f47;p=dbsrgits%2FDBIx-Class.git

diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index b7e969f..b107d24 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -12,9 +12,11 @@ use DBIx::Class::Exception;
 use Scalar::Util qw/refaddr weaken reftype blessed/;
 use List::Util qw/first/;
 use Sub::Name 'subname';
+use Context::Preserve 'preserve_context';
 use Try::Tiny;
 use overload ();
 use Data::Compare (); # no imports!!! guard against insane architecture
+use DBI::Const::GetInfoType (); # no import of retarded global hash
 use namespace::clean;

 # default cursor class, overridable in connect_info attributes
@@ -34,6 +36,7 @@ __PACKAGE__->sql_name_sep('.');
 __PACKAGE__->mk_group_accessors('simple' => qw/
   _connect_info _dbi_connect_info _dbic_connect_attributes _driver_determined
   _dbh _dbh_details _conn_pid _sql_maker _sql_maker_opts _dbh_autocommit
+  _perform_autoinc_retrieval _autoinc_supplied_for_op
 /);

 # the values for these accessors are picked out (and deleted) from
@@ -1104,12 +1107,18 @@ sub _server_info {
 }

 sub _get_server_version {
-  shift->_dbh_get_info(18);
+  shift->_dbh_get_info('SQL_DBMS_VER');
 }

 sub _dbh_get_info {
   my ($self, $info) = @_;

+  if ($info =~ /[^0-9]/) {
+    $info = $DBI::Const::GetInfoType::GetInfoType{$info};
+    $self->throw_exception("Info type '$_[1]' not provided by DBI::Const::GetInfoType")
+      unless defined $info;
+  }
+
   return try { $self->_get_dbh->get_info($info) } || undef;
 }

@@ -1216,9 +1225,11 @@ sub _do_query {
     my $attrs = shift @do_args;
     my @bind = map { [ undef, $_ ] } @do_args;

-    $self->_query_start($sql, \@bind);
-    $self->_get_dbh->do($sql, $attrs, @do_args);
-    $self->_query_end($sql, \@bind);
+    $self->dbh_do(sub {
+      $_[0]->_query_start($sql, \@bind);
+      $_[1]->do($sql, $attrs, @do_args);
+      $_[0]->_query_end($sql, \@bind);
+    });
   }

   return $self;
@@ -1232,10 +1243,7 @@ sub _connect {

   my ($old_connect_via, $dbh);

-  if ($INC{'Apache/DBI.pm'} && $ENV{MOD_PERL}) {
-    $old_connect_via = $DBI::connect_via;
-    $DBI::connect_via = 'connect';
-  }
+  local $DBI::connect_via = 'connect' if $INC{'Apache/DBI.pm'} && $ENV{MOD_PERL};

   try {
     if(ref $info[0] eq 'CODE') {
@@ -1297,9 +1305,6 @@ sub _connect {
   }
   catch {
     $self->throw_exception("DBI Connection failed: $_")
-  }
-  finally {
-    $DBI::connect_via = $old_connect_via if $old_connect_via;
   };

   $self->_dbh_autocommit($dbh->{AutoCommit});
@@ -1664,10 +1669,17 @@ sub insert {
   # they can be fused once again with the final return
   $to_insert = { %$to_insert, %$prefetched_values };

+  # FIXME - we seem to assume undef values as non-supplied. This is wrong.
+  # Investigate what does it take to s/defined/exists/
   my $col_infos = $source->columns_info;
   my %pcols = map { $_ => 1 } $source->primary_columns;
-  my %retrieve_cols;
+  my (%retrieve_cols, $autoinc_supplied, $retrieve_autoinc_col);
   for my $col ($source->columns) {
+    if ($col_infos->{$col}{is_auto_increment}) {
+      $autoinc_supplied ||= 1 if defined $to_insert->{$col};
+      $retrieve_autoinc_col ||= $col unless $autoinc_supplied;
+    }
+
     # nothing to retrieve when explicit values are supplied
     next if (defined $to_insert->{$col} and !
       ( ref $to_insert->{$col} eq 'SCALAR'
@@ -1683,6 +1695,9 @@ sub insert {
     );
   };

+  local $self->{_autoinc_supplied_for_op} = $autoinc_supplied;
+  local $self->{_perform_autoinc_retrieval} = $retrieve_autoinc_col;
+
   my ($sqla_opts, @ir_container);
   if (%retrieve_cols and $self->_use_insert_returning) {
     $sqla_opts->{returning_container} = \@ir_container
@@ -1761,7 +1776,13 @@ sub insert_bulk {
     }
   }

-  my $colinfo_cache = {}; # since we will run _resolve_bindattrs on the same $source a lot
+  my $colinfos = $source->columns_info($cols);
+
+  local $self->{_autoinc_supplied_for_op} =
+    (first { $_->{is_auto_increment} } values %$colinfos)
+      ? 1
+      : 0
+  ;

   # get a slice type index based on first row of data
   # a "column" in this context may refer to more than one bind value
@@ -1798,7 +1819,7 @@ sub insert_bulk {

   # normalization of user supplied stuff
   my $resolved_bind = $self->_resolve_bindattrs(
-    $source, \@bind, $colinfo_cache,
+    $source, \@bind, $colinfos,
   );

   # store value-less (attrs only) bind info - we will be comparing all
@@ -1914,7 +1935,7 @@ sub insert_bulk {
          map
            { $_->[0] }
            @{$self->_resolve_bindattrs(
-             $source, [ @{$$val}[1 .. $#$$val] ], $colinfo_cache,
+             $source, [ @{$$val}[1 .. $#$$val] ], $colinfos,
            )}
        ],
      )) {
@@ -1952,7 +1973,7 @@ sub insert_bulk {

   $guard->commit;

-  return (wantarray ? ($rv, $sth, @$proto_bind) : $rv);
+  return wantarray ? ($rv, $sth, @$proto_bind) : $rv;
 }

 # execute_for_fetch is capable of returning data just fine (it means it
@@ -2097,6 +2118,10 @@ sub _select {
 sub _select_args_to_query {
   my $self = shift;

+  $self->throw_exception(
+    "Unable to generate limited query representation with 'software_limit' enabled"
+  ) if ($_[3]->{software_limit} and ($_[3]->{offset} or $_[3]->{rows}) );
+
   # my ($op, $ident, $select, $cond, $rs_attrs, $rows, $offset)
   #  = $self->_select_args($ident, $select, $cond, $attrs);
   my ($op, $ident, @args) =
@@ -2729,18 +2754,12 @@ sub deployment_statements {
     data => $schema,
   );

-  my @ret;
-  if (wantarray) {
-    @ret = $tr->translate;
-  }
-  else {
-    $ret[0] = $tr->translate;
-  }
-
-  $self->throw_exception( 'Unable to produce deployment statements: ' . $tr->error)
-    unless (@ret && defined $ret[0]);
-
-  return wantarray ? @ret : $ret[0];
+  return preserve_context {
+    $tr->translate
+  } after => sub {
+    $self->throw_exception( 'Unable to produce deployment statements: ' . $tr->error)
+      unless defined $_[0];
+  };
 }

 # FIXME deploy() currently does not accurately report sql errors
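
For readers unfamiliar with the symbolic get_info() lookup introduced in the _dbh_get_info hunk: the standalone sketch below (not part of the patch) shows how a name such as 'SQL_DBMS_VER' resolves through %DBI::Const::GetInfoType::GetInfoType to the numeric code 18 that _get_server_version used to hardcode. The SQLite DSN and DBD::SQLite driver are assumptions made only so the example runs on its own.

#!/usr/bin/env perl
# Illustration only - mirrors the lookup the patched _dbh_get_info() performs.
# Assumes DBD::SQLite is installed.
use strict;
use warnings;
use DBI;
use DBI::Const::GetInfoType ();  # no import - access the hash fully qualified

my $dbh = DBI->connect('dbi:SQLite:dbname=:memory:', '', '', { RaiseError => 1 });

my $info_name = 'SQL_DBMS_VER';

# Non-numeric argument: resolve it via the GetInfoType hash, as the new code does
my $info_code = $DBI::Const::GetInfoType::GetInfoType{$info_name};
die "Info type '$info_name' not provided by DBI::Const::GetInfoType\n"
  unless defined $info_code;

# SQL_DBMS_VER maps to 18 - the magic number the old code hardcoded
printf "%s (code %d): %s\n",
  $info_name, $info_code, $dbh->get_info($info_code) // '(undef)';

Similarly, the deployment_statements() hunk replaces hand-rolled wantarray bookkeeping with Context::Preserve. A minimal sketch of the same pattern follows; the subroutine and variable names here are invented for illustration and are not part of the patch.

use strict;
use warnings;
use Context::Preserve 'preserve_context';

# Call a context-sensitive producer, then sanity-check its result in an
# 'after' hook; the caller's list vs. scalar context is passed straight through.
sub checked_produce {
  my $producer = shift;                      # a coderef standing in for $tr->translate
  return preserve_context { $producer->() }
    after => sub { die "producer returned nothing\n" unless defined $_[0] };
}

my @many = checked_produce(sub { (1, 2, 3) });   # list context preserved
my $one  = checked_produce(sub { 42 });          # scalar context preserved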