X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=99896dade27f55a179f9bb42855c6ddd59ddab44;hb=e2c0df8e0b707050eb005ac6f68548f857a36acf;hp=de6272eb11dec49fb288d820a55b1e9db3c2eb9d;hpb=b715120665fb799fa423dd8422920e4153c29928;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index de6272e..99896da 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -14,7 +14,7 @@ use IO::File; __PACKAGE__->mk_group_accessors( 'simple' => qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid - cursor on_connect_do transaction_depth/ + disable_sth_caching cursor on_connect_do transaction_depth/ ); BEGIN { @@ -58,9 +58,10 @@ WHERE ROW_NUM BETWEEN $offset AND $last # While we're at it, this should make LIMIT queries more efficient, # without digging into things too deeply +use Scalar::Util 'blessed'; sub _find_syntax { my ($self, $syntax) = @_; - my $dbhname = ref $syntax eq 'HASH' ? $syntax->{Driver}{Name} : ''; + my $dbhname = blessed($syntax) ? $syntax->{Driver}{Name} : $syntax; if(ref($self) && $dbhname && $dbhname eq 'DB2') { return 'RowNumberOver'; } @@ -304,6 +305,8 @@ sub new { $new->cursor("DBIx::Class::Storage::DBI::Cursor"); $new->transaction_depth(0); $new->_sql_maker_opts({}); + $new->{_in_dbh_do} = 0; + $new->{_dbh_gen} = 0; $new; } @@ -332,6 +335,11 @@ This can be set to an arrayref of literal sql statements, which will be executed immediately after making the connection to the database every time we [re-]connect. +=item disable_sth_caching + +If set to a true value, this option will disable the caching of +statement handles via L. + =item limit_dialect Sets the limit dialect. This is useful for JDBC-bridge among others @@ -409,6 +417,7 @@ Examples: quote_char => q{`}, name_sep => q{@}, on_connect_do => ['SET search_path TO myschema,otherschema,public'], + disable_sth_caching => 1, }, ] ); @@ -428,8 +437,10 @@ sub connect_info { my $info = [ @$info_arg ]; # copy because we can alter it my $last_info = $info->[-1]; if(ref $last_info eq 'HASH') { - if(my $on_connect_do = delete $last_info->{on_connect_do}) { - $self->on_connect_do($on_connect_do); + for my $storage_opt (qw/on_connect_do disable_sth_caching/) { + if(my $value = delete $last_info->{$storage_opt}) { + $self->$storage_opt($value); + } } for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) { if(my $opt_val = delete $last_info->{$sql_maker_opt}) { @@ -482,11 +493,12 @@ sub dbh_do { my $self = shift; my $coderef = shift; - return $coderef->($self, $self->_dbh, @_) if $self->{_in_txn_do}; - ref $coderef eq 'CODE' or $self->throw_exception ('$coderef must be a CODE reference'); + return $coderef->($self, $self->_dbh, @_) if $self->{_in_dbh_do}; + local $self->{_in_dbh_do} = 1; + my @result; my $want_array = wantarray; @@ -517,7 +529,7 @@ sub dbh_do { # This is basically a blend of dbh_do above and DBIx::Class::Storage::txn_do. 
# It also informs dbh_do to bypass itself while under the direction of txn_do, -# via $self->{_in_txn_do} (this saves some redundant eval and errorcheck, etc) +# via $self->{_in_dbh_do} (this saves some redundant eval and errorcheck, etc) sub txn_do { my $self = shift; my $coderef = shift; @@ -525,7 +537,7 @@ sub txn_do { ref $coderef eq 'CODE' or $self->throw_exception ('$coderef must be a CODE reference'); - local $self->{_in_txn_do} = 1; + local $self->{_in_dbh_do} = 1; my @result; my $want_array = wantarray; @@ -588,6 +600,7 @@ sub disconnect { $self->_dbh->rollback unless $self->_dbh->{AutoCommit}; $self->_dbh->disconnect; $self->_dbh(undef); + $self->{_dbh_gen}++; } } @@ -596,7 +609,9 @@ sub connected { if(my $dbh = $self->_dbh) { if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) { - return $self->_dbh(undef); + $self->_dbh(undef); + $self->{_dbh_gen}++; + return; } else { $self->_verify_pid; @@ -616,6 +631,7 @@ sub _verify_pid { $self->_dbh->{InactiveDestroy} = 1; $self->_dbh(undef); + $self->{_dbh_gen}++; return; } @@ -700,6 +716,7 @@ sub _connect { $dbh = DBI->connect(@info); $dbh->{RaiseError} = 1; $dbh->{PrintError} = 0; + $dbh->{PrintWarn} = 0; } }; @@ -807,13 +824,7 @@ sub _execute { $self->debugobj->query_start($sql, @debug_bind); } - my $sth = eval { $self->sth($sql) }; - - if (!$sth || $@) { - $self->throw_exception( - 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql" - ); - } + my $sth = $self->sth($sql); my $rv; if ($sth) { @@ -843,6 +854,54 @@ sub insert { return $to_insert; } +## Still not quite perfect, and EXPERIMENTAL +## Currently it is assumed that all values passed will be "normal", i.e. not +## scalar refs, or at least, all the same type as the first set, the statement is +## only prepped once. +sub insert_bulk { + my ($self, $table, $cols, $data) = @_; + my %colvalues; + @colvalues{@$cols} = (0..$#$cols); + my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues); +# print STDERR "BIND".Dumper(\@bind); + + if ($self->debug) { + my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind; + $self->debugobj->query_start($sql, @debug_bind); + } + my $sth = $self->sth($sql); + +# @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args + + my $rv; + ## This must be an arrayref, else nothing works! + my $tuple_status = []; +# use Data::Dumper; +# print STDERR Dumper($data); + if ($sth) { + my $time = time(); + $rv = eval { $sth->execute_array({ ArrayTupleFetch => sub { my $values = shift @$data; return if !$values; return [ @{$values}[@bind] ]}, + ArrayTupleStatus => $tuple_status }) }; +# print STDERR Dumper($tuple_status); +# print STDERR "RV: $rv\n"; + if ($@ || !defined $rv) { + my $errors = ''; + foreach my $tuple (@$tuple_status) + { + $errors .= "\n" . $tuple->[1] if(ref $tuple); + } + $self->throw_exception("Error executing '$sql': ".($@ || $errors)); + } + } else { + $self->throw_exception("'$sql' did not generate a statement."); + } + if ($self->debug) { + my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind; + $self->debugobj->query_end($sql, @debug_bind); + } + return (wantarray ? ($rv, $sth, @bind) : $rv); +} + sub update { return shift->_execute('update' => [], @_); } @@ -917,8 +976,17 @@ Returns a L sth (statement handle) for the supplied SQL. sub _dbh_sth { my ($self, $dbh, $sql) = @_; + # 3 is the if_active parameter which avoids active sth re-use - $dbh->prepare_cached($sql, {}, 3); + my $sth = $self->disable_sth_caching + ? 
$dbh->prepare($sql) + : $dbh->prepare_cached($sql, {}, 3); + + $self->throw_exception( + 'no sth generated via sql (' . ($@ || $dbh->errstr) . "): $sql" + ) if !$sth; + + $sth; } sub sth { @@ -1011,7 +1079,7 @@ sub sqlt_type { shift->dbh->{Driver}->{Name} } =over 4 -=item Arguments: $schema \@databases, $version, $directory, $sqlt_args +=item Arguments: $schema \@databases, $version, $directory, $preversion, $sqlt_args =back @@ -1025,7 +1093,7 @@ across all databases, or fully handle complex relationships. sub create_ddl_dir { - my ($self, $schema, $databases, $version, $dir, $sqltargs) = @_; + my ($self, $schema, $databases, $version, $dir, $preversion, $sqltargs) = @_; if(!$dir || !-d $dir) { @@ -1038,14 +1106,18 @@ sub create_ddl_dir $sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} }; eval "use SQL::Translator"; - $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@; + $self->throw_exception("Can't create a ddl file without SQL::Translator: $@") if $@; - my $sqlt = SQL::Translator->new($sqltargs); + my $sqlt = SQL::Translator->new({ +# debug => 1, + add_drop_table => 1, + }); foreach my $db (@$databases) { $sqlt->reset(); $sqlt->parser('SQL::Translator::Parser::DBIx::Class'); # $sqlt->parser_args({'DBIx::Class' => $schema); + $sqlt = $self->configure_sqlt($sqlt, $db); $sqlt->data($schema); $sqlt->producer($db); @@ -1053,24 +1125,97 @@ sub create_ddl_dir my $filename = $schema->ddl_filename($db, $dir, $version); if(-e $filename) { - $self->throw_exception("$filename already exists, skipping $db"); + warn("$filename already exists, skipping $db"); next; } - open($file, ">$filename") - or $self->throw_exception("Can't open $filename for writing ($!)"); + my $output = $sqlt->translate; -#use Data::Dumper; -# print join(":", keys %{$schema->source_registrations}); -# print Dumper($sqlt->schema); if(!$output) { - $self->throw_exception("Failed to translate to $db. (" . $sqlt->error . ")"); + warn("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); next; } + if(!open($file, ">$filename")) + { + $self->throw_exception("Can't open $filename for writing ($!)"); + next; + } print $file $output; close($file); + + if($preversion) + { + eval "use SQL::Translator::Diff"; + if($@) + { + warn("Can't diff versions without SQL::Translator::Diff: $@"); + next; + } + + my $prefilename = $schema->ddl_filename($db, $dir, $preversion); +# print "Previous version $prefilename\n"; + if(!-e $prefilename) + { + warn("No previous schema file found ($prefilename)"); + next; + } + #### We need to reparse the SQLite file we just wrote, so that + ## Diff doesnt get all confoosed, and Diff is *very* confused. + ## FIXME: rip Diff to pieces! 
+# my $target_schema = $sqlt->schema; +# unless ( $target_schema->name ) { +# $target_schema->name( $filename ); +# } + my @input; + push @input, {file => $prefilename, parser => $db}; + push @input, {file => $filename, parser => $db}; + my ( $source_schema, $source_db, $target_schema, $target_db ) = map { + my $file = $_->{'file'}; + my $parser = $_->{'parser'}; + + my $t = SQL::Translator->new; + $t->debug( 0 ); + $t->trace( 0 ); + $t->parser( $parser ) or die $t->error; + my $out = $t->translate( $file ) or die $t->error; + my $schema = $t->schema; + unless ( $schema->name ) { + $schema->name( $file ); + } + ($schema, $parser); + } @input; + + my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db, + $target_schema, $db, + {} + ); + my $difffile = $schema->ddl_filename($db, $dir, $version, $preversion); + print STDERR "Diff: $difffile: $db, $dir, $version, $preversion \n"; + if(-e $difffile) + { + warn("$difffile already exists, skipping"); + next; + } + if(!open $file, ">$difffile") + { + $self->throw_exception("Can't write to $difffile ($!)"); + next; + } + print $file $diff; + close($file); + } } +} +sub configure_sqlt() { + my $self = shift; + my $tr = shift; + my $db = shift || $self->sqlt_type; + if ($db eq 'PostgreSQL') { + $tr->quote_table_names(0); + $tr->quote_field_names(0); + } + return $tr; } =head2 deployment_statements @@ -1103,6 +1248,17 @@ sub deployment_statements { $type ||= $self->sqlt_type; $version ||= $schema->VERSION || '1.x'; $dir ||= './'; + my $filename = $schema->ddl_filename($type, $dir, $version); + if(-f $filename) + { + my $file; + open($file, "<$filename") + or $self->throw_exception("Can't open $filename ($!)"); + my @rows = <$file>; + close($file); + return join('', @rows); + } + eval "use SQL::Translator"; if(!$@) { @@ -1115,21 +1271,9 @@ sub deployment_statements { return "SQL::Translator::Producer::${type}"->can('produce')->($tr); } - my $filename = $schema->ddl_filename($type, $dir, $version); - if(!-f $filename) - { -# $schema->create_ddl_dir([ $type ], $version, $dir, $sqltargs); - $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy"); - return; - } - my $file; - open($file, "<$filename") - or $self->throw_exception("Can't open $filename ($!)"); - my @rows = <$file>; - close($file); - - return join('', @rows); - + $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy"); + return; + } sub deploy {
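
As a usage illustration (not part of the patch above), the verbatim sketch below shows how the two user-visible additions, the disable_sth_caching connect_info option and the EXPERIMENTAL insert_bulk storage method, might be called. My::Schema, the connection details, and the 'cd' table with its columns are hypothetical placeholders; only the option name and the insert_bulk argument shape (table name, arrayref of column names, arrayref of row arrayrefs) are taken from the diff itself.

  use My::Schema;   # hypothetical schema class, for illustration only

  # Storage-level options such as on_connect_do and disable_sth_caching
  # go in the trailing hashref of connect_info; the patched connect_info
  # deletes them from that hashref before the rest is handed to DBI.
  my $schema = My::Schema->connect(
    'dbi:Pg:dbname=example',
    'user',
    'password',
    {
      AutoCommit          => 1,
      on_connect_do       => ['SET search_path TO myschema,public'],
      disable_sth_caching => 1,   # use prepare() instead of prepare_cached()
    },
  );

  # insert_bulk (EXPERIMENTAL): prepares the INSERT once and pushes all
  # rows through DBI's execute_array. Values in each row must correspond
  # positionally to the column-name list; literal SQL / scalar refs are
  # not handled by this first cut.
  $schema->storage->insert_bulk(
    'cd',                             # hypothetical table
    [ qw/cdid artist title year/ ],
    [
      [ 1, 7, 'First Album',  1999 ],
      [ 2, 7, 'Second Album', 2001 ],
    ],
  );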