diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index dff783b..99896da 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -10,8 +10,13 @@ use SQL::Abstract::Limit;
use DBIx::Class::Storage::DBI::Cursor;
use DBIx::Class::Storage::Statistics;
use IO::File;
-use Scalar::Util qw/weaken/;
-use Carp::Clan qw/DBIx::Class/;
+
+__PACKAGE__->mk_group_accessors(
+  'simple' =>
+    qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid
+       disable_sth_caching cursor on_connect_do transaction_depth/
+);
+
BEGIN {

package DBIC::SQL::Abstract; # Would merge upstream, but nate doesn't reply :(
@@ -30,10 +35,37 @@ sub new {
  $self;
}

+sub _RowNumberOver {
+  my ($self, $sql, $order, $rows, $offset ) = @_;
+
+  $offset += 1;
+  my $last = $rows + $offset;
+  my ( $order_by ) = $self->_order_by( $order );
+
+  $sql = <<"";
+SELECT * FROM
+(
+   SELECT Q1.*, ROW_NUMBER() OVER( ) AS ROW_NUM FROM (
+      $sql
+      $order_by
+   ) Q1
+) Q2
+WHERE ROW_NUM BETWEEN $offset AND $last
+
+  return $sql;
+}
+
+
# While we're at it, this should make LIMIT queries more efficient,
#  without digging into things too deeply
+use Scalar::Util 'blessed';
sub _find_syntax {
  my ($self, $syntax) = @_;
+  my $dbhname = blessed($syntax) ? $syntax->{Driver}{Name} : $syntax;
+  if(ref($self) && $dbhname && $dbhname eq 'DB2') {
+    return 'RowNumberOver';
+  }
+
  $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
}

@@ -251,14 +283,6 @@ sub name_sep {
}

# End of BEGIN block

-use base qw/DBIx::Class/;
-
-__PACKAGE__->load_components(qw/AccessorGroup/);
-
-__PACKAGE__->mk_group_accessors('simple' =>
-  qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid
-     debug debugobj cursor on_connect_do transaction_depth schema/);
-
=head1 NAME

DBIx::Class::Storage::DBI - DBI storage handler
@@ -267,69 +291,24 @@ DBIx::Class::Storage::DBI - DBI storage handler

=head1 DESCRIPTION

-This class represents the connection to the database
+This class represents the connection to an RDBMS via L<DBI>. See
+L<DBIx::Class::Storage> for general information. This pod only
+documents DBI-specific methods and behaviors.

=head1 METHODS

-=head2 new
-
-Constructor. Only argument is the schema which instantiated us.
-
=cut

sub new {
-  my ($self, $schema) = @_;
+  my $new = shift->next::method(@_);

-  my $new = bless({}, ref $self || $self);
-
-  $new->set_schema($schema);
  $new->cursor("DBIx::Class::Storage::DBI::Cursor");
  $new->transaction_depth(0);
-
-  $new->debugobj(new DBIx::Class::Storage::Statistics());
-
-  my $fh;
-
-  my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}
-                  || $ENV{DBIC_TRACE};
-
-  if (defined($debug_env) && ($debug_env =~ /=(.+)$/)) {
-    $fh = IO::File->new($1, 'w')
-      or $new->throw_exception("Cannot open trace file $1");
-  } else {
-    $fh = IO::File->new('>&STDERR');
-  }
-  $new->debugfh($fh);
-  $new->debug(1) if $debug_env;
  $new->_sql_maker_opts({});
-  return $new;
-}
-
-=head2 set_schema
-
-Used to reset the schema class or object which owns this
-storage object, such as after a C<clone>.
+  $new->{_in_dbh_do} = 0;
+  $new->{_dbh_gen} = 0;

-=cut
-
-sub set_schema {
-  my ($self, $schema) = @_;
-  $self->schema($schema);
-  weaken($self->{schema}) if ref $self->{schema};
-}
-
-
-=head2 throw_exception
-
-Throws an exception - croaks.
-
-=cut
-
-sub throw_exception {
-  my $self = shift;
-
-  $self->schema->throw_exception(@_) if $self->schema;
-  croak @_;
+  $new;
}

=head2 connect_info
@@ -356,6 +335,11 @@ This can be set to an arrayref of literal sql statements, which will
be executed immediately after making the connection to the database
every time we [re-]connect.

+=item disable_sth_caching
+
+If set to a true value, this option will disable the caching of
+statement handles via L<DBI/prepare_cached>.
+
=item limit_dialect

Sets the limit dialect. This is useful for JDBC-bridge among others
@@ -433,80 +417,87 @@ Examples:
    quote_char => q{`},
    name_sep   => q{@},
    on_connect_do => ['SET search_path TO myschema,otherschema,public'],
+    disable_sth_caching => 1,
  },
 ]
);

-=head2 on_connect_do
-
-This method is deprecated in favor of setting via L</connect_info>.
-
-=head2 debug
-
-Causes SQL trace information to be emitted on the C<debugobj> object.
-(or C<STDERR> if C<debugobj> has not specifically been set).
+=cut

-This is the equivalent to setting L<DBIC_TRACE> in your
-shell environment.
+sub connect_info {
+  my ($self, $info_arg) = @_;

-=head2 debugfh
+  return $self->_connect_info if !$info_arg;

-Set or retrieve the filehandle used for trace/debug output. This should be
-an IO::Handle compatible object (only the C<print> method is used). Initially
-set to be STDERR - although see information on the
-L<DBIC_TRACE> environment variable.
+  # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
+  #  the new set of options
+  $self->_sql_maker(undef);
+  $self->_sql_maker_opts({});

-=cut
+  my $info = [ @$info_arg ]; # copy because we can alter it
+  my $last_info = $info->[-1];
+  if(ref $last_info eq 'HASH') {
+    for my $storage_opt (qw/on_connect_do disable_sth_caching/) {
+      if(my $value = delete $last_info->{$storage_opt}) {
+        $self->$storage_opt($value);
+      }
+    }
+    for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
+      if(my $opt_val = delete $last_info->{$sql_maker_opt}) {
+        $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val;
+      }
+    }

-sub debugfh {
-  my $self = shift;
+    # Get rid of any trailing empty hashref
+    pop(@$info) if !keys %$last_info;
+  }

-  if ($self->debugobj->can('debugfh')) {
-    return $self->debugobj->debugfh(@_);
-  }
+  $self->_connect_info($info);
}

-=head2 debugobj
-
-Sets or retrieves the object used for metric collection. Defaults to an instance
-of L<DBIx::Class::Storage::Statistics> that is compatible with the original
-method of using a coderef as a callback. See the aforementioned Statistics
-class for more information.
+=head2 on_connect_do

-=head2 debugcb
+This method is deprecated in favor of setting via L</connect_info>.

-Sets a callback to be executed each time a statement is run; takes a sub
-reference. Callback is executed as $sub->($op, $info) where $op is
-SELECT/INSERT/UPDATE/DELETE and $info is what would normally be printed.
+=head2 dbh_do

-See L</debugobj> for a better way.
+Arguments: $subref, @extra_coderef_args?

-=cut
+Execute the given subref using the new exception-based connection management.

-sub debugcb {
-  my $self = shift;
+The first two arguments will be the storage object that C<dbh_do> was called
+on and a database handle to use. Any additional arguments will be passed
+verbatim to the called subref as arguments 2 and onwards.
- if ($self->debugobj->can('callback')) { - return $self->debugobj->callback(@_); - } -} +Using this (instead of $self->_dbh or $self->dbh) ensures correct +exception handling and reconnection (or failover in future subclasses). -=head2 dbh_do +Your subref should have no side-effects outside of the database, as +there is the potential for your subref to be partially double-executed +if the database connection was stale/dysfunctional. -Execute the given subref with the underlying database handle as its -first argument, using the new exception-based connection management. Example: my @stuff = $schema->storage->dbh_do( sub { - shift->selectrow_array("SELECT * FROM foo") - } + my ($storage, $dbh, @cols) = @_; + my $cols = join(q{, }, @cols); + $dbh->selectrow_array("SELECT $cols FROM foo"); + }, + @column_list ); =cut sub dbh_do { - my ($self, $todo) = @_; + my $self = shift; + my $coderef = shift; + + ref $coderef eq 'CODE' or $self->throw_exception + ('$coderef must be a CODE reference'); + + return $coderef->($self, $self->_dbh, @_) if $self->{_in_dbh_do}; + local $self->{_in_dbh_do} = 1; my @result; my $want_array = wantarray; @@ -514,34 +505,90 @@ sub dbh_do { eval { $self->_verify_pid if $self->_dbh; $self->_populate_dbh if !$self->_dbh; - my $dbh = $self->_dbh; if($want_array) { - @result = $todo->($dbh); + @result = $coderef->($self, $self->_dbh, @_); } elsif(defined $want_array) { - $result[0] = $todo->($dbh); + $result[0] = $coderef->($self, $self->_dbh, @_); } else { - $todo->($dbh); + $coderef->($self, $self->_dbh, @_); } }; - if($@) { + my $exception = $@; + if(!$exception) { return $want_array ? @result : $result[0] } + + $self->throw_exception($exception) if $self->connected; + + # We were not connected - reconnect and retry, but let any + # exception fall right through this time + $self->_populate_dbh; + $coderef->($self, $self->_dbh, @_); +} + +# This is basically a blend of dbh_do above and DBIx::Class::Storage::txn_do. +# It also informs dbh_do to bypass itself while under the direction of txn_do, +# via $self->{_in_dbh_do} (this saves some redundant eval and errorcheck, etc) +sub txn_do { + my $self = shift; + my $coderef = shift; + + ref $coderef eq 'CODE' or $self->throw_exception + ('$coderef must be a CODE reference'); + + local $self->{_in_dbh_do} = 1; + + my @result; + my $want_array = wantarray; + + my $tried = 0; + while(1) { + eval { + $self->_verify_pid if $self->_dbh; + $self->_populate_dbh if !$self->_dbh; + + $self->txn_begin; + if($want_array) { + @result = $coderef->(@_); + } + elsif(defined $want_array) { + $result[0] = $coderef->(@_); + } + else { + $coderef->(@_); + } + $self->txn_commit; + }; + my $exception = $@; - $self->connected - ? $self->throw_exception($exception) - : $self->_populate_dbh; + if(!$exception) { return $want_array ? @result : $result[0] } + + if($tried++ > 0 || $self->connected) { + eval { $self->txn_rollback }; + my $rollback_exception = $@; + if($rollback_exception) { + my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION"; + $self->throw_exception($exception) # propagate nested rollback + if $rollback_exception =~ /$exception_class/; + + $self->throw_exception( + "Transaction aborted: ${exception}. " + . "Rollback failed: ${rollback_exception}" + ); + } + $self->throw_exception($exception) + } - my $dbh = $self->_dbh; - return $todo->($dbh); + # We were not connected, and was first try - reconnect and retry + # via the while loop + $self->_populate_dbh; } - - return $want_array ? 
@result : $result[0]; } =head2 disconnect -Disconnect the L handle, performing a rollback first if the +Our C method also performs a rollback first if the database is not in C mode. =cut @@ -553,22 +600,18 @@ sub disconnect { $self->_dbh->rollback unless $self->_dbh->{AutoCommit}; $self->_dbh->disconnect; $self->_dbh(undef); + $self->{_dbh_gen}++; } } -=head2 connected - -Check if the L handle is connected. Returns true if the handle -is connected. - -=cut - sub connected { my ($self) = @_; if(my $dbh = $self->_dbh) { if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) { - return $self->_dbh(undef); + $self->_dbh(undef); + $self->{_dbh_gen}++; + return; } else { $self->_verify_pid; @@ -588,17 +631,11 @@ sub _verify_pid { $self->_dbh->{InactiveDestroy} = 1; $self->_dbh(undef); + $self->{_dbh_gen}++; return; } -=head2 ensure_connected - -Check whether the database handle is connected - if not then make a -connection. - -=cut - sub ensure_connected { my ($self) = @_; @@ -626,13 +663,6 @@ sub _sql_maker_args { return ( limit_dialect => $self->dbh, %{$self->_sql_maker_opts} ); } -=head2 sql_maker - -Returns a C object - normally an object of class -C. - -=cut - sub sql_maker { my ($self) = @_; unless ($self->_sql_maker) { @@ -641,35 +671,6 @@ sub sql_maker { return $self->_sql_maker; } -sub connect_info { - my ($self, $info_arg) = @_; - - return $self->_connect_info if !$info_arg; - - # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only - # the new set of options - $self->_sql_maker(undef); - $self->_sql_maker_opts({}); - - my $info = [ @$info_arg ]; # copy because we can alter it - my $last_info = $info->[-1]; - if(ref $last_info eq 'HASH') { - if(my $on_connect_do = delete $last_info->{on_connect_do}) { - $self->on_connect_do($on_connect_do); - } - for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) { - if(my $opt_val = delete $last_info->{$sql_maker_opt}) { - $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val; - } - } - - # Get rid of any trailing empty hashref - pop(@$info) if !keys %$last_info; - } - - $self->_connect_info($info); -} - sub _populate_dbh { my ($self) = @_; my @info = @{$self->_connect_info || []}; @@ -715,6 +716,7 @@ sub _connect { $dbh = DBI->connect(@info); $dbh->{RaiseError} = 1; $dbh->{PrintError} = 0; + $dbh->{PrintWarn} = 0; } }; @@ -727,90 +729,69 @@ sub _connect { $dbh; } -=head2 txn_begin - -Calls begin_work on the current dbh. - -See L for the txn_do() method, which allows for -an entire code block to be executed transactionally. - -=cut +sub _dbh_txn_begin { + my ($self, $dbh) = @_; + if ($dbh->{AutoCommit}) { + $self->debugobj->txn_begin() + if ($self->debug); + $dbh->begin_work; + } +} sub txn_begin { my $self = shift; - if ($self->{transaction_depth}++ == 0) { - $self->dbh_do(sub { - my $dbh = shift; - if ($dbh->{AutoCommit}) { - $self->debugobj->txn_begin() - if ($self->debug); - $dbh->begin_work; - } - }); - } + $self->dbh_do($self->can('_dbh_txn_begin')) + if $self->{transaction_depth}++ == 0; } -=head2 txn_commit - -Issues a commit against the current dbh. 
- -=cut +sub _dbh_txn_commit { + my ($self, $dbh) = @_; + if ($self->{transaction_depth} == 0) { + unless ($dbh->{AutoCommit}) { + $self->debugobj->txn_commit() + if ($self->debug); + $dbh->commit; + } + } + else { + if (--$self->{transaction_depth} == 0) { + $self->debugobj->txn_commit() + if ($self->debug); + $dbh->commit; + } + } +} sub txn_commit { my $self = shift; - $self->dbh_do(sub { - my $dbh = shift; - if ($self->{transaction_depth} == 0) { - unless ($dbh->{AutoCommit}) { - $self->debugobj->txn_commit() - if ($self->debug); - $dbh->commit; - } + $self->dbh_do($self->can('_dbh_txn_commit')); +} + +sub _dbh_txn_rollback { + my ($self, $dbh) = @_; + if ($self->{transaction_depth} == 0) { + unless ($dbh->{AutoCommit}) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; + } + } + else { + if (--$self->{transaction_depth} == 0) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; } else { - if (--$self->{transaction_depth} == 0) { - $self->debugobj->txn_commit() - if ($self->debug); - $dbh->commit; - } + die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new; } - }); + } } -=head2 txn_rollback - -Issues a rollback against the current dbh. A nested rollback will -throw a L exception, -which allows the rollback to propagate to the outermost transaction. - -=cut - sub txn_rollback { my $self = shift; - eval { - $self->dbh_do(sub { - my $dbh = shift; - if ($self->{transaction_depth} == 0) { - unless ($dbh->{AutoCommit}) { - $self->debugobj->txn_rollback() - if ($self->debug); - $dbh->rollback; - } - } - else { - if (--$self->{transaction_depth} == 0) { - $self->debugobj->txn_rollback() - if ($self->debug); - $dbh->rollback; - } - else { - die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new; - } - } - }); - }; - + eval { $self->dbh_do($self->can('_dbh_txn_rollback')) }; if ($@) { my $error = $@; my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION"; @@ -820,22 +801,31 @@ sub txn_rollback { } } -sub _execute { +# This used to be the top-half of _execute. It was split out to make it +# easier to override in NoBindVars without duping the rest. It takes up +# all of _execute's args, and emits $sql, @bind. +sub _prep_for_execute { my ($self, $op, $extra_bind, $ident, @args) = @_; + my ($sql, @bind) = $self->sql_maker->$op($ident, @args); unshift(@bind, @$extra_bind) if $extra_bind; + @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args + + return ($sql, @bind); +} + +sub _execute { + my $self = shift; + + my ($sql, @bind) = $self->_prep_for_execute(@_); + if ($self->debug) { my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind; $self->debugobj->query_start($sql, @debug_bind); } - my $sth = eval { $self->sth($sql,$op) }; - if (!$sth || $@) { - $self->throw_exception( - 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql" - ); - } - @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args + my $sth = $self->sth($sql); + my $rv; if ($sth) { my $time = time(); @@ -864,6 +854,54 @@ sub insert { return $to_insert; } +## Still not quite perfect, and EXPERIMENTAL +## Currently it is assumed that all values passed will be "normal", i.e. not +## scalar refs, or at least, all the same type as the first set, the statement is +## only prepped once. 
+sub insert_bulk { + my ($self, $table, $cols, $data) = @_; + my %colvalues; + @colvalues{@$cols} = (0..$#$cols); + my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues); +# print STDERR "BIND".Dumper(\@bind); + + if ($self->debug) { + my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind; + $self->debugobj->query_start($sql, @debug_bind); + } + my $sth = $self->sth($sql); + +# @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args + + my $rv; + ## This must be an arrayref, else nothing works! + my $tuple_status = []; +# use Data::Dumper; +# print STDERR Dumper($data); + if ($sth) { + my $time = time(); + $rv = eval { $sth->execute_array({ ArrayTupleFetch => sub { my $values = shift @$data; return if !$values; return [ @{$values}[@bind] ]}, + ArrayTupleStatus => $tuple_status }) }; +# print STDERR Dumper($tuple_status); +# print STDERR "RV: $rv\n"; + if ($@ || !defined $rv) { + my $errors = ''; + foreach my $tuple (@$tuple_status) + { + $errors .= "\n" . $tuple->[1] if(ref $tuple); + } + $self->throw_exception("Error executing '$sql': ".($@ || $errors)); + } + } else { + $self->throw_exception("'$sql' did not generate a statement."); + } + if ($self->debug) { + my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind; + $self->debugobj->query_end($sql, @debug_bind); + } + return (wantarray ? ($rv, $sth, @bind) : $rv); +} + sub update { return shift->_execute('update' => [], @_); } @@ -899,6 +937,12 @@ sub _select { =head2 select +=over 4 + +=item Arguments: $ident, $select, $condition, $attrs + +=back + Handle a SQL select statement. =cut @@ -909,94 +953,101 @@ sub select { return $self->cursor->new($self, \@_, $attrs); } -=head2 select_single - -Performs a select, fetch and return of data - handles a single row -only. - -=cut - -# Need to call finish() to work round broken DBDs - sub select_single { my $self = shift; my ($rv, $sth, @bind) = $self->_select(@_); my @row = $sth->fetchrow_array; + # Need to call finish() to work round broken DBDs $sth->finish(); return @row; } =head2 sth +=over 4 + +=item Arguments: $sql + +=back + Returns a L sth (statement handle) for the supplied SQL. =cut -sub sth { - my ($self, $sql) = @_; - # 3 is the if_active parameter which avoids active sth re-use - return $self->dbh_do(sub { shift->prepare_cached($sql, {}, 3) }); -} +sub _dbh_sth { + my ($self, $dbh, $sql) = @_; -=head2 columns_info_for + # 3 is the if_active parameter which avoids active sth re-use + my $sth = $self->disable_sth_caching + ? $dbh->prepare($sql) + : $dbh->prepare_cached($sql, {}, 3); -Returns database type info for a given table columns. + $self->throw_exception( + 'no sth generated via sql (' . ($@ || $dbh->errstr) . "): $sql" + ) if !$sth; -=cut + $sth; +} -sub columns_info_for { - my ($self, $table) = @_; +sub sth { + my ($self, $sql) = @_; + $self->dbh_do($self->can('_dbh_sth'), $sql); +} - $self->dbh_do(sub { - my $dbh = shift; - - if ($dbh->can('column_info')) { - my %result; - eval { - my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table); - my $sth = $dbh->column_info( undef,$schema, $tab, '%' ); - $sth->execute(); - while ( my $info = $sth->fetchrow_hashref() ){ - my %column_info; - $column_info{data_type} = $info->{TYPE_NAME}; - $column_info{size} = $info->{COLUMN_SIZE}; - $column_info{is_nullable} = $info->{NULLABLE} ? 
1 : 0; - $column_info{default_value} = $info->{COLUMN_DEF}; - my $col_name = $info->{COLUMN_NAME}; - $col_name =~ s/^\"(.*)\"$/$1/; - - $result{$col_name} = \%column_info; - } - }; - return \%result if !$@; - } +sub _dbh_columns_info_for { + my ($self, $dbh, $table) = @_; + if ($dbh->can('column_info')) { my %result; - my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0"); - $sth->execute; - my @columns = @{$sth->{NAME_lc}}; - for my $i ( 0 .. $#columns ){ - my %column_info; - my $type_num = $sth->{TYPE}->[$i]; - my $type_name; - if(defined $type_num && $dbh->can('type_info')) { - my $type_info = $dbh->type_info($type_num); - $type_name = $type_info->{TYPE_NAME} if $type_info; + eval { + my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table); + my $sth = $dbh->column_info( undef,$schema, $tab, '%' ); + $sth->execute(); + while ( my $info = $sth->fetchrow_hashref() ){ + my %column_info; + $column_info{data_type} = $info->{TYPE_NAME}; + $column_info{size} = $info->{COLUMN_SIZE}; + $column_info{is_nullable} = $info->{NULLABLE} ? 1 : 0; + $column_info{default_value} = $info->{COLUMN_DEF}; + my $col_name = $info->{COLUMN_NAME}; + $col_name =~ s/^\"(.*)\"$/$1/; + + $result{$col_name} = \%column_info; } - $column_info{data_type} = $type_name ? $type_name : $type_num; - $column_info{size} = $sth->{PRECISION}->[$i]; - $column_info{is_nullable} = $sth->{NULLABLE}->[$i] ? 1 : 0; + }; + return \%result if !$@ && scalar keys %result; + } - if ($column_info{data_type} =~ m/^(.*?)\((.*?)\)$/) { - $column_info{data_type} = $1; - $column_info{size} = $2; - } + my %result; + my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0"); + $sth->execute; + my @columns = @{$sth->{NAME_lc}}; + for my $i ( 0 .. $#columns ){ + my %column_info; + my $type_num = $sth->{TYPE}->[$i]; + my $type_name; + if(defined $type_num && $dbh->can('type_info')) { + my $type_info = $dbh->type_info($type_num); + $type_name = $type_info->{TYPE_NAME} if $type_info; + } + $column_info{data_type} = $type_name ? $type_name : $type_num; + $column_info{size} = $sth->{PRECISION}->[$i]; + $column_info{is_nullable} = $sth->{NULLABLE}->[$i] ? 1 : 0; - $result{$columns[$i]} = \%column_info; + if ($column_info{data_type} =~ m/^(.*?)\((.*?)\)$/) { + $column_info{data_type} = $1; + $column_info{size} = $2; } - return \%result; - }); + $result{$columns[$i]} = \%column_info; + } + + return \%result; +} + +sub columns_info_for { + my ($self, $table) = @_; + $self->dbh_do($self->can('_dbh_columns_info_for'), $table); } =head2 last_insert_id @@ -1005,10 +1056,15 @@ Return the row id of the last insert. =cut +sub _dbh_last_insert_id { + my ($self, $dbh, $source, $col) = @_; + # XXX This is a SQLite-ism as a default... is there a DBI-generic way? + $dbh->func('last_insert_rowid'); +} + sub last_insert_id { - my ($self, $row) = @_; - - $self->dbh_do(sub { shift->func('last_insert_rowid') }); + my $self = shift; + $self->dbh_do($self->can('_dbh_last_insert_id'), @_); } =head2 sqlt_type @@ -1017,17 +1073,17 @@ Returns the database driver name. =cut -sub sqlt_type { shift->dbh_do(sub { shift->{Driver}->{Name} }) } +sub sqlt_type { shift->dbh->{Driver}->{Name} } =head2 create_ddl_dir (EXPERIMENTAL) =over 4 -=item Arguments: $schema \@databases, $version, $directory, $sqlt_args +=item Arguments: $schema \@databases, $version, $directory, $preversion, $sqlt_args =back -Creates an SQL file based on the Schema, for each of the specified +Creates a SQL file based on the Schema, for each of the specified database types, in the given directory. 
Note that this feature is currently EXPERIMENTAL and may not work correctly @@ -1037,7 +1093,7 @@ across all databases, or fully handle complex relationships. sub create_ddl_dir { - my ($self, $schema, $databases, $version, $dir, $sqltargs) = @_; + my ($self, $schema, $databases, $version, $dir, $preversion, $sqltargs) = @_; if(!$dir || !-d $dir) { @@ -1050,14 +1106,18 @@ sub create_ddl_dir $sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} }; eval "use SQL::Translator"; - $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@; + $self->throw_exception("Can't create a ddl file without SQL::Translator: $@") if $@; - my $sqlt = SQL::Translator->new($sqltargs); + my $sqlt = SQL::Translator->new({ +# debug => 1, + add_drop_table => 1, + }); foreach my $db (@$databases) { $sqlt->reset(); $sqlt->parser('SQL::Translator::Parser::DBIx::Class'); # $sqlt->parser_args({'DBIx::Class' => $schema); + $sqlt = $self->configure_sqlt($sqlt, $db); $sqlt->data($schema); $sqlt->producer($db); @@ -1065,30 +1125,119 @@ sub create_ddl_dir my $filename = $schema->ddl_filename($db, $dir, $version); if(-e $filename) { - $self->throw_exception("$filename already exists, skipping $db"); + warn("$filename already exists, skipping $db"); next; } - open($file, ">$filename") - or $self->throw_exception("Can't open $filename for writing ($!)"); + my $output = $sqlt->translate; -#use Data::Dumper; -# print join(":", keys %{$schema->source_registrations}); -# print Dumper($sqlt->schema); if(!$output) { - $self->throw_exception("Failed to translate to $db. (" . $sqlt->error . ")"); + warn("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); next; } + if(!open($file, ">$filename")) + { + $self->throw_exception("Can't open $filename for writing ($!)"); + next; + } print $file $output; close($file); + + if($preversion) + { + eval "use SQL::Translator::Diff"; + if($@) + { + warn("Can't diff versions without SQL::Translator::Diff: $@"); + next; + } + + my $prefilename = $schema->ddl_filename($db, $dir, $preversion); +# print "Previous version $prefilename\n"; + if(!-e $prefilename) + { + warn("No previous schema file found ($prefilename)"); + next; + } + #### We need to reparse the SQLite file we just wrote, so that + ## Diff doesnt get all confoosed, and Diff is *very* confused. + ## FIXME: rip Diff to pieces! 
+#      my $target_schema = $sqlt->schema;
+#      unless ( $target_schema->name ) {
+#        $target_schema->name( $filename );
+#      }
+      my @input;
+      push @input, {file => $prefilename, parser => $db};
+      push @input, {file => $filename, parser => $db};
+      my ( $source_schema, $source_db, $target_schema, $target_db ) = map {
+        my $file   = $_->{'file'};
+        my $parser = $_->{'parser'};
+
+        my $t = SQL::Translator->new;
+        $t->debug( 0 );
+        $t->trace( 0 );
+        $t->parser( $parser )            or die $t->error;
+        my $out = $t->translate( $file ) or die $t->error;
+        my $schema = $t->schema;
+        unless ( $schema->name ) {
+          $schema->name( $file );
+        }
+        ($schema, $parser);
+      } @input;
+
+      my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db,
+                                                    $target_schema, $db,
+                                                    {}
+                                                   );
+      my $difffile = $schema->ddl_filename($db, $dir, $version, $preversion);
+      print STDERR "Diff: $difffile: $db, $dir, $version, $preversion \n";
+      if(-e $difffile)
+      {
+        warn("$difffile already exists, skipping");
+        next;
+      }
+      if(!open $file, ">$difffile")
+      {
+        $self->throw_exception("Can't write to $difffile ($!)");
+        next;
+      }
+      print $file $diff;
+      close($file);
+    }
  }
+}
+sub configure_sqlt() {
+  my $self = shift;
+  my $tr = shift;
+  my $db = shift || $self->sqlt_type;
+  if ($db eq 'PostgreSQL') {
+    $tr->quote_table_names(0);
+    $tr->quote_field_names(0);
+  }
+  return $tr;
}

=head2 deployment_statements

-Create the statements for L</deploy> and
-L<DBIx::Class::Schema/deploy>.
+=over 4
+
+=item Arguments: $schema, $type, $version, $directory, $sqlt_args
+
+=back
+
+Returns the statements used by L</deploy> and L<DBIx::Class::Schema/deploy>.
+The database driver name is given by C<$type>, though the value from
+L</sqlt_type> is used if it is not specified.
+
+C<$directory> is used to return statements from files in a previously created
+L</create_ddl_dir> directory and is optional. The filenames are constructed
+from L<DBIx::Class::Schema/ddl_filename>, the schema name and the C<$version>.
+
+If no C<$directory> is specified then the statements are constructed on the
+fly using L<SQL::Translator> and C<$version> is ignored.
+
+See L<SQL::Translator/METHODS> for a list of values for C<$sqlt_args>.

=cut

@@ -1099,6 +1248,17 @@ sub deployment_statements {
  $type ||= $self->sqlt_type;
  $version ||= $schema->VERSION || '1.x';
  $dir ||= './';
+  my $filename = $schema->ddl_filename($type, $dir, $version);
+  if(-f $filename)
+  {
+      my $file;
+      open($file, "<$filename")
+        or $self->throw_exception("Can't open $filename ($!)");
+      my @rows = <$file>;
+      close($file);
+      return join('', @rows);
+  }
+
  eval "use SQL::Translator";
  if(!$@)
  {
@@ -1111,34 +1271,14 @@ sub deployment_statements {
    return "SQL::Translator::Producer::${type}"->can('produce')->($tr);
  }

-  my $filename = $schema->ddl_filename($type, $dir, $version);
-  if(!-f $filename)
-  {
-# $schema->create_ddl_dir([ $type ], $version, $dir, $sqltargs);
-    $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy");
-    return;
-  }
-  my $file;
-  open($file, "<$filename")
-    or $self->throw_exception("Can't open $filename ($!)");
-  my @rows = <$file>;
-  close($file);
-
-  return join('', @rows);
-
-}
-
-=head2 deploy
-
-Sends the appropriate statements to create or modify tables to the
-db. This would normally be called through
-L<DBIx::Class::Schema/deploy>.
+ $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy"); + return; -=cut +} sub deploy { - my ($self, $schema, $type, $sqltargs) = @_; - foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, { no_comments => 1, %{ $sqltargs || {} } } ) ) { + my ($self, $schema, $type, $sqltargs, $dir) = @_; + foreach my $statement ( $self->deployment_statements($schema, $type, undef, $dir, { no_comments => 1, %{ $sqltargs || {} } } ) ) { for ( split(";\n", $statement)) { next if($_ =~ /^--/); next if(!$_); @@ -1190,7 +1330,6 @@ sub build_datetime_parser { sub DESTROY { my $self = shift; return if !$self->_dbh; - $self->_verify_pid; $self->_dbh(undef); } @@ -1232,25 +1371,6 @@ For setting, this method is deprecated in favor of L. =back -=head1 ENVIRONMENT VARIABLES - -=head2 DBIC_TRACE - -If C is set then SQL trace information -is produced (as when the L method is set). - -If the value is of the form C<1=/path/name> then the trace output is -written to the file C. - -This environment variable is checked when the storage object is first -created (when you call connect on your schema). So, run-time changes -to this environment variable will not take effect unless you also -re-connect on your schema. - -=head2 DBIX_CLASS_STORAGE_DBI_DEBUG - -Old name for DBIC_TRACE - =head1 AUTHORS Matt S. Trout @@ -1262,4 +1382,3 @@ Andy Grundman You may distribute this code under the same terms as Perl itself. =cut -
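For reference, a minimal usage sketch of the features this patch introduces: the storage-level C<connect_info> options, the new C<dbh_do> calling convention, and the EXPERIMENTAL C<insert_bulk> method. The schema class, DSN, table and column names below are illustrative placeholders rather than anything defined by the patch.

  # Storage options (on_connect_do, disable_sth_caching) now ride in the
  # trailing hashref of connect_info, after the usual DBI arguments.
  my $schema = My::Schema->connect(
    'dbi:Pg:dbname=mydb', 'user', 'pass',
    { AutoCommit => 1 },
    {
      on_connect_do       => [ 'SET search_path TO myschema,public' ],
      disable_sth_caching => 1,
    },
  );

  # dbh_do passes the storage object and the $dbh to the coderef, forwards
  # any extra arguments verbatim, and retries once after reconnecting if
  # the connection turns out to be dead.
  my $count = $schema->storage->dbh_do(
    sub {
      my ($storage, $dbh, $table) = @_;
      ($dbh->selectrow_array("SELECT COUNT(*) FROM $table"))[0];
    },
    'artist',
  );

  # insert_bulk (EXPERIMENTAL) prepares the INSERT once and executes it for
  # every row via DBI's execute_array; each row is an arrayref of values in
  # the same order as the column list.
  $schema->storage->insert_bulk(
    'artist',
    [ qw/artistid name/ ],
    [ [ 1, 'Caterwauler McCrae' ],
      [ 2, 'Random Boy Band'    ] ],
  );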