X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=37a083d434761cb95ffe978e4d790f60025d3b1c;hb=382a78385abc2213ea09b3f6c6732694bf522e63;hp=343b657aa4a73b7382ecda1bb1785e18a5f7eba4;hpb=5642f1ec77b5215598728ae7e5188f3bb1b4b318;p=dbsrgits%2FDBIx-Class-Historic.git diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 343b657..37a083d 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -10,16 +10,69 @@ use SQL::Abstract::Limit; use DBIx::Class::Storage::DBI::Cursor; use DBIx::Class::Storage::Statistics; use IO::File; -use Carp::Clan qw/DBIx::Class/; + +__PACKAGE__->mk_group_accessors( + 'simple' => + qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid + cursor on_connect_do transaction_depth/ +); + BEGIN { package DBIC::SQL::Abstract; # Would merge upstream, but nate doesn't reply :( use base qw/SQL::Abstract::Limit/; +# This prevents the caching of $dbh in S::A::L, I believe +sub new { + my $self = shift->SUPER::new(@_); + + # If limit_dialect is a ref (like a $dbh), go ahead and replace + # it with what it resolves to: + $self->{limit_dialect} = $self->_find_syntax($self->{limit_dialect}) + if ref $self->{limit_dialect}; + + $self; +} + +sub _RowNumberOver { + my ($self, $sql, $order, $rows, $offset ) = @_; + + $offset += 1; + my $last = $rows + $offset; + my ( $order_by ) = $self->_order_by( $order ); + + $sql = <<""; +SELECT * FROM +( + SELECT Q1.*, ROW_NUMBER() OVER( ) AS ROW_NUM FROM ( + $sql + $order_by + ) Q1 +) Q2 +WHERE ROW_NUM BETWEEN $offset AND $last + + return $sql; +} + + +# While we're at it, this should make LIMIT queries more efficient, +# without digging into things too deeply +sub _find_syntax { + my ($self, $syntax) = @_; + my $dbhname = ref $syntax eq 'HASH' ? $syntax->{Driver}{Name} : ''; + if(ref($self) && $dbhname && $dbhname eq 'DB2') { + return 'RowNumberOver'; + } + + $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax); +} + sub select { my ($self, $table, $fields, $where, $order, @rest) = @_; $table = $self->_quote($table) unless ref($table); + local $self->{rownum_hack_count} = 1 + if (defined $rest[0] && $self->{limit_dialect} eq 'RowNum'); @rest = (-1) unless defined $rest[0]; die "LIMIT 0 Does Not Compute" if $rest[0] == 0; # and anyway, SQL::Abstract::Limit will cause a barf if we don't first @@ -67,7 +120,12 @@ sub _recurse_fields { return $$fields if $ref eq 'SCALAR'; if ($ref eq 'ARRAY') { - return join(', ', map { $self->_recurse_fields($_) } @$fields); + return join(', ', map { + $self->_recurse_fields($_) + .(exists $self->{rownum_hack_count} + ? 
' AS col'.$self->{rownum_hack_count}++ + : '') + } @$fields); } elsif ($ref eq 'HASH') { foreach my $func (keys %$fields) { return $self->_sqlcase($func) @@ -92,10 +150,18 @@ sub _order_by { $ret .= $self->_sqlcase(' having ').$frag; } if (defined $_[0]->{order_by}) { - $ret .= $self->SUPER::_order_by($_[0]->{order_by}); + $ret .= $self->_order_by($_[0]->{order_by}); } - } elsif(ref $_[0] eq 'SCALAR') { + } elsif (ref $_[0] eq 'SCALAR') { $ret = $self->_sqlcase(' order by ').${ $_[0] }; + } elsif (ref $_[0] eq 'ARRAY' && @{$_[0]}) { + my @order = @{+shift}; + $ret = $self->_sqlcase(' order by ') + .join(', ', map { + my $r = $self->_order_by($_, @_); + $r =~ s/^ ?ORDER BY //i; + $r; + } @order); } else { $ret = $self->SUPER::_order_by(@_); } @@ -133,8 +199,9 @@ sub _recurse_from { # check whether a join type exists my $join_clause = ''; - if (ref($to) eq 'HASH' and exists($to->{-join_type})) { - $join_clause = ' '.uc($to->{-join_type}).' JOIN '; + my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to; + if (ref($to_jt) eq 'HASH' and exists($to_jt->{-join_type})) { + $join_clause = ' '.uc($to_jt->{-join_type}).' JOIN '; } else { $join_clause = ' JOIN '; } @@ -195,18 +262,6 @@ sub _quote { return $self->SUPER::_quote($label); } -sub _RowNum { - my $self = shift; - my $c; - $_[0] =~ s/SELECT (.*?) FROM/ - 'SELECT '.join(', ', map { $_.' AS col'.++$c } split(', ', $1)).' FROM'/e; - $self->SUPER::_RowNum(@_); -} - -# Accessor for setting limit dialect. This is useful -# for JDBC-bridge among others where the remote SQL-dialect cannot -# be determined by the name of the driver alone. -# sub limit_dialect { my $self = shift; $self->{limit_dialect} = shift if @_; @@ -227,112 +282,301 @@ sub name_sep { } # End of BEGIN block -use base qw/DBIx::Class/; +=head1 NAME + +DBIx::Class::Storage::DBI - DBI storage handler + +=head1 SYNOPSIS + +=head1 DESCRIPTION -__PACKAGE__->load_components(qw/AccessorGroup/); +This class represents the connection to an RDBMS via L. See +L for general information. This pod only +documents DBI-specific methods and behaviors. -__PACKAGE__->mk_group_accessors('simple' => - qw/_connect_info _dbh _sql_maker _conn_pid _conn_tid debug debugobj - cursor on_connect_do transaction_depth/); +=head1 METHODS + +=cut sub new { - my $new = bless({}, ref $_[0] || $_[0]); + my $new = shift->next::method(@_); + $new->cursor("DBIx::Class::Storage::DBI::Cursor"); $new->transaction_depth(0); + $new->_sql_maker_opts({}); - $new->debugobj(new DBIx::Class::Storage::Statistics()); + $new; +} - my $fh; - if (defined($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}) && - ($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} =~ /=(.+)$/)) { - $fh = IO::File->new($1, 'w') - or $new->throw_exception("Cannot open trace file $1"); - } else { - $fh = IO::File->new('>&STDERR'); +=head2 connect_info + +The arguments of C are always a single array reference. + +This is normally accessed via L, which +encapsulates its argument list in an arrayref before calling +C here. + +The arrayref can either contain the same set of arguments one would +normally pass to L, or a lone code reference which returns +a connected database handle. + +In either case, if the final argument in your connect_info happens +to be a hashref, C will look there for several +connection-specific options: + +=over 4 + +=item on_connect_do + +This can be set to an arrayref of literal sql statements, which will +be executed immediately after making the connection to the database +every time we [re-]connect. + +=item limit_dialect + +Sets the limit dialect. 
This is useful for JDBC-bridge among others +where the remote SQL-dialect cannot be determined by the name of the +driver alone. + +=item quote_char + +Specifies what characters to use to quote table and column names. If +you use this you will want to specify L as well. + +quote_char expects either a single character, in which case is it is placed +on either side of the table/column, or an arrayref of length 2 in which case the +table/column name is placed between the elements. + +For example under MySQL you'd use C '`'>, and user SQL Server you'd +use C [qw/[ ]/]>. + +=item name_sep + +This only needs to be used in conjunction with L, and is used to +specify the charecter that seperates elements (schemas, tables, columns) from +each other. In most cases this is simply a C<.>. + +=back + +These options can be mixed in with your other L connection attributes, +or placed in a seperate hashref after all other normal L connection +arguments. + +Every time C is invoked, any previous settings for +these options will be cleared before setting the new ones, regardless of +whether any options are specified in the new C. + +Important note: DBIC expects the returned database handle provided by +a subref argument to have RaiseError set on it. If it doesn't, things +might not work very well, YMMV. If you don't use a subref, DBIC will +force this setting for you anyways. Setting HandleError to anything +other than simple exception object wrapper might cause problems too. + +Examples: + + # Simple SQLite connection + ->connect_info([ 'dbi:SQLite:./foo.db' ]); + + # Connect via subref + ->connect_info([ sub { DBI->connect(...) } ]); + + # A bit more complicated + ->connect_info( + [ + 'dbi:Pg:dbname=foo', + 'postgres', + 'my_pg_password', + { AutoCommit => 0 }, + { quote_char => q{"}, name_sep => q{.} }, + ] + ); + + # Equivalent to the previous example + ->connect_info( + [ + 'dbi:Pg:dbname=foo', + 'postgres', + 'my_pg_password', + { AutoCommit => 0, quote_char => q{"}, name_sep => q{.} }, + ] + ); + + # Subref + DBIC-specific connection options + ->connect_info( + [ + sub { DBI->connect(...) }, + { + quote_char => q{`}, + name_sep => q{@}, + on_connect_do => ['SET search_path TO myschema,otherschema,public'], + }, + ] + ); + +=cut + +sub connect_info { + my ($self, $info_arg) = @_; + + return $self->_connect_info if !$info_arg; + + # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only + # the new set of options + $self->_sql_maker(undef); + $self->_sql_maker_opts({}); + + my $info = [ @$info_arg ]; # copy because we can alter it + my $last_info = $info->[-1]; + if(ref $last_info eq 'HASH') { + if(my $on_connect_do = delete $last_info->{on_connect_do}) { + $self->on_connect_do($on_connect_do); + } + for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) { + if(my $opt_val = delete $last_info->{$sql_maker_opt}) { + $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val; + } + } + + # Get rid of any trailing empty hashref + pop(@$info) if !keys %$last_info; } - $new->debugobj->debugfh($fh); - $new->debug(1) if $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}; - return $new; -} -sub throw_exception { - my ($self, $msg) = @_; - croak($msg); + $self->_connect_info($info); } -=head1 NAME +=head2 on_connect_do -DBIx::Class::Storage::DBI - DBI storage handler +This method is deprecated in favor of setting via L. -=head1 SYNOPSIS +=head2 dbh_do -=head1 DESCRIPTION +Arguments: $subref, @extra_coderef_args? 
-This class represents the connection to the database +Execute the given subref with the underlying database handle as its +first argument, using the new exception-based connection management. -=head1 METHODS +Any additional arguments will be passed verbatim to the called subref +as arguments 2 and onwards. + +Example: + + my @stuff = $schema->storage->dbh_do( + sub { + my $dbh = shift; + my $cols = join(q{, }, @_); + shift->selectrow_array("SELECT $cols FROM foo") + }, + @column_list + ); =cut -=head2 connect_info +sub dbh_do { + my $self = shift; + my $coderef = shift; -Connection information arrayref. Can either be the same arguments -one would pass to DBI->connect, or a code-reference which returns -a connected database handle. In either case, there is an optional -final element in the arrayref, which can hold a hashref of -connection-specific Storage::DBI options. These include -C, and the sql_maker options C, -C, and C. Examples: + return $coderef->($self->_dbh, @_) if $self->{_in_txn_do}; - ->connect_info([ 'dbi:SQLite:./foo.db' ]); - ->connect_info(sub { DBI->connect(...) }); - ->connect_info([ 'dbi:Pg:dbname=foo', - 'postgres', - '', - { AutoCommit => 0 }, - { quote_char => q{`}, name_sep => q{@} }, - ]); + ref $coderef eq 'CODE' or $self->throw_exception + ('$coderef must be a CODE reference'); -=head2 on_connect_do + my @result; + my $want_array = wantarray; -Executes the sql statements given as a listref on every db connect. + eval { + $self->_verify_pid if $self->_dbh; + $self->_populate_dbh if !$self->_dbh; + if($want_array) { + @result = $coderef->($self->_dbh, @_); + } + elsif(defined $want_array) { + $result[0] = $coderef->($self->_dbh, @_); + } + else { + $coderef->($self->_dbh, @_); + } + }; -=head2 debug + my $exception = $@; + if(!$exception) { return $want_array ? @result : $result[0] } -Causes SQL trace information to be emitted on the C object. -(or C if C has not specifically been set). + $self->throw_exception($exception) if $self->connected; -=head2 debugfh + # We were not connected - reconnect and retry, but let any + # exception fall right through this time + $self->_populate_dbh; + $coderef->($self->_dbh, @_); +} -Set or retrieve the filehandle used for trace/debug output. This should be -an IO::Handle compatible ojbect (only the C method is used. Initially -set to be STDERR - although see information on the -L environment variable. +# This is basically a blend of dbh_do above and DBIx::Class::Storage::txn_do. +# It also informs dbh_do to bypass itself while under the direction of txn_do, +# via $self->{_in_txn_do} (this saves some redundant eval and errorcheck, etc) +sub txn_do { + my $self = shift; + my $coderef = shift; -=head2 debugobj + ref $coderef eq 'CODE' or $self->throw_exception + ('$coderef must be a CODE reference'); -Sets or retrieves the object used for metric collection. Defaults to an instance -of L that is campatible with the original -method of using a coderef as a callback. See the aforementioned Statistics -class for more information. + local $self->{_in_txn_do} = 1; -=head2 debugcb + my $tried = 0; -Sets a callback to be executed each time a statement is run; takes a sub -reference. Callback is executed as $sub->($op, $info) where $op is -SELECT/INSERT/UPDATE/DELETE and $info is what would normally be printed. + my @result; + my $want_array = wantarray; -See L for a better way. 
+ START_TXN: eval { + $self->_verify_pid if $self->_dbh; + $self->_populate_dbh if !$self->_dbh; -=cut -sub debugcb { - my $self = shift(); + $self->txn_begin; + if($want_array) { + @result = $coderef->(@_); + } + elsif(defined $want_array) { + $result[0] = $coderef->(@_); + } + else { + $coderef->(@_); + } + $self->txn_commit; + }; - if($self->debugobj()->can('callback')) { - $self->debugobj()->callback(shift()); + my $exception = $@; + if(!$exception) { return $want_array ? @result : $result[0] } + + if($tried++ > 0 || $self->connected) { + eval { $self->txn_rollback }; + my $rollback_exception = $@; + if($rollback_exception) { + my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION"; + $self->throw_exception($exception) # propagate nested rollback + if $rollback_exception =~ /$exception_class/; + + $self->throw_exception( + "Transaction aborted: ${exception}. " + . "Rollback failed: ${rollback_exception}" + ); } + $self->throw_exception($exception) + } + + # We were not connected, and was first try - reconnect and retry + # XXX I know, gotos are evil. If you can find a better way + # to write this that doesn't duplicate a lot of code/structure, + # and behaves identically, feel free... + + $self->_populate_dbh; + goto START_TXN; } +=head2 disconnect + +Our C method also performs a rollback first if the +database is not in C mode. + +=cut + sub disconnect { my ($self) = @_; @@ -348,13 +592,10 @@ sub connected { if(my $dbh = $self->_dbh) { if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) { - $self->_sql_maker(undef); return $self->_dbh(undef); } - elsif($self->_conn_pid != $$) { - $self->_dbh->{InactiveDestroy} = 1; - $self->_sql_maker(undef); - return $self->_dbh(undef) + else { + $self->_verify_pid; } return ($dbh->FETCH('Active') && $dbh->ping); } @@ -362,6 +603,19 @@ sub connected { return 0; } +# handle pid changes correctly +# NOTE: assumes $self->_dbh is a valid $dbh +sub _verify_pid { + my ($self) = @_; + + return if $self->_conn_pid == $$; + + $self->_dbh->{InactiveDestroy} = 1; + $self->_dbh(undef); + + return; +} + sub ensure_connected { my ($self) = @_; @@ -386,7 +640,7 @@ sub dbh { sub _sql_maker_args { my ($self) = @_; - return ( limit_dialect => $self->dbh ); + return ( limit_dialect => $self->dbh, %{$self->_sql_maker_opts} ); } sub sql_maker { @@ -397,47 +651,19 @@ sub sql_maker { return $self->_sql_maker; } -sub connect_info { - my ($self, $info_arg) = @_; - - if($info_arg) { - my $info = [ @$info_arg ]; # copy because we can alter it - my $last_info = $info->[-1]; - if(ref $last_info eq 'HASH') { - my $used; - if(my $on_connect_do = $last_info->{on_connect_do}) { - $used = 1; - $self->on_connect_do($on_connect_do); - } - for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) { - if(my $opt_val = $last_info->{$sql_maker_opt}) { - $used = 1; - $self->sql_maker->$sql_maker_opt($opt_val); - } - } - - # remove our options hashref if it was there, to avoid confusing - # DBI in the case the user didn't use all 4 DBI options, as in: - # [ 'dbi:SQLite:foo.db', { quote_char => q{`} } ] - pop(@$info) if $used; - } - - $self->_connect_info($info); - } - - $self->_connect_info; -} - sub _populate_dbh { my ($self) = @_; my @info = @{$self->_connect_info || []}; $self->_dbh($self->_connect(@info)); - my $driver = $self->_dbh->{Driver}->{Name}; - eval "require DBIx::Class::Storage::DBI::${driver}"; - unless ($@) { - bless $self, "DBIx::Class::Storage::DBI::${driver}"; - $self->_rebless() if $self->can('_rebless'); + + if(ref $self eq 
'DBIx::Class::Storage::DBI') { + my $driver = $self->_dbh->{Driver}->{Name}; + if ($self->load_optional_class("DBIx::Class::Storage::DBI::${driver}")) { + bless $self, "DBIx::Class::Storage::DBI::${driver}"; + $self->_rebless() if $self->can('_rebless'); + } } + # if on-connect sql statements are given execute them foreach my $sql_statement (@{$self->on_connect_do || []}) { $self->debugobj->query_start($sql_statement) if $self->debug(); @@ -464,10 +690,12 @@ sub _connect { eval { if(ref $info[0] eq 'CODE') { - $dbh = &{$info[0]}; + $dbh = &{$info[0]} } else { - $dbh = DBI->connect(@info); + $dbh = DBI->connect(@info); + $dbh->{RaiseError} = 1; + $dbh->{PrintError} = 0; } }; @@ -480,37 +708,24 @@ sub _connect { $dbh; } -=head2 txn_begin - -Calls begin_work on the current dbh. - -See L for the txn_do() method, which allows for -an entire code block to be executed transactionally. - -=cut +sub __txn_begin { + my ($dbh, $self) = @_; + if ($dbh->{AutoCommit}) { + $self->debugobj->txn_begin() + if ($self->debug); + $dbh->begin_work; + } +} sub txn_begin { my $self = shift; - if ($self->{transaction_depth}++ == 0) { - my $dbh = $self->dbh; - if ($dbh->{AutoCommit}) { - $self->debugobj->txn_begin() - if ($self->debug); - $dbh->begin_work; - } - } + $self->dbh_do(\&__txn_begin, $self) + if $self->{transaction_depth}++ == 0; } -=head2 txn_commit - -Issues a commit against the current dbh. - -=cut - -sub txn_commit { - my $self = shift; +sub __txn_commit { + my ($dbh, $self) = @_; if ($self->{transaction_depth} == 0) { - my $dbh = $self->dbh; unless ($dbh->{AutoCommit}) { $self->debugobj->txn_commit() if ($self->debug); @@ -521,43 +736,40 @@ sub txn_commit { if (--$self->{transaction_depth} == 0) { $self->debugobj->txn_commit() if ($self->debug); - $self->dbh->commit; + $dbh->commit; } } } -=head2 txn_rollback - -Issues a rollback against the current dbh. A nested rollback will -throw a L exception, -which allows the rollback to propagate to the outermost transaction. - -=cut - -sub txn_rollback { +sub txn_commit { my $self = shift; + $self->dbh_do(\&__txn_commit, $self); +} - eval { - if ($self->{transaction_depth} == 0) { - my $dbh = $self->dbh; - unless ($dbh->{AutoCommit}) { - $self->debugobj->txn_rollback() - if ($self->debug); - $dbh->rollback; - } +sub __txn_rollback { + my ($dbh, $self) = @_; + if ($self->{transaction_depth} == 0) { + unless ($dbh->{AutoCommit}) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; + } + } + else { + if (--$self->{transaction_depth} == 0) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; } else { - if (--$self->{transaction_depth} == 0) { - $self->debugobj->txn_rollback() - if ($self->debug); - $self->dbh->rollback; - } - else { - die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new; - } + die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new; } - }; + } +} +sub txn_rollback { + my $self = shift; + eval { $self->dbh_do(\&__txn_rollback, $self) }; if ($@) { my $error = $@; my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION"; @@ -578,9 +790,10 @@ sub _execute { my $sth = eval { $self->sth($sql,$op) }; if (!$sth || $@) { - $self->throw_exception('no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql"); + $self->throw_exception( + 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql" + ); } - @bind = map { ref $_ ? 
''.$_ : $_ } @bind; # stringify args my $rv; if ($sth) { @@ -649,39 +862,38 @@ sub select { return $self->cursor->new($self, \@_, $attrs); } -# Need to call finish() to work round broken DBDs - sub select_single { my $self = shift; my ($rv, $sth, @bind) = $self->_select(@_); my @row = $sth->fetchrow_array; + # Need to call finish() to work round broken DBDs $sth->finish(); return @row; } -sub sth { - my ($self, $sql) = @_; - # 3 is the if_active parameter which avoids active sth re-use - return $self->dbh->prepare_cached($sql, {}, 3); -} - -=head2 columns_info_for +=head2 sth -Returns database type info for a given table columns. +Returns a L sth (statement handle) for the supplied SQL. =cut -sub columns_info_for { - my ($self, $table) = @_; +sub __sth { + my ($dbh, $sql) = @_; + # 3 is the if_active parameter which avoids active sth re-use + $dbh->prepare_cached($sql, {}, 3); +} + +sub sth { + my ($self, $sql) = @_; + $self->dbh_do(\&__sth, $sql); +} + - my $dbh = $self->dbh; +sub __columns_info_for { + my ($dbh, $self, $table) = @_; if ($dbh->can('column_info')) { my %result; - my $old_raise_err = $dbh->{RaiseError}; - my $old_print_err = $dbh->{PrintError}; - $dbh->{RaiseError} = 1; - $dbh->{PrintError} = 0; eval { my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table); my $sth = $dbh->column_info( undef,$schema, $tab, '%' ); @@ -698,9 +910,7 @@ sub columns_info_for { $result{$col_name} = \%column_info; } }; - $dbh->{RaiseError} = $old_raise_err; - $dbh->{PrintError} = $old_print_err; - return \%result if !$@; + return \%result if !$@ && scalar keys %result; } my %result; @@ -730,14 +940,46 @@ sub columns_info_for { return \%result; } +sub columns_info_for { + my ($self, $table) = @_; + $self->dbh_do(\&__columns_info_for, $self, $table); +} + +=head2 last_insert_id + +Return the row id of the last insert. + +=cut + sub last_insert_id { my ($self, $row) = @_; - return $self->dbh->func('last_insert_rowid'); - + $self->dbh_do(sub { shift->func('last_insert_rowid') }); } -sub sqlt_type { shift->dbh->{Driver}->{Name} } +=head2 sqlt_type + +Returns the database driver name. + +=cut + +sub sqlt_type { shift->dbh_do(sub { shift->{Driver}->{Name} }) } + +=head2 create_ddl_dir (EXPERIMENTAL) + +=over 4 + +=item Arguments: $schema \@databases, $version, $directory, $sqlt_args + +=back + +Creates an SQL file based on the Schema, for each of the specified +database types, in the given directory. + +Note that this feature is currently EXPERIMENTAL and may not work correctly +across all databases, or fully handle complex relationships. + +=cut sub create_ddl_dir { @@ -751,14 +993,12 @@ sub create_ddl_dir $databases ||= ['MySQL', 'SQLite', 'PostgreSQL']; $databases = [ $databases ] if(ref($databases) ne 'ARRAY'); $version ||= $schema->VERSION || '1.x'; + $sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} }; eval "use SQL::Translator"; $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@; - my $sqlt = SQL::Translator->new({ -# debug => 1, - add_drop_table => 1, - }); + my $sqlt = SQL::Translator->new($sqltargs); foreach my $db (@$databases) { $sqlt->reset(); @@ -791,8 +1031,17 @@ sub create_ddl_dir } +=head2 deployment_statements + +Create the statements for L and +L. 
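A minimal usage sketch (not part of this patch), assuming an already
connected $schema; the directory, version string and option values below
are illustrative only, with the argument order taken from the signatures
visible in this diff:

  # generate DDL files for several database types into ./sql/
  $schema->storage->create_ddl_dir(
    $schema, [qw/MySQL SQLite PostgreSQL/], '1.x', './sql',
  );

  # fetch the raw statements for the connected database type
  # (pass an explicit type such as 'SQLite' to skip ensure_connected)
  my @statements = $schema->storage->deployment_statements(
    $schema, undef, '1.x', './sql',
  );

  # or let deploy() run them against the connected database
  $schema->storage->deploy($schema, undef, {}, './sql');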
+ +=cut + sub deployment_statements { my ($self, $schema, $type, $version, $dir, $sqltargs) = @_; + # Need to be connected to get the correct sqlt_type + $self->ensure_connected() unless $type; $type ||= $self->sqlt_type; $version ||= $schema->VERSION || '1.x'; $dir ||= './'; @@ -826,28 +1075,48 @@ sub deployment_statements { } sub deploy { - my ($self, $schema, $type, $sqltargs) = @_; - foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, $sqltargs) ) { + my ($self, $schema, $type, $sqltargs, $dir) = @_; + foreach my $statement ( $self->deployment_statements($schema, $type, undef, $dir, { no_comments => 1, %{ $sqltargs || {} } } ) ) { for ( split(";\n", $statement)) { next if($_ =~ /^--/); next if(!$_); # next if($_ =~ /^DROP/m); next if($_ =~ /^BEGIN TRANSACTION/m); next if($_ =~ /^COMMIT/m); - $self->debugobj->query_begin($_) if $self->debug; - $self->dbh->do($_) or warn "SQL was:\n $_"; + next if $_ =~ /^\s+$/; # skip whitespace only + $self->debugobj->query_start($_) if $self->debug; + $self->dbh->do($_) or warn "SQL was:\n $_"; # XXX exceptions? $self->debugobj->query_end($_) if $self->debug; } } } +=head2 datetime_parser + +Returns the datetime parser class + +=cut + sub datetime_parser { my $self = shift; return $self->{datetime_parser} ||= $self->build_datetime_parser(@_); } +=head2 datetime_parser_type + +Defines (returns) the datetime parser class - currently hardwired to +L + +=cut + sub datetime_parser_type { "DateTime::Format::MySQL"; } +=head2 build_datetime_parser + +See L + +=cut + sub build_datetime_parser { my $self = shift; my $type = $self->datetime_parser_type(@_); @@ -856,24 +1125,49 @@ sub build_datetime_parser { return $type; } -sub DESTROY { shift->disconnect } +sub DESTROY { + my $self = shift; + return if !$self->_dbh; + $self->_verify_pid; + $self->_dbh(undef); +} 1; -=head1 ENVIRONMENT VARIABLES +=head1 SQL METHODS + +The module defines a set of methods within the DBIC::SQL::Abstract +namespace. These build on L to provide the +SQL query functions. + +The following methods are extended:- + +=over 4 + +=item delete + +=item insert + +=item select + +=item update + +=item limit_dialect + +See L for details. +For setting, this method is deprecated in favor of L. + +=item quote_char -=head2 DBIX_CLASS_STORAGE_DBI_DEBUG +See L for details. +For setting, this method is deprecated in favor of L. -If C is set then SQL trace information -is produced (as when the L method is set). +=item name_sep -If the value is of the form C<1=/path/name> then the trace output is -written to the file C. +See L for details. +For setting, this method is deprecated in favor of L. -This environment variable is checked when the storage object is first -created (when you call connect on your schema). So, run-time changes -to this environment variable will not take effect unless you also -re-connect on your schema. +=back =head1 AUTHORS