use DBIx::Class::Storage::Statistics;
use IO::File;
use Carp::Clan qw/DBIx::Class/;
+
+__PACKAGE__->mk_group_accessors(
+ 'simple' =>
+ qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid
+ cursor on_connect_do transaction_depth/
+);
+
BEGIN {
package DBIC::SQL::Abstract; # Would merge upstream, but nate doesn't reply :(
use base qw/SQL::Abstract::Limit/;
+# This prevents the caching of $dbh in S::A::L, I believe
+sub new {
+ my $self = shift->SUPER::new(@_);
+
+ # If limit_dialect is a ref (like a $dbh), go ahead and replace
+ # it with what it resolves to:
+ $self->{limit_dialect} = $self->_find_syntax($self->{limit_dialect})
+ if ref $self->{limit_dialect};
+
+ $self;
+}
+
+# While we're at it, this should make LIMIT queries more efficient,
+# without digging into things too deeply
+sub _find_syntax {
+ my ($self, $syntax) = @_;
+ $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
+}
+
sub select {
my ($self, $table, $fields, $where, $order, @rest) = @_;
$table = $self->_quote($table) unless ref($table);
+ local $self->{rownum_hack_count} = 1
+ if (defined $rest[0] && $self->{limit_dialect} eq 'RowNum');
@rest = (-1) unless defined $rest[0];
die "LIMIT 0 Does Not Compute" if $rest[0] == 0;
# and anyway, SQL::Abstract::Limit will cause a barf if we don't first
return $$fields if $ref eq 'SCALAR';
if ($ref eq 'ARRAY') {
- return join(', ', map { $self->_recurse_fields($_) } @$fields);
+ return join(', ', map {
+ $self->_recurse_fields($_)
+ .(exists $self->{rownum_hack_count}
+ ? ' AS col'.$self->{rownum_hack_count}++
+ : '')
+ } @$fields);
} elsif ($ref eq 'HASH') {
foreach my $func (keys %$fields) {
return $self->_sqlcase($func)
$ret .= $self->_sqlcase(' having ').$frag;
}
if (defined $_[0]->{order_by}) {
- $ret .= $self->SUPER::_order_by($_[0]->{order_by});
+ $ret .= $self->_order_by($_[0]->{order_by});
}
- } elsif(ref $_[0] eq 'SCALAR') {
+ } elsif (ref $_[0] eq 'SCALAR') {
$ret = $self->_sqlcase(' order by ').${ $_[0] };
+ } elsif (ref $_[0] eq 'ARRAY' && @{$_[0]}) {
+ my @order = @{+shift};
+ $ret = $self->_sqlcase(' order by ')
+ .join(', ', map {
+ my $r = $self->_order_by($_, @_);
+ $r =~ s/^ ?ORDER BY //i;
+ $r;
+ } @order);
} else {
$ret = $self->SUPER::_order_by(@_);
}
return $self->SUPER::_quote($label);
}
-sub _RowNum {
- my $self = shift;
- my $c;
- $_[0] =~ s/SELECT (.*?) FROM/
- 'SELECT '.join(', ', map { $_.' AS col'.++$c } split(', ', $1)).' FROM'/e;
- $self->SUPER::_RowNum(@_);
-}
-
sub limit_dialect {
my $self = shift;
$self->{limit_dialect} = shift if @_;
} # End of BEGIN block
-use base qw/DBIx::Class/;
-
-__PACKAGE__->load_components(qw/AccessorGroup/);
-
-__PACKAGE__->mk_group_accessors('simple' =>
- qw/_connect_info _dbh _sql_maker _conn_pid _conn_tid debug debugobj
- cursor on_connect_do transaction_depth/);
-
=head1 NAME
DBIx::Class::Storage::DBI - DBI storage handler
=head1 DESCRIPTION
-This class represents the connection to the database
+This class represents the connection to an RDBMS via L<DBI>. See
+L<DBIx::Class::Storage> for general information. This pod only
+documents DBI-specific methods and behaviors.
=head1 METHODS
-=head2 new
-
=cut
sub new {
- my $new = bless({}, ref $_[0] || $_[0]);
+ my $new = shift->next::method(@_);
+
$new->cursor("DBIx::Class::Storage::DBI::Cursor");
$new->transaction_depth(0);
+ $new->_sql_maker_opts({});
- $new->debugobj(new DBIx::Class::Storage::Statistics());
-
- my $fh;
- if (defined($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}) &&
- ($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} =~ /=(.+)$/)) {
- $fh = IO::File->new($1, 'w')
- or $new->throw_exception("Cannot open trace file $1");
- } else {
- $fh = IO::File->new('>&STDERR');
- }
- $new->debugfh($fh);
- $new->debug(1) if $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG};
- return $new;
-}
-
-=head2 throw_exception
-
-Throws an exception - croaks.
-
-=cut
-
-sub throw_exception {
- my ($self, $msg) = @_;
- croak($msg);
+ $new;
}
=head2 connect_info
normally pass to L<DBI/connect>, or a lone code reference which returns
a connected database handle.
-In either case, there is an optional final element within the arrayref
-which can hold a hashref of connection-specific Storage::DBI options.
-These include C<on_connect_do>, and the sql_maker options
-C<limit_dialect>, C<quote_char>, and C<name_sep>. Examples:
+In either case, if the final argument in your connect_info happens
+to be a hashref, C<connect_info> will look there for several
+connection-specific options:
+
+=over 4
+
+=item on_connect_do
+
+This can be set to an arrayref of literal sql statements, which will
+be executed immediately after making the connection to the database
+every time we [re-]connect.
+
+=item limit_dialect
+
+Sets the limit dialect. This is useful for JDBC-bridge among others
+where the remote SQL-dialect cannot be determined by the name of the
+driver alone.
+
+=item quote_char
+
+Specifies what characters to use to quote table and column names. If
+you use this you will want to specify L<name_sep> as well.
+
+quote_char expects either a single character, in which case it is placed
+on either side of the table/column, or an arrayref of length 2 in which case the
+table/column name is placed between the elements.
+
+For example under MySQL you'd use C<quote_char =E<gt> '`'>, and under SQL Server you'd
+use C<quote_char =E<gt> [qw/[ ]/]>.
+
+=item name_sep
+
+This only needs to be used in conjunction with L<quote_char>, and is used to
+specify the character that separates elements (schemas, tables, columns) from
+each other. In most cases this is simply a C<.>.
+
+=back
+
+These options can be mixed in with your other L<DBI> connection attributes,
+or placed in a separate hashref after all other normal L<DBI> connection
+arguments.
+
+Every time C<connect_info> is invoked, any previous settings for
+these options will be cleared before setting the new ones, regardless of
+whether any options are specified in the new C<connect_info>.
+
+Important note: DBIC expects the returned database handle provided by
+a subref argument to have RaiseError set on it. If it doesn't, things
+might not work very well, YMMV. If you don't use a subref, DBIC will
+force this setting for you anyway. Setting HandleError to anything
+other than a simple exception object wrapper might cause problems too.
+
+Examples:
+ # Simple SQLite connection
->connect_info([ 'dbi:SQLite:./foo.db' ]);
+ # Connect via subref
->connect_info([ sub { DBI->connect(...) } ]);
+ # A bit more complicated
->connect_info(
[
'dbi:Pg:dbname=foo',
'postgres',
'my_pg_password',
{ AutoCommit => 0 },
- { quote_char => q{`}, name_sep => q{@} },
+ { quote_char => q{"}, name_sep => q{.} },
]
);
+ # Equivalent to the previous example
->connect_info(
[
- sub { DBI->connect(...) },
- { quote_char => q{`}, name_sep => q{@} },
+ 'dbi:Pg:dbname=foo',
+ 'postgres',
+ 'my_pg_password',
+ { AutoCommit => 0, quote_char => q{"}, name_sep => q{.} },
]
);
-=head2 on_connect_do
+ # Subref + DBIC-specific connection options
+ ->connect_info(
+ [
+ sub { DBI->connect(...) },
+ {
+ quote_char => q{`},
+ name_sep => q{@},
+ on_connect_do => ['SET search_path TO myschema,otherschema,public'],
+ },
+ ]
+ );
- $schema->storage->on_connect_do(['PRAGMA synchronous = OFF']);
+=cut
-Call this after C<< $schema->connect >> to have the sql statements
-given executed on every db connect.
+sub connect_info {
+ my ($self, $info_arg) = @_;
-This option can also be set via L</connect_info>.
+ return $self->_connect_info if !$info_arg;
-=head2 debug
+ # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
+ # the new set of options
+ $self->_sql_maker(undef);
+ $self->_sql_maker_opts({});
-Causes SQL trace information to be emitted on the C<debugobj> object.
-(or C<STDERR> if C<debugobj> has not specifically been set).
+ my $info = [ @$info_arg ]; # copy because we can alter it
+ my $last_info = $info->[-1];
+ if(ref $last_info eq 'HASH') {
+ if(my $on_connect_do = delete $last_info->{on_connect_do}) {
+ $self->on_connect_do($on_connect_do);
+ }
+ for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
+ if(my $opt_val = delete $last_info->{$sql_maker_opt}) {
+ $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val;
+ }
+ }
-=head2 debugfh
+ # Get rid of any trailing empty hashref
+ pop(@$info) if !keys %$last_info;
+ }
-Set or retrieve the filehandle used for trace/debug output. This should be
-an IO::Handle compatible ojbect (only the C<print> method is used. Initially
-set to be STDERR - although see information on the
-L<DBIX_CLASS_STORAGE_DBI_DEBUG> environment variable.
+ $self->_connect_info($info);
+}
-=cut
+=head2 on_connect_do
-sub debugfh {
- my $self = shift;
+This method is deprecated in favor of setting via L</connect_info>.
- if ($self->debugobj->can('debugfh')) {
- return $self->debugobj->debugfh(@_);
- }
-}
+=head2 dbh_do
-=head2 debugobj
+Arguments: $subref, @extra_coderef_args?
-Sets or retrieves the object used for metric collection. Defaults to an instance
-of L<DBIx::Class::Storage::Statistics> that is campatible with the original
-method of using a coderef as a callback. See the aforementioned Statistics
-class for more information.
+Execute the given subref with the underlying database handle as its
+first argument, using the new exception-based connection management.
-=head2 debugcb
+Any additional arguments will be passed verbatim to the called subref
+as arguments 2 and onwards.
-Sets a callback to be executed each time a statement is run; takes a sub
-reference. Callback is executed as $sub->($op, $info) where $op is
-SELECT/INSERT/UPDATE/DELETE and $info is what would normally be printed.
+Example:
-See L<debugobj> for a better way.
+ my @stuff = $schema->storage->dbh_do(
+ sub {
+ my $dbh = shift;
+ my $cols = join(q{, }, @_);
+      $dbh->selectrow_array("SELECT $cols FROM foo")
+ },
+ @column_list
+ );
=cut
-sub debugcb {
- my $self = shift;
+sub dbh_do {
+ my $self = shift;
+ my $todo = shift;
- if ($self->debugobj->can('callback')) {
- return $self->debugobj->callback(@_);
+ my @result;
+ my $want_array = wantarray;
+
+ eval {
+ $self->_verify_pid if $self->_dbh;
+ $self->_populate_dbh if !$self->_dbh;
+ my $dbh = $self->_dbh;
+ if($want_array) {
+ @result = $todo->($dbh, @_);
}
+ elsif(defined $want_array) {
+ $result[0] = $todo->($dbh, @_);
+ }
+ else {
+ $todo->($dbh, @_);
+ }
+ };
+
+ if($@) {
+ my $exception = $@;
+ $self->connected
+ ? $self->throw_exception($exception)
+ : $self->_populate_dbh;
+
+ my $dbh = $self->_dbh;
+ return $todo->($dbh, @_);
+ }
+
+ return $want_array ? @result : $result[0];
}
=head2 disconnect
-Disconnect the L<DBI> handle, performing a rollback first if the
+Our C<disconnect> method also performs a rollback first if the
database is not in C<AutoCommit> mode.
=cut
}
}
-=head2 connected
-
-Check if the L<DBI> handle is connected. Returns true if the handle
-is connected.
-
-=cut
-
-sub connected { my ($self) = @_;
+sub connected {
+ my ($self) = @_;
if(my $dbh = $self->_dbh) {
if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
- $self->_sql_maker(undef);
return $self->_dbh(undef);
}
- elsif($self->_conn_pid != $$) {
- $self->_dbh->{InactiveDestroy} = 1;
- $self->_sql_maker(undef);
- return $self->_dbh(undef)
+ else {
+ $self->_verify_pid;
}
return ($dbh->FETCH('Active') && $dbh->ping);
}
return 0;
}
-=head2 ensure_connected
+# handle pid changes correctly
+# NOTE: assumes $self->_dbh is a valid $dbh
+sub _verify_pid {
+ my ($self) = @_;
-Check whether the database handle is connected - if not then make a
-connection.
+ return if $self->_conn_pid == $$;
-=cut
+ $self->_dbh->{InactiveDestroy} = 1;
+ $self->_dbh(undef);
+
+ return;
+}
sub ensure_connected {
my ($self) = @_;
sub _sql_maker_args {
my ($self) = @_;
- return ( limit_dialect => $self->dbh );
+ return ( limit_dialect => $self->dbh, %{$self->_sql_maker_opts} );
}
-=head2 sql_maker
-
-Returns a C<sql_maker> object - normally an object of class
-C<DBIC::SQL::Abstract>.
-
-=cut
-
sub sql_maker {
my ($self) = @_;
unless ($self->_sql_maker) {
return $self->_sql_maker;
}
-sub connect_info {
- my ($self, $info_arg) = @_;
-
- if($info_arg) {
- my %sql_maker_opts;
- my $info = [ @$info_arg ]; # copy because we can alter it
- my $last_info = $info->[-1];
- if(ref $last_info eq 'HASH') {
- my $used;
- if(my $on_connect_do = $last_info->{on_connect_do}) {
- $used = 1;
- $self->on_connect_do($on_connect_do);
- }
- for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
- if(my $opt_val = $last_info->{$sql_maker_opt}) {
- $used = 1;
- $sql_maker_opts{$sql_maker_opt} = $opt_val;
- }
- }
-
- # remove our options hashref if it was there, to avoid confusing
- # DBI in the case the user didn't use all 4 DBI options, as in:
- # [ 'dbi:SQLite:foo.db', { quote_char => q{`} } ]
- pop(@$info) if $used;
- }
-
- $self->_connect_info($info);
- $self->sql_maker->$_($sql_maker_opts{$_}) for(keys %sql_maker_opts);
- }
-
- $self->_connect_info;
-}
-
sub _populate_dbh {
my ($self) = @_;
my @info = @{$self->_connect_info || []};
}
eval {
- $dbh = ref $info[0] eq 'CODE'
- ? &{$info[0]}
- : DBI->connect(@info);
+ if(ref $info[0] eq 'CODE') {
+ $dbh = &{$info[0]}
+ }
+ else {
+ $dbh = DBI->connect(@info);
+ $dbh->{RaiseError} = 1;
+ $dbh->{PrintError} = 0;
+ }
};
$DBI::connect_via = $old_connect_via if $old_connect_via;
$dbh;
}
-=head2 txn_begin
-
-Calls begin_work on the current dbh.
-
-See L<DBIx::Class::Schema> for the txn_do() method, which allows for
-an entire code block to be executed transactionally.
-
-=cut
-
sub txn_begin {
my $self = shift;
if ($self->{transaction_depth}++ == 0) {
- my $dbh = $self->dbh;
- if ($dbh->{AutoCommit}) {
- $self->debugobj->txn_begin()
- if ($self->debug);
- $dbh->begin_work;
- }
+ $self->dbh_do(sub {
+ my $dbh = shift;
+ if ($dbh->{AutoCommit}) {
+ $self->debugobj->txn_begin()
+ if ($self->debug);
+ $dbh->begin_work;
+ }
+ });
}
}
-=head2 txn_commit
-
-Issues a commit against the current dbh.
-
-=cut
-
sub txn_commit {
my $self = shift;
- my $dbh = $self->dbh;
- if ($self->{transaction_depth} == 0) {
- unless ($dbh->{AutoCommit}) {
- $self->debugobj->txn_commit()
- if ($self->debug);
- $dbh->commit;
- }
- }
- else {
- if (--$self->{transaction_depth} == 0) {
- $self->debugobj->txn_commit()
- if ($self->debug);
- $dbh->commit;
- }
- }
-}
-
-=head2 txn_rollback
-
-Issues a rollback against the current dbh. A nested rollback will
-throw a L<DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION> exception,
-which allows the rollback to propagate to the outermost transaction.
-
-=cut
-
-sub txn_rollback {
- my $self = shift;
-
- eval {
- my $dbh = $self->dbh;
+ $self->dbh_do(sub {
+ my $dbh = shift;
if ($self->{transaction_depth} == 0) {
unless ($dbh->{AutoCommit}) {
- $self->debugobj->txn_rollback()
+ $self->debugobj->txn_commit()
if ($self->debug);
- $dbh->rollback;
+ $dbh->commit;
}
}
else {
if (--$self->{transaction_depth} == 0) {
- $self->debugobj->txn_rollback()
+ $self->debugobj->txn_commit()
if ($self->debug);
- $dbh->rollback;
+ $dbh->commit;
+ }
+ }
+ });
+}
+
+sub txn_rollback {
+ my $self = shift;
+
+ eval {
+ $self->dbh_do(sub {
+ my $dbh = shift;
+ if ($self->{transaction_depth} == 0) {
+ unless ($dbh->{AutoCommit}) {
+ $self->debugobj->txn_rollback()
+ if ($self->debug);
+ $dbh->rollback;
+ }
}
else {
- die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
+ if (--$self->{transaction_depth} == 0) {
+ $self->debugobj->txn_rollback()
+ if ($self->debug);
+ $dbh->rollback;
+ }
+ else {
+ die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
+ }
}
- }
+ });
};
if ($@) {
return $self->_execute(@args);
}
-=head2 select
-
-Handle a SQL select statement.
-
-=cut
-
sub select {
my $self = shift;
my ($ident, $select, $condition, $attrs) = @_;
return $self->cursor->new($self, \@_, $attrs);
}
-=head2 select_single
-
-Performs a select, fetch and return of data - handles a single row
-only.
-
-=cut
-
-# Need to call finish() to work round broken DBDs
-
sub select_single {
my $self = shift;
my ($rv, $sth, @bind) = $self->_select(@_);
my @row = $sth->fetchrow_array;
+ # Need to call finish() to work round broken DBDs
$sth->finish();
return @row;
}
sub sth {
my ($self, $sql) = @_;
# 3 is the if_active parameter which avoids active sth re-use
- return $self->dbh->prepare_cached($sql, {}, 3);
+ return $self->dbh_do(sub { shift->prepare_cached($sql, {}, 3) });
}
-=head2 columns_info_for
-
-Returns database type info for a given table columns.
-
-=cut
-
sub columns_info_for {
my ($self, $table) = @_;
- my $dbh = $self->dbh;
+ $self->dbh_do(sub {
+ my $dbh = shift;
+
+ if ($dbh->can('column_info')) {
+ my %result;
+ eval {
+ my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
+ my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
+ $sth->execute();
+ while ( my $info = $sth->fetchrow_hashref() ){
+ my %column_info;
+ $column_info{data_type} = $info->{TYPE_NAME};
+ $column_info{size} = $info->{COLUMN_SIZE};
+ $column_info{is_nullable} = $info->{NULLABLE} ? 1 : 0;
+ $column_info{default_value} = $info->{COLUMN_DEF};
+ my $col_name = $info->{COLUMN_NAME};
+ $col_name =~ s/^\"(.*)\"$/$1/;
+
+ $result{$col_name} = \%column_info;
+ }
+ };
+ return \%result if !$@;
+ }
- if ($dbh->can('column_info')) {
my %result;
- my $old_raise_err = $dbh->{RaiseError};
- my $old_print_err = $dbh->{PrintError};
- $dbh->{RaiseError} = 1;
- $dbh->{PrintError} = 0;
- eval {
- my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
- my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
- $sth->execute();
- while ( my $info = $sth->fetchrow_hashref() ){
- my %column_info;
- $column_info{data_type} = $info->{TYPE_NAME};
- $column_info{size} = $info->{COLUMN_SIZE};
- $column_info{is_nullable} = $info->{NULLABLE} ? 1 : 0;
- $column_info{default_value} = $info->{COLUMN_DEF};
- my $col_name = $info->{COLUMN_NAME};
- $col_name =~ s/^\"(.*)\"$/$1/;
-
- $result{$col_name} = \%column_info;
+ my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0");
+ $sth->execute;
+ my @columns = @{$sth->{NAME_lc}};
+ for my $i ( 0 .. $#columns ){
+ my %column_info;
+ my $type_num = $sth->{TYPE}->[$i];
+ my $type_name;
+ if(defined $type_num && $dbh->can('type_info')) {
+ my $type_info = $dbh->type_info($type_num);
+ $type_name = $type_info->{TYPE_NAME} if $type_info;
}
- };
- $dbh->{RaiseError} = $old_raise_err;
- $dbh->{PrintError} = $old_print_err;
- return \%result if !$@;
- }
+ $column_info{data_type} = $type_name ? $type_name : $type_num;
+ $column_info{size} = $sth->{PRECISION}->[$i];
+ $column_info{is_nullable} = $sth->{NULLABLE}->[$i] ? 1 : 0;
- my %result;
- my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0");
- $sth->execute;
- my @columns = @{$sth->{NAME_lc}};
- for my $i ( 0 .. $#columns ){
- my %column_info;
- my $type_num = $sth->{TYPE}->[$i];
- my $type_name;
- if(defined $type_num && $dbh->can('type_info')) {
- my $type_info = $dbh->type_info($type_num);
- $type_name = $type_info->{TYPE_NAME} if $type_info;
- }
- $column_info{data_type} = $type_name ? $type_name : $type_num;
- $column_info{size} = $sth->{PRECISION}->[$i];
- $column_info{is_nullable} = $sth->{NULLABLE}->[$i] ? 1 : 0;
+ if ($column_info{data_type} =~ m/^(.*?)\((.*?)\)$/) {
+ $column_info{data_type} = $1;
+ $column_info{size} = $2;
+ }
- if ($column_info{data_type} =~ m/^(.*?)\((.*?)\)$/) {
- $column_info{data_type} = $1;
- $column_info{size} = $2;
+ $result{$columns[$i]} = \%column_info;
}
- $result{$columns[$i]} = \%column_info;
- }
-
- return \%result;
+ return \%result;
+ });
}
=head2 last_insert_id
sub last_insert_id {
my ($self, $row) = @_;
- return $self->dbh->func('last_insert_rowid');
-
+ $self->dbh_do(sub { shift->func('last_insert_rowid') });
}
=head2 sqlt_type
=cut
-sub sqlt_type { shift->dbh->{Driver}->{Name} }
+sub sqlt_type { shift->dbh_do(sub { shift->{Driver}->{Name} }) }
=head2 create_ddl_dir (EXPERIMENTAL)
$databases ||= ['MySQL', 'SQLite', 'PostgreSQL'];
$databases = [ $databases ] if(ref($databases) ne 'ARRAY');
$version ||= $schema->VERSION || '1.x';
+ $sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} };
eval "use SQL::Translator";
$self->throw_exception("Can't deploy without SQL::Translator: $@") if $@;
- my $sqlt = SQL::Translator->new({
-# debug => 1,
- add_drop_table => 1,
- });
+ my $sqlt = SQL::Translator->new($sqltargs);
foreach my $db (@$databases)
{
$sqlt->reset();
}
-=head2 deploy
-
-Sends the appropriate statements to create or modify tables to the
-db. This would normally be called through
-L<DBIx::Class::Schema/deploy>.
-
-=cut
-
sub deploy {
my ($self, $schema, $type, $sqltargs) = @_;
- foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, $sqltargs) ) {
+ foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, { no_comments => 1, %{ $sqltargs || {} } } ) ) {
for ( split(";\n", $statement)) {
next if($_ =~ /^--/);
next if(!$_);
# next if($_ =~ /^DROP/m);
next if($_ =~ /^BEGIN TRANSACTION/m);
next if($_ =~ /^COMMIT/m);
+ next if $_ =~ /^\s+$/; # skip whitespace only
$self->debugobj->query_start($_) if $self->debug;
- $self->dbh->do($_) or warn "SQL was:\n $_";
+ $self->dbh->do($_) or warn "SQL was:\n $_"; # XXX exceptions?
$self->debugobj->query_end($_) if $self->debug;
}
}
return $type;
}
-sub DESTROY { shift->disconnect }
+sub DESTROY {
+ my $self = shift;
+ return if !$self->_dbh;
+
+ $self->_verify_pid;
+ $self->_dbh(undef);
+}
1;
=item limit_dialect
-Accessor for setting limit dialect. This is useful
-for JDBC-bridge among others where the remote SQL-dialect cannot
-be determined by the name of the driver alone.
-
-This option can also be set via L</connect_info>.
+See L</connect_info> for details.
+For setting, this method is deprecated in favor of L</connect_info>.
=item quote_char
-Specifies what characters to use to quote table and column names. If
-you use this you will want to specify L<name_sep> as well.
-
-quote_char expectes either a single character, in which case is it is placed
-on either side of the table/column, or an arrayref of length 2 in which case the
-table/column name is placed between the elements.
-
-For example under MySQL you'd use C<quote_char('`')>, and user SQL Server you'd
-use C<quote_char(qw/[ ]/)>.
-
-This option can also be set via L</connect_info>.
+See L</connect_info> for details.
+For setting, this method is deprecated in favor of L</connect_info>.
=item name_sep
-This only needs to be used in conjunction with L<quote_char>, and is used to
-specify the charecter that seperates elements (schemas, tables, columns) from
-each other. In most cases this is simply a C<.>.
-
-This option can also be set via L</connect_info>.
+See L</connect_info> for details.
+For setting, this method is deprecated in favor of L</connect_info>.
=back
-=head1 ENVIRONMENT VARIABLES
-
-=head2 DBIX_CLASS_STORAGE_DBI_DEBUG
-
-If C<DBIX_CLASS_STORAGE_DBI_DEBUG> is set then SQL trace information
-is produced (as when the L<debug> method is set).
-
-If the value is of the form C<1=/path/name> then the trace output is
-written to the file C</path/name>.
-
-This environment variable is checked when the storage object is first
-created (when you call connect on your schema). So, run-time changes
-to this environment variable will not take effect unless you also
-re-connect on your schema.
-
=head1 AUTHORS
Matt S. Trout <mst@shadowcatsystems.co.uk>