X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FStorage%2FDBI.pm;h=5984c947f93c1a5e7c8e540921387eb94239e1a8;hb=04786a4c19fe3964002b69e8a3dbb291524e0610;hp=2dba8ce9d79391e0b45692a1a984edc50faa9a99;hpb=1b45b01edd39a4b8c69df839d81299b7e51b7e7b;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 2dba8ce..5984c94 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1,4 +1,5 @@ package DBIx::Class::Storage::DBI; +# -*- mode: cperl; cperl-indent-level: 2 -*- use base 'DBIx::Class::Storage'; @@ -7,18 +8,42 @@ use warnings; use DBI; use SQL::Abstract::Limit; use DBIx::Class::Storage::DBI::Cursor; +use DBIx::Class::Storage::Statistics; use IO::File; use Carp::Clan qw/DBIx::Class/; - BEGIN { package DBIC::SQL::Abstract; # Would merge upstream, but nate doesn't reply :( use base qw/SQL::Abstract::Limit/; +# This prevents the caching of $dbh in S::A::L, I believe +sub new { + my $self = shift->SUPER::new(@_); + + # If limit_dialect is a ref (like a $dbh), go ahead and replace + # it with what it resolves to: + $self->{limit_dialect} = $self->_find_syntax($self->{limit_dialect}) + if ref $self->{limit_dialect}; + + $self; +} + +# While we're at it, this should make LIMIT queries more efficient, +# without digging into things too deeply +sub _find_syntax { + my ($self, $syntax) = @_; + $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax); +} + sub select { my ($self, $table, $fields, $where, $order, @rest) = @_; + $table = $self->_quote($table) unless ref($table); + local $self->{rownum_hack_count} = 1 + if (defined $rest[0] && $self->{limit_dialect} eq 'RowNum'); @rest = (-1) unless defined $rest[0]; + die "LIMIT 0 Does Not Compute" if $rest[0] == 0; + # and anyway, SQL::Abstract::Limit will cause a barf if we don't first local $self->{having_bind} = []; my ($sql, @ret) = $self->SUPER::select( $table, $self->_recurse_fields($fields), $where, $order, @rest @@ -26,6 +51,27 @@ sub select { return wantarray ? ($sql, @ret, @{$self->{having_bind}}) : $sql; } +sub insert { + my $self = shift; + my $table = shift; + $table = $self->_quote($table) unless ref($table); + $self->SUPER::insert($table, @_); +} + +sub update { + my $self = shift; + my $table = shift; + $table = $self->_quote($table) unless ref($table); + $self->SUPER::update($table, @_); +} + +sub delete { + my $self = shift; + my $table = shift; + $table = $self->_quote($table) unless ref($table); + $self->SUPER::delete($table, @_); +} + sub _emulate_limit { my $self = shift; if ($_[3] == -1) { @@ -42,7 +88,12 @@ sub _recurse_fields { return $$fields if $ref eq 'SCALAR'; if ($ref eq 'ARRAY') { - return join(', ', map { $self->_recurse_fields($_) } @$fields); + return join(', ', map { + $self->_recurse_fields($_) + .(exists $self->{rownum_hack_count} + ? 
' AS col'.$self->{rownum_hack_count}++ + : '') + } @$fields); } elsif ($ref eq 'HASH') { foreach my $func (keys %$fields) { return $self->_sqlcase($func) @@ -67,10 +118,18 @@ sub _order_by { $ret .= $self->_sqlcase(' having ').$frag; } if (defined $_[0]->{order_by}) { - $ret .= $self->SUPER::_order_by($_[0]->{order_by}); + $ret .= $self->_order_by($_[0]->{order_by}); } - } elsif(ref $_[0] eq 'SCALAR') { + } elsif (ref $_[0] eq 'SCALAR') { $ret = $self->_sqlcase(' order by ').${ $_[0] }; + } elsif (ref $_[0] eq 'ARRAY' && @{$_[0]}) { + my @order = @{+shift}; + $ret = $self->_sqlcase(' order by ') + .join(', ', map { + my $r = $self->_order_by($_, @_); + $r =~ s/^ ?ORDER BY //i; + $r; + } @order); } else { $ret = $self->SUPER::_order_by(@_); } @@ -90,7 +149,12 @@ sub _table { } elsif (ref $from eq 'HASH') { return $self->_make_as($from); } else { - return $from; + return $from; # would love to quote here but _table ends up getting called + # twice during an ->select without a limit clause due to + # the way S::A::Limit->select works. should maybe consider + # bypassing this and doing S::A::select($self, ...) in + # our select method above. meantime, quoting shims have + # been added to select/insert/update/delete here } } @@ -103,8 +167,9 @@ sub _recurse_from { # check whether a join type exists my $join_clause = ''; - if (ref($to) eq 'HASH' and exists($to->{-join_type})) { - $join_clause = ' '.uc($to->{-join_type}).' JOIN '; + my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to; + if (ref($to_jt) eq 'HASH' and exists($to_jt->{-join_type})) { + $join_clause = ' '.uc($to_jt->{-join_type}).' JOIN '; } else { $join_clause = ' JOIN '; } @@ -165,18 +230,6 @@ sub _quote { return $self->SUPER::_quote($label); } -sub _RowNum { - my $self = shift; - my $c; - $_[0] =~ s/SELECT (.*?) FROM/ - 'SELECT '.join(', ', map { $_.' AS col'.++$c } split(', ', $1)).' FROM'/e; - $self->SUPER::_RowNum(@_); -} - -# Accessor for setting limit dialect. This is useful -# for JDBC-bridge among others where the remote SQL-dialect cannot -# be determined by the name of the driver alone. 
-#
 sub limit_dialect {
   my $self = shift;
   $self->{limit_dialect} = shift if @_;
@@ -195,111 +248,229 @@ sub name_sep {
   return $self->{name_sep};
 }
+} # End of BEGIN block
+use base qw/DBIx::Class/;
+__PACKAGE__->load_components(qw/AccessorGroup/);
-package DBIx::Class::Storage::DBI::DebugCallback;
+__PACKAGE__->mk_group_accessors('simple' =>
+  qw/_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid _conn_tid
+     debug debugobj cursor on_connect_do transaction_depth/);
-sub print {
-  my ($self, $string) = @_;
-  $string =~ m/^(\w+)/;
-  ${$self}->($1, $string);
-}
+=head1 NAME
-} # End of BEGIN block
+DBIx::Class::Storage::DBI - DBI storage handler
-use base qw/DBIx::Class/;
+=head1 SYNOPSIS
-__PACKAGE__->load_components(qw/AccessorGroup/);
+=head1 DESCRIPTION
-__PACKAGE__->mk_group_accessors('simple' =>
-  qw/_connect_info _dbh _sql_maker _conn_pid _conn_tid debug debugfh
-     cursor on_connect_do transaction_depth/);
+This class represents the connection to the database.
+
+=head1 METHODS
+
+=head2 new
+
+=cut
 sub new {
-  my $new = bless({}, ref $_[0] || $_[0]);
+  my $new = {};
+  bless $new, (ref $_[0] || $_[0]);
+
   $new->cursor("DBIx::Class::Storage::DBI::Cursor");
   $new->transaction_depth(0);
-  if (defined($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}) &&
-     ($ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} =~ /=(.+)$/)) {
-    $new->debugfh(IO::File->new($1, 'w'))
+
+  $new->debugobj(new DBIx::Class::Storage::Statistics());
+
+  my $fh;
+
+  my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}
+                  || $ENV{DBIC_TRACE};
+
+  if (defined($debug_env) && ($debug_env =~ /=(.+)$/)) {
+    $fh = IO::File->new($1, 'w')
      or $new->throw_exception("Cannot open trace file $1");
   } else {
-    $new->debugfh(IO::File->new('>&STDERR'));
+    $fh = IO::File->new('>&STDERR');
  }
-  $new->debug(1) if $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG};
+  $new->debugfh($fh);
+  $new->debug(1) if $debug_env;
+  $new->_sql_maker_opts({});
  return $new;
 }
+=head2 throw_exception
+
+Throws an exception - croaks.
+
+=cut
+
 sub throw_exception {
   my ($self, $msg) = @_;
   croak($msg);
 }
-=head1 NAME
+=head2 connect_info
-DBIx::Class::Storage::DBI - DBI storage handler
+The arguments of C<connect_info> are always a single array reference.
-=head1 SYNOPSIS
+This is normally accessed via L<DBIx::Class::Schema/connection>, which
+encapsulates its argument list in an arrayref before calling
+C<connect_info> here.
-=head1 DESCRIPTION
+The arrayref can either contain the same set of arguments one would
+normally pass to L<DBI/connect>, or a lone code reference which returns
+a connected database handle.
-This class represents the connection to the database
+In either case, if the final argument in your connect_info happens
+to be a hashref, C<connect_info> will look there for several
+connection-specific options:
-=head1 METHODS
+=over 4
-=cut
+=item on_connect_do
-=head2 connect_info
+This can be set to an arrayref of literal SQL statements, which will
+be executed immediately after making the connection to the database
+every time we [re-]connect.
+
+=item limit_dialect
+
+Sets the limit dialect. This is useful for JDBC-bridge among others
+where the remote SQL-dialect cannot be determined by the name of the
+driver alone.
+
+=item quote_char
-Connection information arrayref. Can either be the same arguments
-one would pass to DBI->connect, or a code-reference which returns
-a connected database handle. In either case, there is an optional
-final element in the arrayref, which can hold a hashref of
-connection-specific Storage::DBI options. These include
-C<on_connect_do>, and the sql_maker options C<limit_dialect>,
-C<quote_char>, and C<name_sep>. Examples:
+Specifies what characters to use to quote table and column names.
If
+you use this you will want to specify L</name_sep> as well.
+quote_char expects either a single character, in which case it is placed
+on either side of the table/column, or an arrayref of length 2 in which case the
+table/column name is placed between the elements.
+
+For example under MySQL you'd use C<quote_char =E<gt> '`'>, and under SQL Server you'd
+use C<quote_char =E<gt> [qw/[ ]/]>.
+
+=item name_sep
+
+This only needs to be used in conjunction with L</quote_char>, and is used to
+specify the character that separates elements (schemas, tables, columns) from
+each other. In most cases this is simply a C<.>.
+
+=back
+
+These options can be mixed in with your other L<DBI> connection attributes,
+or placed in a separate hashref after all other normal L<DBI> connection
+arguments.
+
+Every time C<connect_info> is invoked, any previous settings for
+these options will be cleared before setting the new ones, regardless of
+whether any options are specified in the new C<connect_info>.
+
+Examples:
+
+  # Simple SQLite connection
   ->connect_info([ 'dbi:SQLite:./foo.db' ]);
-  ->connect_info(sub { DBI->connect(...) });
-  ->connect_info([ 'dbi:Pg:dbname=foo',
-                   'postgres',
-                   '',
-                   { AutoCommit => 0 },
-                   { quote_char => q{`}, name_sep => q{@} },
-                 ]);
+
+  # Connect via subref
+  ->connect_info([ sub { DBI->connect(...) } ]);
+
+  # A bit more complicated
+  ->connect_info(
+    [
+      'dbi:Pg:dbname=foo',
+      'postgres',
+      'my_pg_password',
+      { AutoCommit => 0 },
+      { quote_char => q{"}, name_sep => q{.} },
+    ]
+  );
+
+  # Equivalent to the previous example
+  ->connect_info(
+    [
+      'dbi:Pg:dbname=foo',
+      'postgres',
+      'my_pg_password',
+      { AutoCommit => 0, quote_char => q{"}, name_sep => q{.} },
+    ]
+  );
+
+  # Subref + DBIC-specific connection options
+  ->connect_info(
+    [
+      sub { DBI->connect(...) },
+      {
+        quote_char => q{`},
+        name_sep => q{@},
+        on_connect_do => ['SET search_path TO myschema,otherschema,public'],
+      },
+    ]
+  );
 =head2 on_connect_do
-Executes the sql statements given as a listref on every db connect.
+This method is deprecated in favor of setting via L</connect_info>.
 =head2 debug
-Causes SQL trace information to be emitted on C<debugfh> filehandle
-(or C<STDERR> if C<debugfh> has not specifically been set).
+Causes SQL trace information to be emitted on the C<debugobj> object.
+(or C<STDERR> if C<debugobj> has not specifically been set).
+
+This is the equivalent to setting L</DBIC_TRACE> in your
+shell environment.
 =head2 debugfh
-Sets or retrieves the filehandle used for trace/debug output. This
-should be an IO::Handle compatible object (only the C<print> method is
-used). Initially set to be STDERR - although see information on the
-L<DBIX_CLASS_STORAGE_DBI_DEBUG> environment variable.
+Set or retrieve the filehandle used for trace/debug output. This should be
+an IO::Handle compatible object (only the C<print> method is used). Initially
+set to be STDERR - although see information on the
+L</DBIC_TRACE> environment variable.
+
+=cut
+
+sub debugfh {
+    my $self = shift;
+
+    if ($self->debugobj->can('debugfh')) {
+        return $self->debugobj->debugfh(@_);
+    }
+}
+
+=head2 debugobj
+
+Sets or retrieves the object used for metric collection. Defaults to an instance
+of L<DBIx::Class::Storage::Statistics> that is compatible with the original
+method of using a coderef as a callback. See the aforementioned Statistics
+class for more information.
 =head2 debugcb
 Sets a callback to be executed each time a statement is run; takes a sub
-reference. Overrides debugfh. Callback is executed as $sub->($op, $info)
-where $op is SELECT/INSERT/UPDATE/DELETE and $info is what would normally
-be printed.
+reference. Callback is executed as $sub->($op, $info) where $op is
+SELECT/INSERT/UPDATE/DELETE and $info is what would normally be printed.
+
+See L</debugobj> for a better way.
=cut
 sub debugcb {
-  my ($self, $cb) = @_;
-  my $cb_obj = bless(\$cb, 'DBIx::Class::Storage::DBI::DebugCallback');
-  $self->debugfh($cb_obj);
+    my $self = shift;
+
+    if ($self->debugobj->can('callback')) {
+        return $self->debugobj->callback(@_);
+    }
 }
+=head2 disconnect
+
+Disconnect the L<DBI> handle, performing a rollback first if the
+database is not in C<AutoCommit> mode.
+
+=cut
+
 sub disconnect {
   my ($self) = @_;
@@ -310,18 +481,22 @@ sub disconnect {
   }
 }
-sub connected {
-  my ($self) = @_;
+=head2 connected
+
+Check if the L<DBI> handle is connected. Returns true if the handle
+is connected.
+
+=cut
+
+sub connected { my ($self) = @_;
   if(my $dbh = $self->_dbh) {
       if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
-          $self->_sql_maker(undef);
           return $self->_dbh(undef);
       }
       elsif($self->_conn_pid != $$) {
           $self->_dbh->{InactiveDestroy} = 1;
-          $self->_sql_maker(undef);
-          return $self->_dbh(undef)
+          return $self->_dbh(undef);
       }
       return ($dbh->FETCH('Active') && $dbh->ping);
   }
@@ -329,6 +504,13 @@ sub connected {
   return 0;
 }
+=head2 ensure_connected
+
+Check whether the database handle is connected - if not then make a
+connection.
+
+=cut
+
 sub ensure_connected {
   my ($self) = @_;
@@ -337,6 +519,12 @@ sub ensure_connected {
   }
 }
+=head2 dbh
+
+Returns the dbh - a database handle of class L<DBI>.
+
+=cut
+
 sub dbh {
   my ($self) = @_;
@@ -344,57 +532,76 @@ sub dbh {
   return $self->_dbh;
 }
+sub _sql_maker_args {
+    my ($self) = @_;
+
+    return ( limit_dialect => $self->dbh, %{$self->_sql_maker_opts} );
+}
+
+=head2 sql_maker
+
+Returns a C<sql_maker> object - normally an object of class
+C<DBIC::SQL::Abstract>.
+
+=cut
+
 sub sql_maker {
   my ($self) = @_;
   unless ($self->_sql_maker) {
-    $self->_sql_maker(new DBIC::SQL::Abstract( limit_dialect => $self->dbh ));
+    $self->_sql_maker(new DBIC::SQL::Abstract( $self->_sql_maker_args ));
   }
   return $self->_sql_maker;
 }
 sub connect_info {
-  my ($self, $info_arg) = @_;
-
-  if($info_arg) {
-    my $info = [ @$info_arg ]; # copy because we can alter it
-    my $last_info = $info->[-1];
-    if(ref $last_info eq 'HASH') {
-      my $used;
-      if(my $on_connect_do = $last_info->{on_connect_do}) {
-        $used = 1;
-        $self->on_connect_do($self->{on_connect_do});
-      }
-      foreach my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
-        if(my $opt_val = $last_info->{$sql_maker_opt}) {
-          $used = 1;
-          $self->sql_maker->$sql_maker_opt($opt_val);
-        }
-      }
-
-      # remove our options hashref if it was there, to avoid confusing
-      # DBI in the case the user didn't use all 4 DBI options, as in:
-      #   [ 'dbi:SQLite:foo.db', { quote_char => q{`} } ]
-      pop(@$info) if $used;
+    my ($self, $info_arg) = @_;
+
+    if($info_arg) {
+        # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
+        #  the new set of options
+        $self->_sql_maker(undef);
+        $self->_sql_maker_opts({});
+
+        my $info = [ @$info_arg ]; # copy because we can alter it
+        my $last_info = $info->[-1];
+        if(ref $last_info eq 'HASH') {
+            if(my $on_connect_do = delete $last_info->{on_connect_do}) {
+               $self->on_connect_do($on_connect_do);
+            }
+            for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
+                if(my $opt_val = delete $last_info->{$sql_maker_opt}) {
+                    $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val;
                }
+            }
-    $self->_connect_info($info);
+            # Get rid of any trailing empty hashref
+            pop(@$info) if !keys %$last_info;
        }
-  $self->_connect_info;
+        $self->_connect_info($info);
+    }
+
+    $self->_connect_info;
 }
 sub _populate_dbh {
   my ($self) = @_;
   my @info = @{$self->_connect_info || []};
   $self->_dbh($self->_connect(@info));
-  my $driver = $self->_dbh->{Driver}->{Name};
-  eval
"require DBIx::Class::Storage::DBI::${driver}"; - unless ($@) { - bless $self, "DBIx::Class::Storage::DBI::${driver}"; + + if(ref $self eq 'DBIx::Class::Storage::DBI') { + my $driver = $self->_dbh->{Driver}->{Name}; + if ($self->load_optional_class("DBIx::Class::Storage::DBI::${driver}")) { + bless $self, "DBIx::Class::Storage::DBI::${driver}"; + $self->_rebless() if $self->can('_rebless'); + } } + # if on-connect sql statements are given execute them foreach my $sql_statement (@{$self->on_connect_do || []}) { + $self->debugobj->query_start($sql_statement) if $self->debug(); $self->_dbh->do($sql_statement); + $self->debugobj->query_end($sql_statement) if $self->debug(); } $self->_conn_pid($$); @@ -414,17 +621,17 @@ sub _connect { $DBI::connect_via = 'connect'; } - if(ref $info[0] eq 'CODE') { - $dbh = &{$info[0]}; - } - else { - $dbh = DBI->connect(@info); - } + eval { + $dbh = ref $info[0] eq 'CODE' + ? &{$info[0]} + : DBI->connect(@info); + }; $DBI::connect_via = $old_connect_via if $old_connect_via; - $self->throw_exception("DBI Connection failed: $DBI::errstr") - unless $dbh; + if (!$dbh || $@) { + $self->throw_exception("DBI Connection failed: " . ($@ || $DBI::errstr)); + } $dbh; } @@ -440,8 +647,14 @@ an entire code block to be executed transactionally. sub txn_begin { my $self = shift; - $self->dbh->begin_work - if $self->{transaction_depth}++ == 0 and $self->dbh->{AutoCommit}; + if ($self->{transaction_depth}++ == 0) { + my $dbh = $self->dbh; + if ($dbh->{AutoCommit}) { + $self->debugobj->txn_begin() + if ($self->debug); + $dbh->begin_work; + } + } } =head2 txn_commit @@ -452,11 +665,20 @@ Issues a commit against the current dbh. sub txn_commit { my $self = shift; + my $dbh = $self->dbh; if ($self->{transaction_depth} == 0) { - $self->dbh->commit unless $self->dbh->{AutoCommit}; + unless ($dbh->{AutoCommit}) { + $self->debugobj->txn_commit() + if ($self->debug); + $dbh->commit; + } } else { - $self->dbh->commit if --$self->{transaction_depth} == 0; + if (--$self->{transaction_depth} == 0) { + $self->debugobj->txn_commit() + if ($self->debug); + $dbh->commit; + } } } @@ -472,13 +694,23 @@ sub txn_rollback { my $self = shift; eval { + my $dbh = $self->dbh; if ($self->{transaction_depth} == 0) { - $self->dbh->rollback unless $self->dbh->{AutoCommit}; + unless ($dbh->{AutoCommit}) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; + } } else { - --$self->{transaction_depth} == 0 ? - $self->dbh->rollback : + if (--$self->{transaction_depth} == 0) { + $self->debugobj->txn_rollback() + if ($self->debug); + $dbh->rollback; + } + else { die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new; + } } }; @@ -496,19 +728,32 @@ sub _execute { my ($sql, @bind) = $self->sql_maker->$op($ident, @args); unshift(@bind, @$extra_bind) if $extra_bind; if ($self->debug) { - my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind; - $self->debugfh->print("$sql: " . join(', ', @debug_bind) . "\n"); + my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind; + $self->debugobj->query_start($sql, @debug_bind); + } + my $sth = eval { $self->sth($sql,$op) }; + + if (!$sth || $@) { + $self->throw_exception( + 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql" + ); } - my $sth = $self->sth($sql,$op); - $self->throw_exception("no sth generated via sql: $sql") unless $sth; @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args my $rv; - if ($sth) { - $rv = $sth->execute(@bind) - or $self->throw_exception("Error executing '$sql': " . 
$sth->errstr);
-  } else {
+  if ($sth) {
+    my $time = time();
+    $rv = eval { $sth->execute(@bind) };
+
+    if ($@ || !$rv) {
+      $self->throw_exception("Error executing '$sql': ".($@ || $sth->errstr));
+    }
+  } else {
     $self->throw_exception("'$sql' did not generate a statement.");
   }
+  if ($self->debug) {
+     my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind;
+     $self->debugobj->query_end($sql, @debug_bind);
+  }
   return (wantarray ? ($rv, $sth, @bind) : $rv);
 }
@@ -548,17 +793,32 @@ sub _select {
       $self->sql_maker->_default_limit_syntax eq "GenericSubQ") {
         $attrs->{software_limit} = 1;
   } else {
+    $self->throw_exception("rows attribute must be positive if present")
+      if (defined($attrs->{rows}) && !($attrs->{rows} > 0));
     push @args, $attrs->{rows}, $attrs->{offset};
   }
   return $self->_execute(@args);
 }
+=head2 select
+
+Handles a SQL select statement.
+
+=cut
+
 sub select {
   my $self = shift;
   my ($ident, $select, $condition, $attrs) = @_;
   return $self->cursor->new($self, \@_, $attrs);
 }
+=head2 select_single
+
+Performs a select, fetch and return of data - handles a single row
+only.
+
+=cut
+
 # Need to call finish() to work round broken DBDs
 sub select_single {
@@ -569,6 +829,12 @@ sub select_single {
   return @row;
 }
+=head2 sth
+
+Returns a L<DBI> sth (statement handle) for the supplied SQL.
+
+=cut
+
 sub sth {
   my ($self, $sql) = @_;
   # 3 is the if_active parameter which avoids active sth re-use
@@ -584,40 +850,47 @@ Returns database type info for a given table's columns.
 sub columns_info_for {
   my ($self, $table) = @_;
-  if ($self->dbh->can('column_info')) {
+  my $dbh = $self->dbh;
+
+  if ($dbh->can('column_info')) {
     my %result;
-    my $old_raise_err = $self->dbh->{RaiseError};
-    my $old_print_err = $self->dbh->{PrintError};
-    $self->dbh->{RaiseError} = 1;
-    $self->dbh->{PrintError} = 0;
+    local $dbh->{RaiseError} = 1;
+    local $dbh->{PrintError} = 0;
     eval {
-      my $sth = $self->dbh->column_info( undef, undef, $table, '%' );
+      my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
+      my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
       $sth->execute();
+
+      # Some error occurred or there is no information:
+      if($sth->rows <1) {
+        die "column_info returned no rows for $schema, $tab";
+      }
+
       while ( my $info = $sth->fetchrow_hashref() ){
         my %column_info;
         $column_info{data_type}     = $info->{TYPE_NAME};
         $column_info{size}          = $info->{COLUMN_SIZE};
         $column_info{is_nullable}   = $info->{NULLABLE} ? 1 : 0;
         $column_info{default_value} = $info->{COLUMN_DEF};
+        my $col_name = $info->{COLUMN_NAME};
+        $col_name =~ s/^\"(.*)\"$/$1/;
-        $result{$info->{COLUMN_NAME}} = \%column_info;
+        $result{$col_name} = \%column_info;
       }
     };
-    $self->dbh->{RaiseError} = $old_raise_err;
-    $self->dbh->{PrintError} = $old_print_err;
     return \%result if !$@;
   }
   my %result;
-  my $sth = $self->dbh->prepare("SELECT * FROM $table WHERE 1=0");
+  my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0");
   $sth->execute;
   my @columns = @{$sth->{NAME_lc}};
   for my $i ( 0 .. $#columns ){
     my %column_info;
     my $type_num = $sth->{TYPE}->[$i];
     my $type_name;
-    if(defined $type_num && $self->dbh->can('type_info')) {
-      my $type_info = $self->dbh->type_info($type_num);
+    if(defined $type_num && $dbh->can('type_info')) {
+      my $type_info = $dbh->type_info($type_num);
       $type_name = $type_info->{TYPE_NAME} if $type_info;
     }
     $column_info{data_type} = $type_name ? $type_name : $type_num;
@@ -635,6 +908,12 @@ sub columns_info_for {
   return \%result;
 }
+=head2 last_insert_id
+
+Return the row id of the last insert.
+
+=cut
+
 sub last_insert_id {
   my ($self, $row) = @_;
@@ -642,45 +921,240 @@ sub last_insert_id {
 }
+=head2 sqlt_type
+
+Returns the database driver name.
+
+=cut
+
 sub sqlt_type { shift->dbh->{Driver}->{Name} }
+=head2 create_ddl_dir (EXPERIMENTAL)
+
+=over 4
+
+=item Arguments: $schema, \@databases, $version, $directory, $sqlt_args
+
+=back
+
+Creates an SQL file based on the Schema, for each of the specified
+database types, in the given directory.
+
+Note that this feature is currently EXPERIMENTAL and may not work correctly
+across all databases, or fully handle complex relationships.
+
+=cut
+
+sub create_ddl_dir
+{
+  my ($self, $schema, $databases, $version, $dir, $sqltargs) = @_;
+
+  if(!$dir || !-d $dir)
+  {
+    warn "No directory given, using ./\n";
+    $dir = "./";
+  }
+  $databases ||= ['MySQL', 'SQLite', 'PostgreSQL'];
+  $databases = [ $databases ] if(ref($databases) ne 'ARRAY');
+  $version ||= $schema->VERSION || '1.x';
+  $sqltargs = { ( add_drop_table => 1 ), %{$sqltargs || {}} };
+
+  eval "use SQL::Translator";
+  $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@;
+
+  my $sqlt = SQL::Translator->new($sqltargs);
+  foreach my $db (@$databases)
+  {
+    $sqlt->reset();
+    $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
+#    $sqlt->parser_args({'DBIx::Class' => $schema);
+    $sqlt->data($schema);
+    $sqlt->producer($db);
+
+    my $file;
+    my $filename = $schema->ddl_filename($db, $dir, $version);
+    if(-e $filename)
+    {
+      $self->throw_exception("$filename already exists, skipping $db");
+      next;
+    }
+    open($file, ">$filename")
+      or $self->throw_exception("Can't open $filename for writing ($!)");
+    my $output = $sqlt->translate;
+#use Data::Dumper;
+#    print join(":", keys %{$schema->source_registrations});
+#    print Dumper($sqlt->schema);
+    if(!$output)
+    {
+      $self->throw_exception("Failed to translate to $db. (" . $sqlt->error . ")");
+      next;
+    }
+    print $file $output;
+    close($file);
+  }
+
+}
+
+=head2 deployment_statements
+
+Creates the statements for L</deploy> and
+L<DBIx::Class::Schema/deploy>.
=cut
 sub deployment_statements {
-  my ($self, $schema, $type, $sqltargs) = @_;
+  my ($self, $schema, $type, $version, $dir, $sqltargs) = @_;
+  # Need to be connected to get the correct sqlt_type
+  $self->ensure_connected() unless $type;
   $type ||= $self->sqlt_type;
+  $version ||= $schema->VERSION || '1.x';
+  $dir ||= './';
   eval "use SQL::Translator";
-  $self->throw_exception("Can't deploy without SQL::Translator: $@") if $@;
-  eval "use SQL::Translator::Parser::DBIx::Class;";
-  $self->throw_exception($@) if $@;
-  eval "use SQL::Translator::Producer::${type};";
-  $self->throw_exception($@) if $@;
-  my $tr = SQL::Translator->new(%$sqltargs);
-  SQL::Translator::Parser::DBIx::Class::parse( $tr, $schema );
-  return "SQL::Translator::Producer::${type}"->can('produce')->($tr);
+  if(!$@)
+  {
+    eval "use SQL::Translator::Parser::DBIx::Class;";
+    $self->throw_exception($@) if $@;
+    eval "use SQL::Translator::Producer::${type};";
+    $self->throw_exception($@) if $@;
+    my $tr = SQL::Translator->new(%$sqltargs);
+    SQL::Translator::Parser::DBIx::Class::parse( $tr, $schema );
+    return "SQL::Translator::Producer::${type}"->can('produce')->($tr);
+  }
+
+  my $filename = $schema->ddl_filename($type, $dir, $version);
+  if(!-f $filename)
+  {
+#    $schema->create_ddl_dir([ $type ], $version, $dir, $sqltargs);
+    $self->throw_exception("No SQL::Translator, and no Schema file found, aborting deploy");
+    return;
+  }
+  my $file;
+  open($file, "<$filename")
+    or $self->throw_exception("Can't open $filename ($!)");
+  my @rows = <$file>;
+  close($file);
+
+  return join('', @rows);
+
 }
+=head2 deploy
+
+Sends the appropriate statements to create or modify tables in the
+database. This would normally be called through
+L<DBIx::Class::Schema/deploy>.
+
+=cut
+
 sub deploy {
   my ($self, $schema, $type, $sqltargs) = @_;
-  my @statements = $self->deployment_statements($schema, $type, $sqltargs);
-  foreach(split(";\n", @statements)) {
-    $self->debugfh->print("$_\n") if $self->debug;
-    $self->dbh->do($_) or warn "SQL was:\n $_";
-  }
+  foreach my $statement ( $self->deployment_statements($schema, $type, undef, undef, { no_comments => 1, %{ $sqltargs || {} } } ) ) {
+    for ( split(";\n", $statement)) {
+      next if($_ =~ /^--/);
+      next if(!$_);
+#      next if($_ =~ /^DROP/m);
+      next if($_ =~ /^BEGIN TRANSACTION/m);
+      next if($_ =~ /^COMMIT/m);
+      next if $_ =~ /^\s+$/; # skip whitespace only
+      $self->debugobj->query_start($_) if $self->debug;
+      $self->dbh->do($_) or warn "SQL was:\n $_";
+      $self->debugobj->query_end($_) if $self->debug;
+    }
+  }
+}
+
+=head2 datetime_parser
+
+Returns the datetime parser class.
+
+=cut
+
+sub datetime_parser {
+  my $self = shift;
+  return $self->{datetime_parser} ||= $self->build_datetime_parser(@_);
+}
+
+=head2 datetime_parser_type
+
+Defines (returns) the datetime parser class - currently hardwired to
+L<DateTime::Format::MySQL>.
+
+=cut
+
+sub datetime_parser_type { "DateTime::Format::MySQL"; }
+
+=head2 build_datetime_parser
+
+See L</datetime_parser>.
+
+=cut
+
+sub build_datetime_parser {
+  my $self = shift;
+  my $type = $self->datetime_parser_type(@_);
+  eval "use ${type}";
+  $self->throw_exception("Couldn't load ${type}: $@") if $@;
+  return $type;
 }
 sub DESTROY { shift->disconnect }
 1;
+=head1 SQL METHODS
+
+The module defines a set of methods within the DBIC::SQL::Abstract
+namespace. These build on L<SQL::Abstract::Limit> to provide the
+SQL query functions.
+
+The following methods are extended:
+
+=over 4
+
+=item delete
+
+=item insert
+
+=item select
+
+=item update
+
+=item limit_dialect
+
+See L</connect_info> for details.
+For setting, this method is deprecated in favor of L</connect_info>.
=item quote_char

See L</connect_info> for details.
For setting, this method is deprecated in favor of L</connect_info>.

=item name_sep

See L</connect_info> for details.
For setting, this method is deprecated in favor of L</connect_info>.

=back

=head1 ENVIRONMENT VARIABLES
-=head2 DBIX_CLASS_STORAGE_DBI_DEBUG
+=head2 DBIC_TRACE
-If C<DBIX_CLASS_STORAGE_DBI_DEBUG> is set then SQL trace information
+If C<DBIC_TRACE> is set then SQL trace information
is produced (as when the L</debug> method is set).
If the value is of the form C<1=/path/name> then the trace output is
written to the file C</path/name>.
+This environment variable is checked when the storage object is first
+created (when you call connect on your schema). So, run-time changes
+to this environment variable will not take effect unless you also
+re-connect on your schema.
+
+=head2 DBIX_CLASS_STORAGE_DBI_DEBUG
+
+Old name for DBIC_TRACE.
+
=head1 AUTHORS
Matt S. Trout
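
Editor's note: for illustration only, and not part of the patch above, the following is a minimal sketch of how application code might exercise the connect_info options documented in this change. The schema class name, DSN, credentials and file paths are invented; the option names (quote_char, name_sep, on_connect_do) and the DBIC_TRACE variable are the ones documented above.

  # Hypothetical application code; "MyApp::Schema" and the DSN are made up.
  use MyApp::Schema;

  # DBIC_TRACE is read when the storage object is created, so set it
  # (here or in the shell) before connecting:
  $ENV{DBIC_TRACE} = '1=/tmp/dbic_trace.log';

  my $schema = MyApp::Schema->connect(
    'dbi:Pg:dbname=myapp',
    'myapp_user',
    'myapp_pass',
    {
      AutoCommit    => 1,
      quote_char    => q{"},   # quote identifiers with double quotes
      name_sep      => q{.},   # separator between schema/table/column
      on_connect_do => [ 'SET search_path TO myapp,public' ],
    },
  );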
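Similarly, a hedged sketch of the debug hooks (debug, debugfh, debugcb/debugobj) introduced by this patch; it assumes the $schema object from the previous example and an invented trace file path.

  use IO::File;

  my $storage = $schema->storage;

  $storage->debug(1);                                   # turn SQL tracing on
  $storage->debugfh(IO::File->new('>>/tmp/dbic.sql'));  # redirect trace output

  # Or register a callback; per the debugcb() documentation above it is
  # forwarded to the debugobj (DBIx::Class::Storage::Statistics):
  $storage->debugcb(sub {
    my ($op, $info) = @_;   # $op is SELECT/INSERT/UPDATE/DELETE
    warn "[$op] $info";
  });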