package DBIx::Class::Storage::DBI::Sybase::ASE;

use strict;
use warnings;

use base qw/
  DBIx::Class::Storage::DBI::Sybase
  DBIx::Class::Storage::DBI::AutoCast
  DBIx::Class::Storage::DBI::IdentityInsert
/;
use mro 'c3';

use DBIx::Class::Carp;
use Scalar::Util qw/blessed weaken/;
use List::Util 'first';
use Sub::Name ();
use Data::Dumper::Concise 'Dumper';
use Try::Tiny;
use Context::Preserve 'preserve_context';

__PACKAGE__->sql_limit_dialect ('RowCountOrGenericSubQ');
__PACKAGE__->sql_quote_char ([qw/[ ]/]);
__PACKAGE__->datetime_parser_type(
  'DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format'
);

__PACKAGE__->mk_group_accessors('simple' =>
    qw/_identity _identity_method _blob_log_on_update _parent_storage
       _writer_storage _is_writer_storage
       _bulk_storage _is_bulk_storage _began_bulk_work
    /
);

my @also_proxy_to_extra_storages = qw/
  connect_call_set_auto_cast auto_cast connect_call_blob_setup
  connect_call_datetime_setup

  disconnect _connect_info _sql_maker _sql_maker_opts disable_sth_caching
  auto_savepoint unsafe cursor_class debug debugobj schema
/;

=head1 NAME

DBIx::Class::Storage::DBI::Sybase::ASE - Sybase ASE SQL Server support for
DBIx::Class

=head1 DESCRIPTION

This subclass supports L<DBD::Sybase> for real (non-Microsoft) Sybase databases.

If your version of Sybase does not support placeholders, then your storage will
be reblessed to L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
You can also enable that driver explicitly; see its documentation for more
details.

With this driver there is unfortunately no way to get the C<last_insert_id>
without doing a C<SELECT MAX(col)>. This is done safely in a transaction
(locking the table). See L</INSERTS WITH PLACEHOLDERS>.

A recommended L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting:

  on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]]
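
For example, a complete connection might look like this (the DSN, credentials
and schema class below are placeholders, not part of this module):

  my $schema = My::Schema->connect(
    'dbi:Sybase:server=MY_SERVER;database=mydb',
    $user,
    $password,
    {
      AutoCommit      => 1,
      on_connect_call => [
        ['datetime_setup'],
        ['blob_setup', log_on_update => 0],
      ],
    },
  );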

=head1 METHODS

=cut

sub _rebless {
  my $self = shift;

  my $no_bind_vars = __PACKAGE__ . '::NoBindVars';

  if ($self->_using_freetds) {
    carp_once <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};

You are using FreeTDS with Sybase.

We will do our best to support this configuration, but please consider this
support experimental.

TEXT/IMAGE columns will definitely not work.

You are encouraged to recompile DBD::Sybase with the Sybase Open Client libraries
instead.

See perldoc DBIx::Class::Storage::DBI::Sybase::ASE for more details.

To turn off this warning set the DBIC_SYBASE_FREETDS_NOWARN environment
variable.
EOF

    if (not $self->_use_typeless_placeholders) {
      if ($self->_use_placeholders) {
        $self->auto_cast(1);
      }
      else {
        $self->ensure_class_loaded($no_bind_vars);
        bless $self, $no_bind_vars;
        $self->_rebless;
      }
    }
  }
  elsif (not $self->_get_dbh->{syb_dynamic_supported}) {
    # not necessarily FreeTDS, but no placeholders nevertheless
    $self->ensure_class_loaded($no_bind_vars);
    bless $self, $no_bind_vars;
    $self->_rebless;
  }
  # this is highly unlikely, but we check just in case
  elsif (not $self->_use_typeless_placeholders) {
    $self->auto_cast(1);
  }
}

sub _init {
  my $self = shift;

  $self->_set_max_connect(256);

  # create storage for insert/(update blob) transactions,
  # unless this is that storage
  return if $self->_parent_storage;

  my $writer_storage = (ref $self)->new;

  $writer_storage->_is_writer_storage(1); # just info
  $writer_storage->connect_info($self->connect_info);
  $writer_storage->auto_cast($self->auto_cast);

  weaken ($writer_storage->{_parent_storage} = $self);
  $self->_writer_storage($writer_storage);

  # create a bulk storage unless connect_info is a coderef
  return if ref($self->_dbi_connect_info->[0]) eq 'CODE';

  my $bulk_storage = (ref $self)->new;

  $bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
  $bulk_storage->connect_info($self->connect_info);

  $bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';

  weaken ($bulk_storage->{_parent_storage} = $self);
  $self->_bulk_storage($bulk_storage);
}

for my $method (@also_proxy_to_extra_storages) {
  no strict 'refs';
  no warnings 'redefine';

  my $replaced = __PACKAGE__->can($method);

  *{$method} = Sub::Name::subname $method => sub {
    my $self = shift;
    $self->_writer_storage->$replaced(@_) if $self->_writer_storage;
    $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage;
    return $self->$replaced(@_);
  };
}

sub disconnect {
  my $self = shift;

  # Even though we call $sth->finish for uses of the bulk API, there's still an
  # "active statement" warning on disconnect, which we throw away here.
  # This is due to the bug described in insert_bulk.
  # Currently a noop because 'prepare' is used instead of 'prepare_cached'.
  local $SIG{__WARN__} = sub {
    warn $_[0] unless $_[0] =~ /active statement/i;
  } if $self->_is_bulk_storage;

  # so that next transaction gets a dbh
  $self->_began_bulk_work(0) if $self->_is_bulk_storage;

  $self->next::method(@_);
}

# This is only invoked for FreeTDS drivers by ::Storage::DBI::Sybase::FreeTDS
sub _set_autocommit_stmt {
  my ($self, $on) = @_;

  return 'SET CHAINED ' . ($on ? 'OFF' : 'ON');
}

# Set up session settings for Sybase databases for the connection.
#
# Make sure we have CHAINED mode turned on if AutoCommit is off in non-FreeTDS
# DBD::Sybase (since we don't know how DBD::Sybase was compiled). If however
# we're using FreeTDS, CHAINED mode turns on an implicit transaction which we
# only want when AutoCommit is off.
sub _run_connection_actions {
  my $self = shift;

  if ($self->_is_bulk_storage) {
    # this should be cleared on every reconnect
    $self->_began_bulk_work(0);
    return;
  }

  $self->_dbh->{syb_chained_txn} = 1
    unless $self->_using_freetds;

  $self->next::method(@_);
}

=head2 connect_call_blob_setup

Used as:

  on_connect_call => [ [ 'blob_setup', log_on_update => 0 ] ]

Does C<< $dbh->{syb_binary_images} = 1; >> to return C<IMAGE> data as raw binary
instead of as a hex string.

Also sets the C<log_on_update> value for blob write operations. The default is
C<1>, but C<0> is better if your database is configured for it.

See
L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.
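
For instance, once blobs are set up, C<IMAGE> data can be written through the
normal L<DBIx::Class> API (the C<Employee> source and C<photo> column below are
hypothetical, used only for illustration):

  $schema->resultset('Employee')->create({
    name  => 'Ada',
    photo => $raw_image_bytes,   # any raw binary string
  });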

=cut

sub connect_call_blob_setup {
  my $self = shift;
  my %args = @_;
  my $dbh = $self->_dbh;
  $dbh->{syb_binary_images} = 1;

  $self->_blob_log_on_update($args{log_on_update})
    if exists $args{log_on_update};
}

sub _is_lob_column {
  my ($self, $source, $column) = @_;

  return $self->_is_lob_type($source->column_info($column)->{data_type});
}

sub _prep_for_execute {
  my $self = shift;
  my ($op, $ident) = @_;

  ### This is commented out because all tests pass. However I am leaving it
  ### here as it may prove necessary (can't think through all combinations)
  ### BTW it doesn't currently work exactly - need better sensitivity to
  # currently set value
  #
  # inherit these from the parent for the duration of _prep_for_execute
  # Don't know how to make a localizing loop with if's, otherwise I would
  #
  #local $self->{_autoinc_supplied_for_op}
  #  = $self->_parent_storage->_autoinc_supplied_for_op
  #    if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage;
  #local $self->{_perform_autoinc_retrieval}
  #  = $self->_parent_storage->_perform_autoinc_retrieval
  #    if ($op eq 'insert' or $op eq 'update') and $self->_parent_storage;

  my ($sql, $bind) = $self->next::method (@_);

  if (my $identity_col = $self->_perform_autoinc_retrieval) {
    $sql .= "\n" . $self->_fetch_identity_sql($ident, $identity_col);
  }

  return ($sql, $bind);
}

sub _fetch_identity_sql {
  my ($self, $source, $col) = @_;

  return sprintf ("SELECT MAX(%s) FROM %s",
    map { $self->sql_maker->_quote ($_) } ($col, $source->from)
  );
}

# Stolen from SQLT, with some modifications. This is a makeshift
# solution before a sane type-mapping library is available, thus
# the 'our' for easy overrides.
our %TYPE_MAPPING = (
  varchar   => 'varchar',
  varchar2  => 'varchar',
  timestamp => 'datetime',
  real      => 'double precision',
  tinyint   => 'smallint',
  float     => 'double precision',
  bigserial => 'numeric',
  boolean   => 'varchar',
);

sub _native_data_type {
  my ($self, $type) = @_;

  $type =~ s/\s* identity//x;

  return uc($TYPE_MAPPING{$type} || $type);
}

sub _execute {
  my $self = shift;

  my ($rv, $sth, @bind) = $self->next::method(@_);

  $self->_identity( ($sth->fetchall_arrayref)[0][0] )
    if $self->_perform_autoinc_retrieval;

  return wantarray ? ($rv, $sth, @bind) : $rv;
}

sub last_insert_id { shift->_identity }

# handles TEXT/IMAGE and transaction for last_insert_id
sub insert {
  my $self = shift;
  my ($source, $to_insert) = @_;

  my $columns_info = $source->columns_info;

  my $identity_col =
    (first { $columns_info->{$_}{is_auto_increment} }
      keys %$columns_info )
    || '';

  # FIXME - this is duplication from DBI.pm. When refactored towards
  # the LobWriter this can be folded back where it belongs.
  local $self->{_autoinc_supplied_for_op} =
    exists $to_insert->{$identity_col} ? 1 : 0;

  local $self->{_perform_autoinc_retrieval} =
    ($identity_col and ! exists $to_insert->{$identity_col}) ? 1 : undef;

  # check for empty insert
  # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
  # try to insert explicit 'DEFAULT's instead (except for identity, timestamp
  # and computed columns)
  if (not %$to_insert) {
    for my $col ($source->columns) {
      next if $col eq $identity_col;

      my $info = $source->column_info($col);

      next if ref $info->{default_value} eq 'SCALAR'
        || (exists $info->{data_type} && (not defined $info->{data_type}));

      next if $info->{data_type} && $info->{data_type} =~ /^timestamp\z/i;

      $to_insert->{$col} = \'DEFAULT';
    }
  }

  my $blob_cols = $self->_remove_blob_cols($source, $to_insert);

  # do we need the horrific SELECT MAX(COL) hack?
  my $need_dumb_last_insert_id = (
    $self->_perform_autoinc_retrieval
      &&
    ($self->_identity_method||'') ne '@@IDENTITY'
  );

  my $next = $self->next::can;

  # we are already in a transaction, or there are no blobs
  # and we don't need the PK - just (try to) do it
  if ($self->{transaction_depth}
        || (!$blob_cols && !$need_dumb_last_insert_id)
  ) {
    return $self->_insert (
      $next, $source, $to_insert, $blob_cols, $identity_col
    );
  }

  # otherwise use the _writer_storage to do the insert+transaction on another
  # connection
  my $guard = $self->_writer_storage->txn_scope_guard;

  my $updated_cols = $self->_writer_storage->_insert (
    $next, $source, $to_insert, $blob_cols, $identity_col
  );

  $self->_identity($self->_writer_storage->_identity);

  $guard->commit;

  return $updated_cols;
}

sub _insert {
  my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;

  my $updated_cols = $self->$next ($source, $to_insert);

  my $final_row = {
    ($identity_col ?
      ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
    %$to_insert,
    %$updated_cols,
  };

  $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;

  return $updated_cols;
}

sub update {
  my $self = shift;
  my ($source, $fields, $where, @rest) = @_;

  #
  # When *updating* identities, ASE requires SET IDENTITY_UPDATE called
  # on the table.
  #
  if (my $blob_cols = $self->_remove_blob_cols($source, $fields)) {

    # If there are any blobs in $where, Sybase will return a descriptive error
    # message.
    # XXX blobs can still be used with a LIKE query, and this should be handled.

    # update+blob update(s) done atomically on separate connection
    $self = $self->_writer_storage;

    my $guard = $self->txn_scope_guard;

    # First update the blob columns to be updated to '' (taken from $fields, where
    # it is originally put by _remove_blob_cols.)
    my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;

    # We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
    $self->next::method($source, \%blobs_to_empty, $where, @rest);

    # Now update the blobs before the other columns in case the update of other
    # columns makes the search condition invalid.
    my $rv = $self->_update_blobs($source, $blob_cols, $where);

    # Now set the identity update flags for the actual update
    local $self->{_autoinc_supplied_for_op} = (first
      { $_->{is_auto_increment} }
      values %{ $source->columns_info([ keys %$fields ]) }
    ) ? 1 : 0;

    my $next = $self->next::can;
    my $args = \@_;
    return preserve_context {
      $self->$next(@$args);
    } after => sub { $guard->commit };
  }
  else {
    # Set the identity update flags for the actual update
    local $self->{_autoinc_supplied_for_op} = (first
      { $_->{is_auto_increment} }
      values %{ $source->columns_info([ keys %$fields ]) }
    ) ? 1 : 0;

    return $self->next::method(@_);
  }
}

sub insert_bulk {
  my $self = shift;
  my ($source, $cols, $data) = @_;

  my $columns_info = $source->columns_info;

  my $identity_col =
    first { $columns_info->{$_}{is_auto_increment} }
      keys %$columns_info;

  # FIXME - this is duplication from DBI.pm. When refactored towards
  # the LobWriter this can be folded back where it belongs.
  local $self->{_autoinc_supplied_for_op} =
    (first { $_ eq $identity_col } @$cols) ? 1 : 0;

  my $use_bulk_api =
    $self->_bulk_storage &&
    $self->_get_dbh->{syb_has_blk};

  if (! $use_bulk_api and ref($self->_dbi_connect_info->[0]) eq 'CODE') {
    carp_unique( join ' ',
      'Bulk API support disabled due to use of a CODEREF connect_info.',
      'Reverting to regular array inserts.',
    );
  }

  if (not $use_bulk_api) {
    my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);

    # next::method uses a txn anyway, but it ends too early in case we need to
    # select max(col) to get the identity for inserting blobs.
    ($self, my $guard) = $self->{transaction_depth} == 0 ?
      ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
      : ($self, undef);

    $self->next::method(@_);

    if ($self->_autoinc_supplied_for_op) {
      $self->_insert_blobs_array ($source, $blob_cols, $cols, $data);
    }
    else {
      my @cols_with_identities = (@$cols, $identity_col);

      ## calculate identities
      # XXX This assumes identities always increase by 1, which may or may not
      # be true.
      my ($last_identity) =
        $self->_dbh->selectrow_array (
          $self->_fetch_identity_sql($source, $identity_col)
        );
      my @identities = (($last_identity - @$data + 1) .. $last_identity);

      my @data_with_identities = map [@$_, shift @identities], @$data;

      $self->_insert_blobs_array (
        $source, $blob_cols, \@cols_with_identities, \@data_with_identities
      );
    }

    $guard->commit if $guard;

    return;
  }

  # otherwise, use the bulk API

  # rearrange @$data so that columns are in database order
  # and so we submit a full column list
  my %orig_order = map { $cols->[$_] => $_ } 0..$#$cols;

  my @source_columns = $source->columns;

  # bcp identity index is 1-based
  my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns);
  $identity_idx = defined $identity_idx ? $identity_idx + 1 : 0;

  my @new_data;
  for my $slice_idx (0..$#$data) {
    push @new_data, [map {
      # identity data will be 'undef' if not _autoinc_supplied_for_op()
      # columns with defaults will also be 'undef'
      exists $orig_order{$_}
        ? $data->[$slice_idx][$orig_order{$_}]
        : undef
    } @source_columns];
  }

  my $proto_bind = $self->_resolve_bindattrs(
    $source,
    [ map {
      [ { dbic_colname => $source_columns[$_], _bind_data_slice_idx => $_ }
          => $new_data[0][$_] ]
    } (0 ..$#source_columns) ],
  );

  ## Set a client-side conversion error handler, straight from DBD::Sybase docs.
  # This ignores any data conversion errors detected by the client side libs, as
  # they are usually harmless.
  my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
    Sub::Name::subname insert_bulk => sub {
      my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;

      return 1 if $errno == 36;

      carp
        "Layer: $layer, Origin: $origin, Severity: $severity, Error: $errno" .
        ($errmsg ? "\n$errmsg" : '') .
        ($osmsg ? "\n$osmsg" : '') .
        ($blkmsg ? "\n$blkmsg" : '');

      return 0;
  });

  my $exception;
  try {
    my $bulk = $self->_bulk_storage;

    my $guard = $bulk->txn_scope_guard;

    ## FIXME - once this is done - address the FIXME on finish() below
    ## XXX get this to work instead of our own $sth
    ## will require SQLA or *Hacks changes for ordered columns
    # $bulk->next::method($source, \@source_columns, \@new_data, {
    #   syb_bcp_attribs => {
    #     identity_flag   => $self->_autoinc_supplied_for_op ? 1 : 0,
    #     identity_column => $identity_idx,
    #   }
    # });
    my $sql = 'INSERT INTO ' .
      $bulk->sql_maker->_quote($source->name) . ' (' .
      # colname list is ignored for BCP, but does no harm
      (join ', ', map $bulk->sql_maker->_quote($_), @source_columns) . ') '.
      ' VALUES ('. (join ', ', ('?') x @source_columns) . ')';

    ## XXX there's a bug in the DBD::Sybase bulk support that makes $sth->finish for
    ## a prepare_cached statement ineffective. Replace with ->sth when fixed, or
    ## better yet the version above. Should be fixed in DBD::Sybase.
    my $sth = $bulk->_get_dbh->prepare($sql,
      {
        syb_bcp_attribs => {
          identity_flag   => $self->_autoinc_supplied_for_op ? 1 : 0,
          identity_column => $identity_idx,
        }
      }
    );

    # FIXME the $sth->finish in _execute_array does a rollback for some
    # reason. Disable it temporarily until we fix the SQLMaker thing above
    no warnings 'redefine';
    no strict 'refs';
    local *{ref($sth).'::finish'} = sub {};

    $self->_dbh_execute_for_fetch(
      $source, $sth, $proto_bind, \@source_columns, \@new_data
    );

    $guard->commit;

    $bulk->_query_end($sql);
  } catch {
    $exception = shift;
  };

  DBD::Sybase::set_cslib_cb($orig_cslib_cb);

  if ($exception =~ /-Y option/) {
    my $w = 'Sybase bulk API operation failed due to character set incompatibility, '
          . 'reverting to regular array inserts. Try unsetting the LANG environment variable'
    ;
    $w .= "\n$exception" if $self->debug;
    carp $w;

    $self->_bulk_storage(undef);
    unshift @_, $self;
    goto \&insert_bulk;
  }
  elsif ($exception) {
    # rollback makes the bulkLogin connection unusable
    $self->_bulk_storage->disconnect;
    $self->throw_exception($exception);
  }
}

# Make sure blobs are not bound as placeholders, and return any non-empty ones
sub _remove_blob_cols {
  my ($self, $source, $fields) = @_;

  my %blob_cols;

  for my $col (keys %$fields) {
    if ($self->_is_lob_column($source, $col)) {
      my $blob_val = delete $fields->{$col};
      if (not defined $blob_val) {
        $fields->{$col} = \'NULL';
      }
      else {
        $fields->{$col} = \"''";
        $blob_cols{$col} = $blob_val unless $blob_val eq '';
      }
    }
  }

  return %blob_cols ? \%blob_cols : undef;
}

# same for insert_bulk
sub _remove_blob_cols_array {
  my ($self, $source, $cols, $data) = @_;

  my @blob_cols;

  for my $i (0..$#$cols) {
    my $col = $cols->[$i];

    if ($self->_is_lob_column($source, $col)) {
      for my $j (0..$#$data) {
        my $blob_val = delete $data->[$j][$i];
        if (not defined $blob_val) {
          $data->[$j][$i] = \'NULL';
        }
        else {
          $data->[$j][$i] = \"''";
          $blob_cols[$j][$i] = $blob_val
            unless $blob_val eq '';
        }
      }
    }
  }

  return @blob_cols ? \@blob_cols : undef;
}

sub _update_blobs {
  my ($self, $source, $blob_cols, $where) = @_;

  my @primary_cols = try
    { $source->_pri_cols }
    catch {
      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
    };

  my @pks_to_update;
  if (
    @primary_cols == grep { defined $where->{$_} } @primary_cols
  ) {
    my %row_to_update;
    @row_to_update{@primary_cols} = @{$where}{@primary_cols};
    @pks_to_update = \%row_to_update;
  }
  else {
    my $cursor = $self->select ($source, \@primary_cols, $where, {});
    @pks_to_update = map {
      my %row; @row{@primary_cols} = @$_; \%row
    } $cursor->all;
  }

  for my $ident (@pks_to_update) {
    $self->_insert_blobs($source, $blob_cols, $ident);
  }
}

sub _insert_blobs {
  my ($self, $source, $blob_cols, $row) = @_;

  my $dbh = $self->_get_dbh;

  my $table = $source->name;

  my %row = %$row;

  my @primary_cols = try
    { $source->_pri_cols }
    catch {
      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
    };

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
    if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);

  for my $col (keys %$blob_cols) {
    my $blob = $blob_cols->{$col};

    my %where = map { ($_, $row{$_}) } @primary_cols;

    my $cursor = $self->select ($source, [$col], \%where, {});
    my $sth = $cursor->sth;

    if (not $sth) {
      $self->throw_exception(
          "Could not find row in table '$table' for blob update:\n"
        . (Dumper \%where)
      );
    }

    try {
      $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;

      $sth->func('ct_prepare_send') or die $sth->errstr;

      my $log_on_update = $self->_blob_log_on_update;
      $log_on_update = 1 if not defined $log_on_update;

      $sth->func('CS_SET', 1, {
        total_txtlen => length($blob),
        log_on_update => $log_on_update
      }, 'ct_data_info') or die $sth->errstr;

      $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;

      $sth->func('ct_finish_send') or die $sth->errstr;
    }
    catch {
      if ($self->_using_freetds) {
        $self->throw_exception (
          "TEXT/IMAGE operation failed, probably because you are using FreeTDS: $_"
        );
      }
      else {
        $self->throw_exception($_);
      }
    }
    finally {
      $sth->finish if $sth;
    };
  }
}

sub _insert_blobs_array {
  my ($self, $source, $blob_cols, $cols, $data) = @_;

  for my $i (0..$#$data) {
    my $datum = $data->[$i];

    my %row;
    @row{ @$cols } = @$datum;

    my %blob_vals;
    for my $j (0..$#$cols) {
      if (exists $blob_cols->[$i][$j]) {
        $blob_vals{ $cols->[$j] } = $blob_cols->[$i][$j];
      }
    }

    $self->_insert_blobs ($source, \%blob_vals, \%row);
  }
}

=head2 connect_call_datetime_setup

Used as:

  on_connect_call => 'datetime_setup'

In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:

  $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
  $dbh->do('set dateformat mdy');   # input fmt: 08/13/1979 18:08:55.080

This works for both C<DATETIME> and C<SMALLDATETIME> columns; note that
C<SMALLDATETIME> columns only have minute precision.
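
For example, with the above in place and L<DBIx::Class::InflateColumn::DateTime>
loaded, a datetime column round-trips as a L<DateTime> object (the C<Event>
source and C<starts_at> column here are hypothetical):

  __PACKAGE__->add_columns(
    starts_at => { data_type => 'datetime' },
  );

  # elsewhere
  my $event = $schema->resultset('Event')->create({ starts_at => DateTime->now });
  print $event->starts_at->ymd;   # inflated back into a DateTime object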

=cut

sub connect_call_datetime_setup {
  my $self = shift;
  my $dbh = $self->_get_dbh;

  if ($dbh->can('syb_date_fmt')) {
    # amazingly, this works with FreeTDS
    $dbh->syb_date_fmt('ISO_strict');
  }
  else {
    carp_once
      'Your DBD::Sybase is too old to support '
     .'DBIx::Class::InflateColumn::DateTime, please upgrade!';

    # FIXME - in retrospect this is a rather bad US-centric choice
    # of format. Not changing as a bugwards compat, though in reality
    # the only piece that sees the results of $dt object formatting
    # (as opposed to parsing) is the database itself, so theoretically
    # changing both this SET command and the formatter definition of
    # ::S::D::Sybase::ASE::DateTime::Format below should be safe and
    # transparent
    $dbh->do('SET DATEFORMAT mdy');
  }
}

sub _exec_txn_begin {
  my $self = shift;

  # bulkLogin=1 connections are always in a transaction, and can only call BEGIN
  # TRAN once. However, we need to make sure there's a $dbh.
  return if $self->_is_bulk_storage && $self->_dbh && $self->_began_bulk_work;

  $self->next::method(@_);

  $self->_began_bulk_work(1) if $self->_is_bulk_storage;
}

# savepoint support using ASE syntax

sub _exec_svp_begin {
  my ($self, $name) = @_;

  $self->_dbh->do("SAVE TRANSACTION $name");
}

# A new SAVE TRANSACTION with the same name releases the previous one.
sub _exec_svp_release { 1 }

sub _exec_svp_rollback {
  my ($self, $name) = @_;

  $self->_dbh->do("ROLLBACK TRANSACTION $name");
}

package # hide from PAUSE
    DBIx::Class::Storage::DBI::Sybase::ASE::DateTime::Format;

my $datetime_parse_format  = '%Y-%m-%dT%H:%M:%S.%3NZ';
my $datetime_format_format = '%m/%d/%Y %H:%M:%S.%3N';

my ($datetime_parser, $datetime_formatter);

sub parse_datetime {
  shift;
  require DateTime::Format::Strptime;
  $datetime_parser ||= DateTime::Format::Strptime->new(
    pattern  => $datetime_parse_format,
    on_error => 'croak',
  );
  return $datetime_parser->parse_datetime(shift);
}

sub format_datetime {
  shift;
  require DateTime::Format::Strptime;
  $datetime_formatter ||= DateTime::Format::Strptime->new(
    pattern  => $datetime_format_format,
    on_error => 'croak',
  );
  return $datetime_formatter->format_datetime(shift);
}

1;

=head1 Schema::Loader Support

As of version C<0.05000>, L<DBIx::Class::Schema::Loader> should work well with
most versions of Sybase ASE.

=head1 FreeTDS

This driver supports L<DBD::Sybase> compiled against FreeTDS
(L<http://www.freetds.org/>) to the best of our ability; however, it is
recommended that you recompile L<DBD::Sybase> against the Sybase Open Client
libraries. They are a part of the Sybase ASE distribution:

The Open Client FAQ is here:
L<http://www.isug.com/Sybase_FAQ/ASE/section7.html>.

Sybase ASE for Linux (which comes with the Open Client libraries) may be
downloaded here: L<http://response.sybase.com/forms/ASE_Linux_Download>.

To see if you're using FreeTDS, run:

  perl -MDBI -le 'my $dbh = DBI->connect($dsn, $user, $pass); print $dbh->{syb_oc_version}'

It is recommended to set C<tds version> for your ASE server to C<5.0> in
C</etc/freetds/freetds.conf>.
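
A minimal F</etc/freetds/freetds.conf> entry along those lines might look like
this (the server name and host are placeholders):

  [MY_ASE_SERVER]
      host = ase.example.com
      port = 5000
      tds version = 5.0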

Some versions or configurations of the libraries involved will not support
placeholders, in which case the storage will be reblessed to
L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.

In some configurations, placeholders will work but will throw implicit type
conversion errors for anything that's not expecting a string. In such a case,
the C<auto_cast> option from L<DBIx::Class::Storage::DBI::AutoCast> is
automatically set; you can also enable it explicitly on connection with
L<connect_call_set_auto_cast|DBIx::Class::Storage::DBI::AutoCast/connect_call_set_auto_cast>.
The type info for the C<CAST>s is taken from the
L<DBIx::Class::ResultSource/data_type> definitions in your Result classes, and
is mapped to a Sybase type (if it isn't already) using a mapping based on
L<SQL::Translator>.

In other configurations, placeholders will work just as they do with the Sybase
Open Client libraries.

Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS.

=head1 INSERTS WITH PLACEHOLDERS

With placeholders enabled, inserts are done in a transaction so that there are
no concurrency issues with getting the inserted identity value using
C<SELECT MAX(col)>, which is the only way to get the C<IDENTITY> value in this
case.

In addition, they are done on a separate connection so that it's possible to
have active cursors when doing an insert.

When using C<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>, transactions
are unnecessary and not used, as there are no concurrency issues with C<SELECT
@@IDENTITY>, which is a session variable.

=head1 TRANSACTIONS

Due to limitations of the TDS protocol and L<DBD::Sybase>, you cannot begin a
transaction while there are active cursors, nor can you use multiple active
cursors within a transaction. An active cursor is, for example, a
L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or
C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>.

For example, this will not work:

  $schema->txn_do(sub {
    my $rs = $schema->resultset('Book');
    while (my $row = $rs->next) {
      $schema->resultset('MetaData')->create({
        book_id => $row->id,
        ...
      });
    }
  });

This won't either:

  my $first_row = $large_rs->first;
  $schema->txn_do(sub { ... });

Transactions done for inserts in C<AutoCommit> mode when placeholders are in use
are not affected, as they are done on an extra database handle.

Some workarounds:

=over 4

=item * use L<DBIx::Class::Storage::DBI::Replicated>

=item * L<connect|DBIx::Class::Schema/connect> another L<Schema|DBIx::Class::Schema>

=item * load the data from your cursor with L<DBIx::Class::ResultSet/all> (see the sketch below)

=back
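
A minimal sketch of that last workaround, reusing the C<Book> and C<MetaData>
resultsets from the example above:

  # ->all exhausts the cursor up front, so no cursor stays active
  my @books = $schema->resultset('Book')->all;

  $schema->txn_do(sub {
    $schema->resultset('MetaData')->create({ book_id => $_->id })
      for @books;
  });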

=head1 MAXIMUM CONNECTIONS

The TDS protocol makes separate connections to the server for active statements
in the background. By default the number of such connections is limited to 25,
on both the client side and the server side.

This is a bit too low for a complex L<DBIx::Class> application, so on connection
the client-side setting is set to C<256> (see L<DBD::Sybase/maxConnect>). You
can override it to whatever setting you like in the DSN.
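
For example, to raise the limit via the DSN (the server name is a placeholder):

  dbi:Sybase:server=MY_SERVER;maxConnect=1024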

See
L<http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.ase_15.0.sag1/html/sag1/sag1272.htm>
for information on changing the setting on the server side.

=head1 DATES

See L</connect_call_datetime_setup> to set up date formats
for L<DBIx::Class::InflateColumn::DateTime>.

=head1 TEXT/IMAGE COLUMNS

L<DBD::Sybase> compiled with FreeTDS will B<NOT> allow you to insert or update
C<TEXT/IMAGE> columns.

Setting C<< $dbh->{LongReadLen} >> will also not work with FreeTDS; use either:

  $schema->storage->dbh->do("SET TEXTSIZE $bytes");

or

  $schema->storage->set_textsize($bytes);

instead.

However, the C<LongReadLen> you pass in
L<connect_info|DBIx::Class::Storage::DBI/connect_info> is used to execute the
equivalent C<SET TEXTSIZE> command on connection.
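
For instance, a C<LongReadLen> passed in the
L<connect_info|DBIx::Class::Storage::DBI/connect_info> attribute hash (the DSN
and credentials are placeholders) ends up as the connection's C<TEXTSIZE>:

  my $schema = My::Schema->connect(
    $dsn, $user, $password,
    { LongReadLen => 10 * 1024 * 1024 },   # 10 MiB TEXTSIZE
  );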

See L</connect_call_blob_setup> for a
L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting you need to work
with C<IMAGE> columns.

=head1 BULK API

The experimental L<DBD::Sybase> Bulk API support is used for
L<populate|DBIx::Class::ResultSet/populate> in B<void> context, in a transaction
on a separate connection.

To use this feature effectively, use a large number of rows for each
L<populate|DBIx::Class::ResultSet/populate> call, e.g.:

  while (my $rows = $data_source->get_100_rows()) {
    $rs->populate($rows);
  }

B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns>
calls in your C<Result> classes B<must> list columns in database order for this
to work. Also, you may have to unset the C<LANG> environment variable before
loading your app, as C<BCP -Y> is not yet supported in DBD::Sybase.

When inserting IMAGE columns using this method, you'll need to use
L</connect_call_blob_setup> as well.

=head1 COMPUTED COLUMNS

If you have columns such as:

  created_dtm AS getdate()

represent them in your Result classes as:

  created_dtm => {
    data_type => undef,
    default_value => \'getdate()',
    is_nullable => 0,
    inflate_datetime => 1,
  }

The C<data_type> must exist and must be C<undef>. Then empty inserts will work
on tables with such columns.
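
With such a definition in place, an "empty insert" is simply a create with no
values (the C<AuditLog> source name is hypothetical):

  my $row = $schema->resultset('AuditLog')->create({});
  # created_dtm is filled in by the database via getdate()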

=head1 TIMESTAMP COLUMNS

C<timestamp> columns in Sybase ASE are not really timestamps, see:
L<http://dba.fyicenter.com/Interview-Questions/SYBASE/The_timestamp_datatype_in_Sybase_.html>.

They should be defined in your Result classes as:

  ts => {
    data_type => 'timestamp',
    is_nullable => 0,
    inflate_datetime => 0,
  }

The C<< inflate_datetime => 0 >> is necessary if you use
L<DBIx::Class::InflateColumn::DateTime> (and most people do) and still want to
be able to read these values.

The values will come back as hexadecimal.

=head1 TODO

=over

=item *

Transitions to AutoCommit=0 (starting a transaction) mode by exhausting
any active cursors, using eager cursors.

=item *

Real limits and limited counts using stored procedures deployed on startup.

=item *

Blob update with a LIKE query on a blob, without invalidating the WHERE condition.

=item *

bulk_insert using prepare_cached (see comments.)

=back

=head1 AUTHOR AND CONTRIBUTORS

See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.

=head1 LICENSE

You may distribute this code under the same terms as Perl itself.

=cut