1 package DBIx::Class::Storage::DBI::Sybase::ASE;
7 DBIx::Class::Storage::DBI::Sybase
8 DBIx::Class::Storage::DBI::AutoCast
11 use Carp::Clan qw/^DBIx::Class/;
12 use Scalar::Util 'blessed';
13 use List::Util 'first';
15 use Data::Dumper::Concise 'Dumper';
19 __PACKAGE__->sql_limit_dialect ('RowCountOrGenericSubQ');
21 __PACKAGE__->mk_group_accessors('simple' =>
22 qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
23 _bulk_storage _is_bulk_storage _began_bulk_work
24 _bulk_disabled_due_to_coderef_connect_info_warned
28 my @also_proxy_to_extra_storages = qw/
29 connect_call_set_auto_cast auto_cast connect_call_blob_setup
30 connect_call_datetime_setup
32 disconnect _connect_info _sql_maker _sql_maker_opts disable_sth_caching
33 auto_savepoint unsafe cursor_class debug debugobj schema
38 DBIx::Class::Storage::DBI::Sybase::ASE - Sybase ASE SQL Server support for DBIx::Class
43 This subclass supports L<DBD::Sybase> for real (non-Microsoft) Sybase databases.
47 If your version of Sybase does not support placeholders, then your storage will
48 be reblessed to L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
49 You can also enable that driver explicitly; see its documentation for details.
52 With this driver there is unfortunately no way to get the C<last_insert_id>
53 without doing a C<SELECT MAX(col)>. This is done safely in a transaction
54 (locking the table.) See L</INSERTS WITH PLACEHOLDERS>.
56 A recommended L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting:
58 on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]]
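For example, a complete connection could look something like this (a sketch only; the DSN, the C<My::Schema> class and the credentials are placeholders, not part of this module):

  my $schema = My::Schema->connect(
    'dbi:Sybase:server=SYBASE;database=mydb',    # hypothetical DSN
    $user, $password,
    {
      AutoCommit      => 1,
      LongReadLen     => 1024 * 1024,            # issued as SET TEXTSIZE on connect
      on_connect_call => [
        ['datetime_setup'],
        ['blob_setup', log_on_update => 0],
      ],
    },
  );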
67 my $no_bind_vars = __PACKAGE__ . '::NoBindVars';
69 if ($self->using_freetds) {
70 carp <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};
72 You are using FreeTDS with Sybase.
74 We will do our best to support this configuration, but please consider this support experimental.
77 TEXT/IMAGE columns will definitely not work.
79 You are encouraged to recompile DBD::Sybase with the Sybase Open Client libraries
82 See perldoc DBIx::Class::Storage::DBI::Sybase::ASE for more details.
84 To turn off this warning, set the DBIC_SYBASE_FREETDS_NOWARN environment variable.
88 if (not $self->_use_typeless_placeholders) {
89 if ($self->_use_placeholders) {
93 $self->ensure_class_loaded($no_bind_vars);
94 bless $self, $no_bind_vars;
100 elsif (not $self->_get_dbh->{syb_dynamic_supported}) {
101 # not necessarily FreeTDS, but no placeholders nevertheless
102 $self->ensure_class_loaded($no_bind_vars);
103 bless $self, $no_bind_vars;
106 # this is highly unlikely, but we check just in case
107 elsif (not $self->_use_typeless_placeholders) {
114 $self->_set_max_connect(256);
116 # create storage for insert/(update blob) transactions,
117 # unless this is that storage
118 return if $self->_is_extra_storage;
120 my $writer_storage = (ref $self)->new;
122 $writer_storage->_is_extra_storage(1);
123 $writer_storage->connect_info($self->connect_info);
124 $writer_storage->auto_cast($self->auto_cast);
126 $self->_writer_storage($writer_storage);
128 # create a bulk storage unless connect_info is a coderef
129 return if ref($self->_dbi_connect_info->[0]) eq 'CODE';
131 my $bulk_storage = (ref $self)->new;
133 $bulk_storage->_is_extra_storage(1);
134 $bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
135 $bulk_storage->connect_info($self->connect_info);
138 $bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';
140 $self->_bulk_storage($bulk_storage);
143 for my $method (@also_proxy_to_extra_storages) {
145 no warnings 'redefine';
147 my $replaced = __PACKAGE__->can($method);
149 *{$method} = Sub::Name::subname $method => sub {
151 $self->_writer_storage->$replaced(@_) if $self->_writer_storage;
152 $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage;
153 return $self->$replaced(@_);
160 # Even though we call $sth->finish for uses of the bulk API, there's still an
161 # "active statement" warning on disconnect, which we throw away here.
162 # This is due to the bug described in insert_bulk.
163 # Currently a noop because 'prepare' is used instead of 'prepare_cached'.
164 local $SIG{__WARN__} = sub {
165 warn $_[0] unless $_[0] =~ /active statement/i;
166 } if $self->_is_bulk_storage;
168 # so that next transaction gets a dbh
169 $self->_began_bulk_work(0) if $self->_is_bulk_storage;
174 # Set up session settings for Sybase databases for the connection.
176 # Make sure we have CHAINED mode turned on if AutoCommit is off in non-FreeTDS
177 # DBD::Sybase (since we don't know how DBD::Sybase was compiled.) If however
178 # we're using FreeTDS, CHAINED mode turns on an implicit transaction which we
179 # only want when AutoCommit is off.
181 # Also SET TEXTSIZE for FreeTDS because LongReadLen doesn't work.
182 sub _run_connection_actions {
185 if ($self->_is_bulk_storage) {
186 # this should be cleared on every reconnect
187 $self->_began_bulk_work(0);
191 if (not $self->using_freetds) {
192 $self->_dbh->{syb_chained_txn} = 1;
194 # based on LongReadLen in connect_info
197 if ($self->_dbh_autocommit) {
198 $self->_dbh->do('SET CHAINED OFF');
200 $self->_dbh->do('SET CHAINED ON');
204 $self->next::method(@_);
207 =head2 connect_call_blob_setup
211 on_connect_call => [ [ 'blob_setup', log_on_update => 0 ] ]
213 Does C<< $dbh->{syb_binary_images} = 1; >> to return C<IMAGE> data as raw binary
214 instead of as a hex string.
218 Also sets the C<log_on_update> value for blob write operations. The default is
219 C<1>, but C<0> is better if your database is configured for it.
222 L<DBD::Sybase/Handling IMAGE/TEXT data with syb_ct_get_data()/syb_ct_send_data()>.
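For instance, with C<blob_setup> in place an C<IMAGE> value round-trips as raw bytes (a sketch; the C<Photo> source and its C<image_data> column are hypothetical, and the table needs a primary key for blob operations):

  # store raw binary data in an IMAGE column
  my $photo = $schema->resultset('Photo')->create({
    name       => 'avatar',
    image_data => $raw_bytes,
  });

  # read it back as raw binary rather than a hex string
  my $bytes = $photo->image_data;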
226 sub connect_call_blob_setup {
229 my $dbh = $self->_dbh;
230 $dbh->{syb_binary_images} = 1;
232 $self->_blob_log_on_update($args{log_on_update})
233 if exists $args{log_on_update};
239 $type && $type =~ /(?:text|image|lob|bytea|binary|memo)/i;
243 my ($self, $source, $column) = @_;
245 return $self->_is_lob_type($source->column_info($column)->{data_type});
248 sub _prep_for_execute {
250 my ($op, $extra_bind, $ident, $args) = @_;
252 my ($sql, $bind) = $self->next::method (@_);
254 my $table = blessed $ident ? $ident->from : $ident;
256 my $bind_info = $self->_resolve_column_info(
257 $ident, [map $_->[0], @{$bind}]
259 my $bound_identity_col =
260 first { $bind_info->{$_}{is_auto_increment} }
264 my $columns_info = blessed $ident && $ident->columns_info;
268 first { $columns_info->{$_}{is_auto_increment} }
272 if (($op eq 'insert' && $bound_identity_col) ||
273 ($op eq 'update' && exists $args->[0]{$identity_col})) {
275 $self->_set_table_identity_sql($op => $table, 'on'),
277 $self->_set_table_identity_sql($op => $table, 'off'),
281 if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
282 (not $self->{insert_bulk})) {
285 $self->_fetch_identity_sql($ident, $identity_col);
288 return ($sql, $bind);
291 sub _set_table_identity_sql {
292 my ($self, $op, $table, $on_off) = @_;
294 return sprintf 'SET IDENTITY_%s %s %s',
295 uc($op), $self->sql_maker->_quote($table), uc($on_off);
298 # Stolen from SQLT, with some modifications. This is a makeshift
299 # solution before a sane type-mapping library is available, thus
300 # the 'our' for easy overrides.
301 our %TYPE_MAPPING = (
304 varchar => 'varchar',
305 varchar2 => 'varchar',
306 timestamp => 'datetime',
308 real => 'double precision',
311 tinyint => 'smallint',
312 float => 'double precision',
314 bigserial => 'numeric',
315 boolean => 'varchar',
319 sub _native_data_type {
320 my ($self, $type) = @_;
323 $type =~ s/\s* identity//x;
325 return uc($TYPE_MAPPING{$type} || $type);
328 sub _fetch_identity_sql {
329 my ($self, $source, $col) = @_;
331 return sprintf ("SELECT MAX(%s) FROM %s",
332 map { $self->sql_maker->_quote ($_) } ($col, $source->from)
340 my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
342 if ($op eq 'insert') {
343 $self->_identity($sth->fetchrow_array);
347 return wantarray ? ($rv, $sth, @bind) : $rv;
350 sub last_insert_id { shift->_identity }
352 # handles TEXT/IMAGE and transaction for last_insert_id
355 my ($source, $to_insert) = @_;
357 my $columns_info = $source->columns_info;
360 (first { $columns_info->{$_}{is_auto_increment} }
361 keys %$columns_info )
364 # check for empty insert
365 # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
366 # try to insert explicit 'DEFAULT's instead (except for identity, timestamp
367 # and computed columns)
368 if (not %$to_insert) {
369 for my $col ($source->columns) {
370 next if $col eq $identity_col;
372 my $info = $source->column_info($col);
374 next if ref $info->{default_value} eq 'SCALAR'
375 || (exists $info->{data_type} && (not defined $info->{data_type}));
377 next if $info->{data_type} && $info->{data_type} =~ /^timestamp\z/i;
379 $to_insert->{$col} = \'DEFAULT';
383 my $blob_cols = $self->_remove_blob_cols($source, $to_insert);
385 # do we need the horrific SELECT MAX(COL) hack?
386 my $dumb_last_insert_id =
388 && (not exists $to_insert->{$identity_col})
389 && ($self->_identity_method||'') ne '@@IDENTITY';
391 my $next = $self->next::can;
393 # we are already in a transaction, or there are no blobs
394 # and we don't need the PK - just (try to) do it
395 if ($self->{transaction_depth}
396 || (!$blob_cols && !$dumb_last_insert_id)
398 return $self->_insert (
399 $next, $source, $to_insert, $blob_cols, $identity_col
403 # otherwise use the _writer_storage to do the insert+transaction on another
405 my $guard = $self->_writer_storage->txn_scope_guard;
407 my $updated_cols = $self->_writer_storage->_insert (
408 $next, $source, $to_insert, $blob_cols, $identity_col
411 $self->_identity($self->_writer_storage->_identity);
415 return $updated_cols;
419 my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;
421 my $updated_cols = $self->$next ($source, $to_insert);
425 ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
430 $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;
432 return $updated_cols;
437 my ($source, $fields, $where, @rest) = @_;
439 my $wantarray = wantarray;
441 my $blob_cols = $self->_remove_blob_cols($source, $fields);
443 my $table = $source->name;
445 my $columns_info = $source->columns_info;
448 first { $columns_info->{$_}{is_auto_increment} }
451 my $is_identity_update = $identity_col && defined $fields->{$identity_col};
453 return $self->next::method(@_) unless $blob_cols;
455 # If there are any blobs in $where, Sybase will return a descriptive error
457 # XXX blobs can still be used with a LIKE query, and this should be handled.
459 # update+blob update(s) done atomically on separate connection
460 $self = $self->_writer_storage;
462 my $guard = $self->txn_scope_guard;
464 # First update the blob columns to be updated to '' (taken from $fields, where
465 # it is originally put by _remove_blob_cols.)
466 my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;
468 # We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
470 $self->next::method($source, \%blobs_to_empty, $where, @rest);
472 # Now update the blobs before the other columns in case the update of other
473 # columns makes the search condition invalid.
474 $self->_update_blobs($source, $blob_cols, $where);
479 @res = $self->next::method(@_);
481 elsif (defined $wantarray) {
482 $res[0] = $self->next::method(@_);
485 $self->next::method(@_);
491 return $wantarray ? @res : $res[0];
496 my ($source, $cols, $data) = @_;
498 my $columns_info = $source->columns_info;
501 first { $columns_info->{$_}{is_auto_increment} }
504 my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;
506 my @source_columns = $source->columns;
509 $self->_bulk_storage &&
510 $self->_get_dbh->{syb_has_blk};
512 if ((not $use_bulk_api)
514 (ref($self->_dbi_connect_info->[0]) eq 'CODE')
516 (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
518 Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
519 regular array inserts.
521 $self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
524 if (not $use_bulk_api) {
525 my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);
527 # _execute_array uses a txn anyway, but it ends too early in case we need to
528 # select max(col) to get the identity for inserting blobs.
529 ($self, my $guard) = $self->{transaction_depth} == 0 ?
530 ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
534 local $self->{insert_bulk} = 1;
536 $self->next::method(@_);
539 if ($is_identity_insert) {
540 $self->_insert_blobs_array ($source, $blob_cols, $cols, $data);
543 my @cols_with_identities = (@$cols, $identity_col);
545 ## calculate identities
546 # XXX This assumes identities always increase by 1, which may or may not be true.
548 my ($last_identity) =
549 $self->_dbh->selectrow_array (
550 $self->_fetch_identity_sql($source, $identity_col)
552 my @identities = (($last_identity - @$data + 1) .. $last_identity);
554 my @data_with_identities = map [@$_, shift @identities], @$data;
556 $self->_insert_blobs_array (
557 $source, $blob_cols, \@cols_with_identities, \@data_with_identities
562 $guard->commit if $guard;
567 # otherwise, use the bulk API
569 # rearrange @$data so that columns are in database order
571 @orig_idx{@$cols} = 0..$#$cols;
574 @new_idx{@source_columns} = 0..$#source_columns;
577 for my $datum (@$data) {
579 for my $col (@source_columns) {
580 # identity data will be 'undef' if not $is_identity_insert
581 # columns with defaults will also be 'undef'
582 $new_datum->[ $new_idx{$col} ] =
583 exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
585 push @new_data, $new_datum;
588 # bcp identity index is 1-based
589 my $identity_idx = exists $new_idx{$identity_col} ?
590 $new_idx{$identity_col} + 1 : 0;
592 ## Set a client-side conversion error handler, straight from DBD::Sybase docs.
593 # This ignores any data conversion errors detected by the client side libs, as
594 # they are usually harmless.
595 my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
596 Sub::Name::subname insert_bulk => sub {
597 my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
599 return 1 if $errno == 36;
602 "Layer: $layer, Origin: $origin, Severity: $severity, Error: $errno" .
603 ($errmsg ? "\n$errmsg" : '') .
604 ($osmsg ? "\n$osmsg" : '') .
605 ($blkmsg ? "\n$blkmsg" : '');
612 my $bulk = $self->_bulk_storage;
614 my $guard = $bulk->txn_scope_guard;
616 ## XXX get this to work instead of our own $sth
617 ## will require SQLA or *Hacks changes for ordered columns
618 # $bulk->next::method($source, \@source_columns, \@new_data, {
619 # syb_bcp_attribs => {
620 # identity_flag => $is_identity_insert,
621 # identity_column => $identity_idx,
624 my $sql = 'INSERT INTO ' .
625 $bulk->sql_maker->_quote($source->name) . ' (' .
626 # colname list is ignored for BCP, but does no harm
627 (join ', ', map $bulk->sql_maker->_quote($_), @source_columns) . ') '.
628 ' VALUES ('. (join ', ', ('?') x @source_columns) . ')';
630 ## XXX there's a bug in the DBD::Sybase bulk support that makes $sth->finish for
631 ## a prepare_cached statement ineffective. Replace with ->sth when fixed, or
632 ## better yet the version above. Should be fixed in DBD::Sybase.
633 my $sth = $bulk->_get_dbh->prepare($sql,
637 identity_flag => $is_identity_insert,
638 identity_column => $identity_idx,
645 map [ $_, $idx++ ], @source_columns;
648 $self->_execute_array(
649 $source, $sth, \@bind, \@source_columns, \@new_data, sub {
654 $bulk->_query_end($sql);
659 DBD::Sybase::set_cslib_cb($orig_cslib_cb);
661 if ($exception =~ /-Y option/) {
662 my $w = 'Sybase bulk API operation failed due to character set incompatibility, '
663 . 'reverting to regular array inserts. Try unsetting the LANG environment variable'
665 $w .= "\n$exception" if $self->debug;
668 $self->_bulk_storage(undef);
673 # rollback makes the bulkLogin connection unusable
674 $self->_bulk_storage->disconnect;
675 $self->throw_exception($exception);
679 sub _dbh_execute_array {
680 my ($self, $sth, $tuple_status, $cb) = @_;
682 my $rv = $self->next::method($sth, $tuple_status);
688 # Make sure blobs are not bound as placeholders, and return any non-empty ones
690 sub _remove_blob_cols {
691 my ($self, $source, $fields) = @_;
695 for my $col (keys %$fields) {
696 if ($self->_is_lob_column($source, $col)) {
697 my $blob_val = delete $fields->{$col};
698 if (not defined $blob_val) {
699 $fields->{$col} = \'NULL';
702 $fields->{$col} = \"''";
703 $blob_cols{$col} = $blob_val unless $blob_val eq '';
708 return %blob_cols ? \%blob_cols : undef;
711 # same for insert_bulk
712 sub _remove_blob_cols_array {
713 my ($self, $source, $cols, $data) = @_;
717 for my $i (0..$#$cols) {
718 my $col = $cols->[$i];
720 if ($self->_is_lob_column($source, $col)) {
721 for my $j (0..$#$data) {
722 my $blob_val = delete $data->[$j][$i];
723 if (not defined $blob_val) {
724 $data->[$j][$i] = \'NULL';
727 $data->[$j][$i] = \"''";
728 $blob_cols[$j][$i] = $blob_val
729 unless $blob_val eq '';
735 return @blob_cols ? \@blob_cols : undef;
739 my ($self, $source, $blob_cols, $where) = @_;
741 my @primary_cols = try
742 { $source->_pri_cols }
744 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
747 # check if we're updating a single row by PK
748 my $pk_cols_in_where = 0;
749 for my $col (@primary_cols) {
750 $pk_cols_in_where++ if defined $where->{$col};
754 if ($pk_cols_in_where == @primary_cols) {
756 @row_to_update{@primary_cols} = @{$where}{@primary_cols};
757 @rows = \%row_to_update;
759 my $cursor = $self->select ($source, \@primary_cols, $where, {});
761 my %row; @row{@primary_cols} = @$_; \%row
765 for my $row (@rows) {
766 $self->_insert_blobs($source, $blob_cols, $row);
771 my ($self, $source, $blob_cols, $row) = @_;
772 my $dbh = $self->_get_dbh;
774 my $table = $source->name;
777 my @primary_cols = try
778 { $source->_pri_cols }
780 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
783 $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
784 if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
786 for my $col (keys %$blob_cols) {
787 my $blob = $blob_cols->{$col};
789 my %where = map { ($_, $row{$_}) } @primary_cols;
791 my $cursor = $self->select ($source, [$col], \%where, {});
793 my $sth = $cursor->sth;
796 $self->throw_exception(
797 "Could not find row in table '$table' for blob update:\n"
804 $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
807 $sth->func('ct_prepare_send') or die $sth->errstr;
809 my $log_on_update = $self->_blob_log_on_update;
810 $log_on_update = 1 if not defined $log_on_update;
812 $sth->func('CS_SET', 1, {
813 total_txtlen => length($blob),
814 log_on_update => $log_on_update
815 }, 'ct_data_info') or die $sth->errstr;
817 $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;
819 $sth->func('ct_finish_send') or die $sth->errstr;
822 if ($self->using_freetds) {
823 $self->throw_exception (
824 "TEXT/IMAGE operation failed, probably because you are using FreeTDS: $_"
828 $self->throw_exception($_);
832 $sth->finish if $sth;
837 sub _insert_blobs_array {
838 my ($self, $source, $blob_cols, $cols, $data) = @_;
840 for my $i (0..$#$data) {
841 my $datum = $data->[$i];
844 @row{ @$cols } = @$datum;
847 for my $j (0..$#$cols) {
848 if (exists $blob_cols->[$i][$j]) {
849 $blob_vals{ $cols->[$j] } = $blob_cols->[$i][$j];
853 $self->_insert_blobs ($source, \%blob_vals, \%row);
857 =head2 connect_call_datetime_setup
861 on_connect_call => 'datetime_setup'
863 In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:
865 $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
866 $dbh->do('set dateformat mdy'); # input fmt: 08/13/1979 18:08:55.080
868 On connection for use with L<DBIx::Class::InflateColumn::DateTime>, using
869 L<DateTime::Format::Sybase>, which you will need to install.
871 This works for both C<DATETIME> and C<SMALLDATETIME> columns, although
872 C<SMALLDATETIME> columns only have minute precision.
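A column declared roughly like this (a sketch; the column name is made up) will then inflate to L<DateTime> objects:

  __PACKAGE__->add_columns(
    last_updated => {
      data_type   => 'datetime',
      is_nullable => 1,
    },
  );

  # with InflateColumn::DateTime loaded in the Result class:
  my $dt = $row->last_updated;                     # a DateTime object
  $row->update({ last_updated => DateTime->now });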
877 my $old_dbd_warned = 0;
879 sub connect_call_datetime_setup {
881 my $dbh = $self->_get_dbh;
883 if ($dbh->can('syb_date_fmt')) {
884 # amazingly, this works with FreeTDS
885 $dbh->syb_date_fmt('ISO_strict');
886 } elsif (not $old_dbd_warned) {
887 carp "Your DBD::Sybase is too old to support ".
888 "DBIx::Class::InflateColumn::DateTime, please upgrade!";
892 $dbh->do('SET DATEFORMAT mdy');
898 sub datetime_parser_type { "DateTime::Format::Sybase" }
900 # ->begin_work and such have no effect with FreeTDS but we run them anyway to
901 # let the DBD keep any state it needs to.
903 # If they ever do start working, the extra statements will do no harm (because
904 # Sybase supports nested transactions.)
906 sub _dbh_begin_work {
909 # bulkLogin=1 connections are always in a transaction, and can only call BEGIN
910 # TRAN once. However, we need to make sure there's a $dbh.
911 return if $self->_is_bulk_storage && $self->_dbh && $self->_began_bulk_work;
913 $self->next::method(@_);
915 if ($self->using_freetds) {
916 $self->_get_dbh->do('BEGIN TRAN');
919 $self->_began_bulk_work(1) if $self->_is_bulk_storage;
924 if ($self->using_freetds) {
925 $self->_dbh->do('COMMIT');
927 return $self->next::method(@_);
932 if ($self->using_freetds) {
933 $self->_dbh->do('ROLLBACK');
935 return $self->next::method(@_);
938 # savepoint support using ASE syntax
941 my ($self, $name) = @_;
943 $self->_get_dbh->do("SAVE TRANSACTION $name");
946 # A new SAVE TRANSACTION with the same name releases the previous one.
947 sub _svp_release { 1 }
950 my ($self, $name) = @_;
952 $self->_get_dbh->do("ROLLBACK TRANSACTION $name");
957 =head1 Schema::Loader Support
959 As of version C<0.05000>, L<DBIx::Class::Schema::Loader> should work well with
960 most (if not all) versions of Sybase ASE.
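For example, dumping a schema from an ASE database might look like this (a sketch; the target class name and DSN are placeholders):

  use DBIx::Class::Schema::Loader 'make_schema_at';

  make_schema_at(
    'My::Schema',                                  # hypothetical class
    { dump_directory => './lib' },
    [ 'dbi:Sybase:server=SYBASE;database=mydb', $user, $password ],
  );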
964 This driver supports L<DBD::Sybase> compiled against FreeTDS
965 (L<http://www.freetds.org/>) to the best of our ability, however it is
966 recommended that you recompile L<DBD::Sybase> against the Sybase Open Client
967 libraries. They are a part of the Sybase ASE distribution.
969 The Open Client FAQ is here:
970 L<http://www.isug.com/Sybase_FAQ/ASE/section7.html>.
972 Sybase ASE for Linux (which comes with the Open Client libraries) may be
973 downloaded here: L<http://response.sybase.com/forms/ASE_Linux_Download>.
975 To see if you're using FreeTDS, check C<< $schema->storage->using_freetds >>, or run:
977 perl -MDBI -le 'my $dbh = DBI->connect($dsn, $user, $pass); print $dbh->{syb_oc_version}'
979 Some versions of the libraries involved will not support placeholders, in which
980 case the storage will be reblessed to
981 L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
983 In some configurations, placeholders will work but will throw implicit type
984 conversion errors for anything that's not expecting a string. In such a case,
985 the C<auto_cast> option from L<DBIx::Class::Storage::DBI::AutoCast> is set
986 automatically; you may also enable it explicitly on connection with
987 L<DBIx::Class::Storage::DBI::AutoCast/connect_call_set_auto_cast>. The type info
988 for the C<CAST>s is taken from the L<DBIx::Class::ResultSource/data_type>
989 definitions in your Result classes, and is mapped to a Sybase type (if it isn't
990 one already) using a mapping based on L<SQL::Translator>.
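If you want the C<CAST>s on all connections regardless, you can also request them explicitly in your L<connect_info|DBIx::Class::Storage::DBI/connect_info> (a sketch):

  on_connect_call => 'set_auto_cast'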
992 In other configurations, placeholders will work just as they do with the Sybase
993 Open Client libraries.
995 Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS.
997 =head1 INSERTS WITH PLACEHOLDERS
999 With placeholders enabled, inserts are done in a transaction so that there are
1000 no concurrency issues with getting the inserted identity value using
1001 C<SELECT MAX(col)>, which is the only way to get the C<IDENTITY> value in this case.
1004 In addition, they are done on a separate connection so that it's possible to
1005 have active cursors when doing an insert.
1007 When using C<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>, transactions
1008 are disabled, as there are no concurrency issues with C<SELECT @@IDENTITY>,
1009 which is a session variable.
1013 Due to limitations of the TDS protocol, L<DBD::Sybase>, or both, you cannot
1014 begin a transaction while there are active cursors, nor can you use multiple
1015 active cursors within a transaction. An active cursor is, for example, a
1016 L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or
1017 C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>.
1019 For example, this will not work:
1021 $schema->txn_do(sub {
1022 my $rs = $schema->resultset('Book');
1023 while (my $row = $rs->next) {
1024 $schema->resultset('MetaData')->create({
1025 book_id => $row->id,
1033 my $first_row = $large_rs->first;
1034 $schema->txn_do(sub { ... });
1036 Transactions done for inserts in C<AutoCommit> mode when placeholders are in use
1037 are not affected, as they are done on an extra database handle.
1043 =item * use L<DBIx::Class::Storage::DBI::Replicated>
1045 =item * L<connect|DBIx::Class::Schema/connect> another L<Schema|DBIx::Class::Schema>
1047 =item * load the data from your cursor with L<DBIx::Class::ResultSet/all>
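For instance, the last workaround might look like this (a sketch, reusing the hypothetical C<Book> and C<MetaData> sources from the example above):

  my @books = $schema->resultset('Book')->all;   # cursor exhausted up front

  $schema->txn_do(sub {
    $schema->resultset('MetaData')->create({ book_id => $_->id })
      for @books;
  });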
1051 =head1 MAXIMUM CONNECTIONS
1053 The TDS protocol makes separate connections to the server for active statements
1054 in the background. By default the number of such connections is limited to 25,
1055 on both the client side and the server side.
1057 This is a bit too low for a complex L<DBIx::Class> application, so on connection
1058 the client side setting is set to C<256> (see L<DBD::Sybase/maxConnect>.) You
1059 can override it to whatever setting you like in the DSN.
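For example (a sketch; the server and database names are placeholders):

  my $dsn = 'dbi:Sybase:server=SYBASE;database=mydb;maxConnect=512';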
1062 L<http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.ase_15.0.sag1/html/sag1/sag1272.htm>
1063 for information on changing the setting on the server side.
1067 See L</connect_call_datetime_setup> to set up date formats
1068 for L<DBIx::Class::InflateColumn::DateTime>.
1070 =head1 TEXT/IMAGE COLUMNS
1072 L<DBD::Sybase> compiled with FreeTDS will B<NOT> allow you to insert or update
1073 C<TEXT/IMAGE> columns.
1075 Setting C<< $dbh->{LongReadLen} >> will also not work with FreeTDS; use either:
1077 $schema->storage->dbh->do("SET TEXTSIZE $bytes");
1081 $schema->storage->set_textsize($bytes);
1085 However, the C<LongReadLen> you pass in
1086 L<connect_info|DBIx::Class::Storage::DBI/connect_info> is used to execute the
1087 equivalent C<SET TEXTSIZE> command on connection.
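That is, something like the following (a sketch) results in a one-megabyte C<SET TEXTSIZE> being issued on connect:

  My::Schema->connect($dsn, $user, $password, { LongReadLen => 1024 * 1024 });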
1089 See L</connect_call_blob_setup> for a
1090 L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting you need to work
1091 with C<IMAGE> columns.
1095 The experimental L<DBD::Sybase> Bulk API support is used for
1096 L<populate|DBIx::Class::ResultSet/populate> in B<void> context, in a transaction
1097 on a separate connection.
1099 To use this feature effectively, use a large number of rows for each
1100 L<populate|DBIx::Class::ResultSet/populate> call, e.g.:
1102 while (my $rows = $data_source->get_100_rows()) {
1103 $rs->populate($rows);
1106 B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns>
1107 calls in your C<Result> classes B<must> list columns in database order for this
1108 to work. Also, you may have to unset the C<LANG> environment variable before
1109 loading your app, if it doesn't match the character set of your database.
1111 When inserting IMAGE columns using this method, you'll need to use
1112 L</connect_call_blob_setup> as well.
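A void-context call along these lines (a sketch; the C<Book> source is hypothetical and its columns are assumed to be declared in database order) goes through the Bulk API:

  $schema->resultset('Book')->populate([
    [ qw/id title author_id/ ],              # arrayref-of-arrayrefs form
    [ 1, 'First Title',  7 ],
    [ 2, 'Second Title', 8 ],
  ]);                                        # void context - no return value used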
1114 =head1 COMPUTED COLUMNS
1116 If you have columns such as:
1118 created_dtm AS getdate()
1120 represent them in your Result classes as:
1124 default_value => \'getdate()',
1128 The C<data_type> must exist and must be C<undef>. Then empty inserts will work
1129 on tables with such columns.
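Put together, such a column declaration might look like this (a sketch based on the snippet above):

  __PACKAGE__->add_columns(
    created_dtm => {
      data_type        => undef,
      default_value    => \'getdate()',
      is_nullable      => 0,
      inflate_datetime => 0,
    },
  );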
1131 =head1 TIMESTAMP COLUMNS
1133 C<timestamp> columns in Sybase ASE are not really timestamps, see:
1134 L<http://dba.fyicenter.com/Interview-Questions/SYBASE/The_timestamp_datatype_in_Sybase_.html>.
1136 They should be defined in your Result classes as:
1139 data_type => 'timestamp',
1141 inflate_datetime => 0,
1144 The C<< inflate_datetime => 0 >> is necessary if you use
1145 L<DBIx::Class::InflateColumn::DateTime> (and most people do) and still want to
1146 be able to read these values.
1148 The values will come back as hexadecimal.
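A full declaration might look like this (a sketch; the column name is made up):

  __PACKAGE__->add_columns(
    ts => {
      data_type        => 'timestamp',
      is_nullable      => 0,
      inflate_datetime => 0,
    },
  );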
1156 Transitions to AutoCommit=0 (starting a transaction) mode by exhausting
1157 any active cursors, using eager cursors.
1161 Real limits and limited counts using stored procedures deployed on startup.
1165 Adaptive Server Anywhere (ASA) support
1169 Blob update with a LIKE query on a blob, without invalidating the WHERE condition.
1173 bulk_insert using prepare_cached (see comments.)
1179 See L<DBIx::Class/CONTRIBUTORS>.
1183 You may distribute this code under the same terms as Perl itself.