057db5ce 1package DBIx::Class::Storage::DBI::Sybase::ASE;
2
3use strict;
4use warnings;
5
6use base qw/
7 DBIx::Class::Storage::DBI::Sybase
8 DBIx::Class::Storage::DBI::AutoCast
9/;
10use mro 'c3';
11use Carp::Clan qw/^DBIx::Class/;
6298a324 12use Scalar::Util 'blessed';
13use List::Util 'first';
057db5ce 14use Sub::Name();
6298a324 15use Data::Dumper::Concise 'Dumper';
ed7ab0f4 16use Try::Tiny;
fd323bf1 17use namespace::clean;
057db5ce 18
19__PACKAGE__->mk_group_accessors('simple' =>
20 qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
21 _bulk_storage _is_bulk_storage _began_bulk_work
22 _bulk_disabled_due_to_coderef_connect_info_warned
23 _identity_method/
24);
25
26my @also_proxy_to_extra_storages = qw/
27 connect_call_set_auto_cast auto_cast connect_call_blob_setup
28 connect_call_datetime_setup
29
30 disconnect _connect_info _sql_maker _sql_maker_opts disable_sth_caching
31 auto_savepoint unsafe cursor_class debug debugobj schema
32/;
33
34=head1 NAME
35
36DBIx::Class::Storage::DBI::Sybase::ASE - Sybase ASE SQL Server support for
37DBIx::Class
38
39=head1 SYNOPSIS
40
41This subclass supports L<DBD::Sybase> for real (non-Microsoft) Sybase databases.
42
43=head1 DESCRIPTION
44
45If your version of Sybase does not support placeholders, then your storage will
46be reblessed to L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
You can also enable that driver explicitly; see its documentation for more
details.
49
50With this driver there is unfortunately no way to get the C<last_insert_id>
51without doing a C<SELECT MAX(col)>. This is done safely in a transaction
(locking the table). See L</INSERTS WITH PLACEHOLDERS>.
53
8384a713 54A recommended L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting:
057db5ce 55
56 on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]]
57
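A complete connection using this setting might look like the sketch below;
the DSN, credentials and schema class are placeholders, not part of this
driver's API:

  my $schema = My::Schema->connect(
    'dbi:Sybase:server=SYBASE;database=mydb',    # hypothetical DSN
    $user,
    $pass,
    {
      AutoCommit      => 1,
      on_connect_call => [
        ['datetime_setup'],
        ['blob_setup', log_on_update => 0],
      ],
    },
  );
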
58=head1 METHODS
59
60=cut
61
62sub _rebless {
63 my $self = shift;
64
65 my $no_bind_vars = __PACKAGE__ . '::NoBindVars';
66
67 if ($self->using_freetds) {
68 carp <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};
69
70You are using FreeTDS with Sybase.
71
72We will do our best to support this configuration, but please consider this
73support experimental.
74
75TEXT/IMAGE columns will definitely not work.
76
77You are encouraged to recompile DBD::Sybase with the Sybase Open Client libraries
78instead.
79
80See perldoc DBIx::Class::Storage::DBI::Sybase::ASE for more details.
81
82To turn off this warning set the DBIC_SYBASE_FREETDS_NOWARN environment
83variable.
84EOF
85
bbdda281 86 if (not $self->_use_typeless_placeholders) {
87 if ($self->_use_placeholders) {
057db5ce 88 $self->auto_cast(1);
89 }
90 else {
91 $self->ensure_class_loaded($no_bind_vars);
92 bless $self, $no_bind_vars;
93 $self->_rebless;
94 }
95 }
96 }
97
98 elsif (not $self->_get_dbh->{syb_dynamic_supported}) {
99 # not necessarily FreeTDS, but no placeholders nevertheless
100 $self->ensure_class_loaded($no_bind_vars);
101 bless $self, $no_bind_vars;
102 $self->_rebless;
103 }
104 # this is highly unlikely, but we check just in case
bbdda281 105 elsif (not $self->_use_typeless_placeholders) {
057db5ce 106 $self->auto_cast(1);
107 }
108}
109
110sub _init {
111 my $self = shift;
112 $self->_set_max_connect(256);
113
114# create storage for insert/(update blob) transactions,
115# unless this is that storage
116 return if $self->_is_extra_storage;
117
118 my $writer_storage = (ref $self)->new;
119
120 $writer_storage->_is_extra_storage(1);
121 $writer_storage->connect_info($self->connect_info);
122 $writer_storage->auto_cast($self->auto_cast);
123
124 $self->_writer_storage($writer_storage);
125
126# create a bulk storage unless connect_info is a coderef
127 return if ref($self->_dbi_connect_info->[0]) eq 'CODE';
128
129 my $bulk_storage = (ref $self)->new;
130
131 $bulk_storage->_is_extra_storage(1);
132 $bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
133 $bulk_storage->connect_info($self->connect_info);
134
# appending bulkLogin=1 to the DSN string below is why connect_info cannot be a coderef (see above)
136 $bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';
137
138 $self->_bulk_storage($bulk_storage);
139}
140
141for my $method (@also_proxy_to_extra_storages) {
142 no strict 'refs';
143 no warnings 'redefine';
144
145 my $replaced = __PACKAGE__->can($method);
146
147 *{$method} = Sub::Name::subname $method => sub {
148 my $self = shift;
149 $self->_writer_storage->$replaced(@_) if $self->_writer_storage;
150 $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage;
151 return $self->$replaced(@_);
152 };
153}
154
3f528319 155sub _sql_maker_opts {
156 my ( $self, $opts ) = @_;
157
158 if ( $opts ) {
159 $self->{_sql_maker_opts} = { %$opts };
160 }
161
162 return { limit_dialect => 'RowCountOrGenericSubQ', %{$self->{_sql_maker_opts}||{}} };
163}
164
057db5ce 165sub disconnect {
166 my $self = shift;
167
# Even though we call $sth->finish for uses of the bulk API, there's still an
169# "active statement" warning on disconnect, which we throw away here.
170# This is due to the bug described in insert_bulk.
171# Currently a noop because 'prepare' is used instead of 'prepare_cached'.
172 local $SIG{__WARN__} = sub {
173 warn $_[0] unless $_[0] =~ /active statement/i;
174 } if $self->_is_bulk_storage;
175
176# so that next transaction gets a dbh
177 $self->_began_bulk_work(0) if $self->_is_bulk_storage;
178
179 $self->next::method;
180}
181
182# Set up session settings for Sybase databases for the connection.
183#
184# Make sure we have CHAINED mode turned on if AutoCommit is off in non-FreeTDS
185# DBD::Sybase (since we don't know how DBD::Sybase was compiled.) If however
186# we're using FreeTDS, CHAINED mode turns on an implicit transaction which we
187# only want when AutoCommit is off.
188#
189# Also SET TEXTSIZE for FreeTDS because LongReadLen doesn't work.
190sub _run_connection_actions {
191 my $self = shift;
192
193 if ($self->_is_bulk_storage) {
194# this should be cleared on every reconnect
195 $self->_began_bulk_work(0);
196 return;
197 }
198
199 if (not $self->using_freetds) {
200 $self->_dbh->{syb_chained_txn} = 1;
201 } else {
202 # based on LongReadLen in connect_info
203 $self->set_textsize;
204
205 if ($self->_dbh_autocommit) {
206 $self->_dbh->do('SET CHAINED OFF');
207 } else {
208 $self->_dbh->do('SET CHAINED ON');
209 }
210 }
211
212 $self->next::method(@_);
213}
214
215=head2 connect_call_blob_setup
216
217Used as:
218
219 on_connect_call => [ [ 'blob_setup', log_on_update => 0 ] ]
220
221Does C<< $dbh->{syb_binary_images} = 1; >> to return C<IMAGE> data as raw binary
222instead of as a hex string.
223
224Recommended.
225
226Also sets the C<log_on_update> value for blob write operations. The default is
227C<1>, but C<0> is better if your database is configured for it.
228
229See
230L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.
231
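With blobs set up this way, C<IMAGE> data can be passed as a plain scalar of
raw bytes; a minimal sketch (the result class and column names are made up):

  $schema->resultset('Photo')->create({
    name  => 'avatar',
    image => $raw_png_bytes,    # written via the blob API, not a placeholder
  });
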
232=cut
233
234sub connect_call_blob_setup {
235 my $self = shift;
236 my %args = @_;
237 my $dbh = $self->_dbh;
238 $dbh->{syb_binary_images} = 1;
239
240 $self->_blob_log_on_update($args{log_on_update})
241 if exists $args{log_on_update};
242}
243
244sub _is_lob_type {
245 my $self = shift;
246 my $type = shift;
247 $type && $type =~ /(?:text|image|lob|bytea|binary|memo)/i;
248}
249
250sub _is_lob_column {
251 my ($self, $source, $column) = @_;
252
253 return $self->_is_lob_type($source->column_info($column)->{data_type});
254}
255
256sub _prep_for_execute {
257 my $self = shift;
258 my ($op, $extra_bind, $ident, $args) = @_;
259
260 my ($sql, $bind) = $self->next::method (@_);
261
6298a324 262 my $table = blessed $ident ? $ident->from : $ident;
057db5ce 263
264 my $bind_info = $self->_resolve_column_info(
265 $ident, [map $_->[0], @{$bind}]
266 );
6298a324 267 my $bound_identity_col =
268 first { $bind_info->{$_}{is_auto_increment} }
269 keys %$bind_info
057db5ce 270 ;
6298a324 271 my $identity_col =
272 blessed $ident &&
273 first { $ident->column_info($_)->{is_auto_increment} } $ident->columns
057db5ce 274 ;
275
276 if (($op eq 'insert' && $bound_identity_col) ||
277 ($op eq 'update' && exists $args->[0]{$identity_col})) {
278 $sql = join ("\n",
279 $self->_set_table_identity_sql($op => $table, 'on'),
280 $sql,
281 $self->_set_table_identity_sql($op => $table, 'off'),
282 );
283 }
284
285 if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
286 (not $self->{insert_bulk})) {
287 $sql =
288 "$sql\n" .
289 $self->_fetch_identity_sql($ident, $identity_col);
290 }
291
292 return ($sql, $bind);
293}
294
295sub _set_table_identity_sql {
296 my ($self, $op, $table, $on_off) = @_;
297
298 return sprintf 'SET IDENTITY_%s %s %s',
299 uc($op), $self->sql_maker->_quote($table), uc($on_off);
300}
301
302# Stolen from SQLT, with some modifications. This is a makeshift
303# solution before a sane type-mapping library is available, thus
304# the 'our' for easy overrides.
305our %TYPE_MAPPING = (
306 number => 'numeric',
307 money => 'money',
308 varchar => 'varchar',
309 varchar2 => 'varchar',
310 timestamp => 'datetime',
311 text => 'varchar',
312 real => 'double precision',
313 comment => 'text',
314 bit => 'bit',
315 tinyint => 'smallint',
316 float => 'double precision',
317 serial => 'numeric',
318 bigserial => 'numeric',
319 boolean => 'varchar',
320 long => 'varchar',
321);
322
323sub _native_data_type {
324 my ($self, $type) = @_;
325
326 $type = lc $type;
327 $type =~ s/\s* identity//x;
328
329 return uc($TYPE_MAPPING{$type} || $type);
330}
331
332sub _fetch_identity_sql {
333 my ($self, $source, $col) = @_;
334
335 return sprintf ("SELECT MAX(%s) FROM %s",
336 map { $self->sql_maker->_quote ($_) } ($col, $source->from)
337 );
338}
339
340sub _execute {
341 my $self = shift;
342 my ($op) = @_;
343
344 my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
345
346 if ($op eq 'insert') {
347 $self->_identity($sth->fetchrow_array);
348 $sth->finish;
349 }
350
351 return wantarray ? ($rv, $sth, @bind) : $rv;
352}
353
354sub last_insert_id { shift->_identity }
355
356# handles TEXT/IMAGE and transaction for last_insert_id
357sub insert {
358 my $self = shift;
359 my ($source, $to_insert) = @_;
360
6298a324 361 my $identity_col =
362 (first { $source->column_info($_)->{is_auto_increment} } $source->columns)
363 || '';
057db5ce 364
365 # check for empty insert
366 # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
6469dabf 367 # try to insert explicit 'DEFAULT's instead (except for identity, timestamp
368 # and computed columns)
057db5ce 369 if (not %$to_insert) {
370 for my $col ($source->columns) {
371 next if $col eq $identity_col;
6469dabf 372
373 my $info = $source->column_info($col);
374
375 next if ref $info->{default_value} eq 'SCALAR'
376 || (exists $info->{data_type} && (not defined $info->{data_type}));
377
378 next if $info->{data_type} && $info->{data_type} =~ /^timestamp\z/i;
379
057db5ce 380 $to_insert->{$col} = \'DEFAULT';
381 }
382 }
383
384 my $blob_cols = $self->_remove_blob_cols($source, $to_insert);
385
386 # do we need the horrific SELECT MAX(COL) hack?
387 my $dumb_last_insert_id =
388 $identity_col
389 && (not exists $to_insert->{$identity_col})
390 && ($self->_identity_method||'') ne '@@IDENTITY';
391
392 my $next = $self->next::can;
393
394 # we are already in a transaction, or there are no blobs
395 # and we don't need the PK - just (try to) do it
396 if ($self->{transaction_depth}
397 || (!$blob_cols && !$dumb_last_insert_id)
398 ) {
399 return $self->_insert (
400 $next, $source, $to_insert, $blob_cols, $identity_col
401 );
402 }
403
404 # otherwise use the _writer_storage to do the insert+transaction on another
405 # connection
406 my $guard = $self->_writer_storage->txn_scope_guard;
407
408 my $updated_cols = $self->_writer_storage->_insert (
409 $next, $source, $to_insert, $blob_cols, $identity_col
410 );
411
412 $self->_identity($self->_writer_storage->_identity);
413
414 $guard->commit;
415
416 return $updated_cols;
417}
418
419sub _insert {
420 my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;
421
422 my $updated_cols = $self->$next ($source, $to_insert);
423
424 my $final_row = {
425 ($identity_col ?
426 ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
427 %$to_insert,
428 %$updated_cols,
429 };
430
431 $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;
432
433 return $updated_cols;
434}
435
436sub update {
437 my $self = shift;
438 my ($source, $fields, $where, @rest) = @_;
439
440 my $wantarray = wantarray;
441
442 my $blob_cols = $self->_remove_blob_cols($source, $fields);
443
444 my $table = $source->name;
445
6298a324 446 my $identity_col =
447 first { $source->column_info($_)->{is_auto_increment} } $source->columns;
057db5ce 448
449 my $is_identity_update = $identity_col && defined $fields->{$identity_col};
450
451 return $self->next::method(@_) unless $blob_cols;
452
453# If there are any blobs in $where, Sybase will return a descriptive error
454# message.
455# XXX blobs can still be used with a LIKE query, and this should be handled.
456
457# update+blob update(s) done atomically on separate connection
458 $self = $self->_writer_storage;
459
460 my $guard = $self->txn_scope_guard;
461
462# First update the blob columns to be updated to '' (taken from $fields, where
463# it is originally put by _remove_blob_cols .)
464 my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;
465
466# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.
467
468 $self->next::method($source, \%blobs_to_empty, $where, @rest);
469
470# Now update the blobs before the other columns in case the update of other
471# columns makes the search condition invalid.
472 $self->_update_blobs($source, $blob_cols, $where);
473
474 my @res;
475 if (%$fields) {
476 if ($wantarray) {
477 @res = $self->next::method(@_);
478 }
479 elsif (defined $wantarray) {
480 $res[0] = $self->next::method(@_);
481 }
482 else {
483 $self->next::method(@_);
484 }
485 }
486
487 $guard->commit;
488
489 return $wantarray ? @res : $res[0];
490}
491
492sub insert_bulk {
493 my $self = shift;
494 my ($source, $cols, $data) = @_;
495
6298a324 496 my $identity_col =
497 first { $source->column_info($_)->{is_auto_increment} } $source->columns;
057db5ce 498
6298a324 499 my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;
057db5ce 500
501 my @source_columns = $source->columns;
502
503 my $use_bulk_api =
504 $self->_bulk_storage &&
505 $self->_get_dbh->{syb_has_blk};
506
507 if ((not $use_bulk_api)
508 &&
509 (ref($self->_dbi_connect_info->[0]) eq 'CODE')
510 &&
511 (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
512 carp <<'EOF';
513Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
514regular array inserts.
515EOF
516 $self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
517 }
518
519 if (not $use_bulk_api) {
520 my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);
521
522# _execute_array uses a txn anyway, but it ends too early in case we need to
523# select max(col) to get the identity for inserting blobs.
524 ($self, my $guard) = $self->{transaction_depth} == 0 ?
525 ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
526 :
527 ($self, undef);
528
529 local $self->{insert_bulk} = 1;
530
531 $self->next::method(@_);
532
533 if ($blob_cols) {
534 if ($is_identity_insert) {
535 $self->_insert_blobs_array ($source, $blob_cols, $cols, $data);
536 }
537 else {
538 my @cols_with_identities = (@$cols, $identity_col);
539
540 ## calculate identities
541 # XXX This assumes identities always increase by 1, which may or may not
542 # be true.
543 my ($last_identity) =
544 $self->_dbh->selectrow_array (
545 $self->_fetch_identity_sql($source, $identity_col)
546 );
547 my @identities = (($last_identity - @$data + 1) .. $last_identity);
548
549 my @data_with_identities = map [@$_, shift @identities], @$data;
550
551 $self->_insert_blobs_array (
552 $source, $blob_cols, \@cols_with_identities, \@data_with_identities
553 );
554 }
555 }
556
557 $guard->commit if $guard;
558
559 return;
560 }
561
562# otherwise, use the bulk API
563
564# rearrange @$data so that columns are in database order
565 my %orig_idx;
566 @orig_idx{@$cols} = 0..$#$cols;
567
568 my %new_idx;
569 @new_idx{@source_columns} = 0..$#source_columns;
570
571 my @new_data;
572 for my $datum (@$data) {
573 my $new_datum = [];
574 for my $col (@source_columns) {
575# identity data will be 'undef' if not $is_identity_insert
576# columns with defaults will also be 'undef'
577 $new_datum->[ $new_idx{$col} ] =
578 exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
579 }
580 push @new_data, $new_datum;
581 }
582
583# bcp identity index is 1-based
584 my $identity_idx = exists $new_idx{$identity_col} ?
585 $new_idx{$identity_col} + 1 : 0;
586
587## Set a client-side conversion error handler, straight from DBD::Sybase docs.
588# This ignores any data conversion errors detected by the client side libs, as
589# they are usually harmless.
590 my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
591 Sub::Name::subname insert_bulk => sub {
592 my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
593
594 return 1 if $errno == 36;
595
596 carp
597 "Layer: $layer, Origin: $origin, Severity: $severity, Error: $errno" .
598 ($errmsg ? "\n$errmsg" : '') .
599 ($osmsg ? "\n$osmsg" : '') .
600 ($blkmsg ? "\n$blkmsg" : '');
601
602 return 0;
603 });
604
4edfce2f 605 my $exception = '';
ed7ab0f4 606 try {
057db5ce 607 my $bulk = $self->_bulk_storage;
608
609 my $guard = $bulk->txn_scope_guard;
610
611## XXX get this to work instead of our own $sth
612## will require SQLA or *Hacks changes for ordered columns
613# $bulk->next::method($source, \@source_columns, \@new_data, {
614# syb_bcp_attribs => {
615# identity_flag => $is_identity_insert,
616# identity_column => $identity_idx,
617# }
618# });
619 my $sql = 'INSERT INTO ' .
620 $bulk->sql_maker->_quote($source->name) . ' (' .
621# colname list is ignored for BCP, but does no harm
622 (join ', ', map $bulk->sql_maker->_quote($_), @source_columns) . ') '.
623 ' VALUES ('. (join ', ', ('?') x @source_columns) . ')';
624
625## XXX there's a bug in the DBD::Sybase bulk support that makes $sth->finish for
626## a prepare_cached statement ineffective. Replace with ->sth when fixed, or
627## better yet the version above. Should be fixed in DBD::Sybase .
628 my $sth = $bulk->_get_dbh->prepare($sql,
629# 'insert', # op
630 {
631 syb_bcp_attribs => {
632 identity_flag => $is_identity_insert,
633 identity_column => $identity_idx,
634 }
635 }
636 );
637
638 my @bind = do {
639 my $idx = 0;
640 map [ $_, $idx++ ], @source_columns;
641 };
642
643 $self->_execute_array(
644 $source, $sth, \@bind, \@source_columns, \@new_data, sub {
645 $guard->commit
646 }
647 );
648
649 $bulk->_query_end($sql);
ed7ab0f4 650 } catch {
651 $exception = shift;
057db5ce 652 };
653
057db5ce 654 DBD::Sybase::set_cslib_cb($orig_cslib_cb);
655
656 if ($exception =~ /-Y option/) {
657 carp <<"EOF";
658
659Sybase bulk API operation failed due to character set incompatibility, reverting
660to regular array inserts:
661
662*** Try unsetting the LANG environment variable.
663
664$exception
665EOF
666 $self->_bulk_storage(undef);
667 unshift @_, $self;
668 goto \&insert_bulk;
669 }
670 elsif ($exception) {
671# rollback makes the bulkLogin connection unusable
672 $self->_bulk_storage->disconnect;
673 $self->throw_exception($exception);
674 }
675}
676
677sub _dbh_execute_array {
678 my ($self, $sth, $tuple_status, $cb) = @_;
679
680 my $rv = $self->next::method($sth, $tuple_status);
681 $cb->() if $cb;
682
683 return $rv;
684}
685
686# Make sure blobs are not bound as placeholders, and return any non-empty ones
687# as a hash.
688sub _remove_blob_cols {
689 my ($self, $source, $fields) = @_;
690
691 my %blob_cols;
692
693 for my $col (keys %$fields) {
694 if ($self->_is_lob_column($source, $col)) {
695 my $blob_val = delete $fields->{$col};
696 if (not defined $blob_val) {
697 $fields->{$col} = \'NULL';
698 }
699 else {
700 $fields->{$col} = \"''";
701 $blob_cols{$col} = $blob_val unless $blob_val eq '';
702 }
703 }
704 }
705
706 return %blob_cols ? \%blob_cols : undef;
707}
708
709# same for insert_bulk
710sub _remove_blob_cols_array {
711 my ($self, $source, $cols, $data) = @_;
712
713 my @blob_cols;
714
715 for my $i (0..$#$cols) {
716 my $col = $cols->[$i];
717
718 if ($self->_is_lob_column($source, $col)) {
719 for my $j (0..$#$data) {
720 my $blob_val = delete $data->[$j][$i];
721 if (not defined $blob_val) {
722 $data->[$j][$i] = \'NULL';
723 }
724 else {
725 $data->[$j][$i] = \"''";
726 $blob_cols[$j][$i] = $blob_val
727 unless $blob_val eq '';
728 }
729 }
730 }
731 }
732
733 return @blob_cols ? \@blob_cols : undef;
734}
735
736sub _update_blobs {
737 my ($self, $source, $blob_cols, $where) = @_;
738
9780718f 739 my @primary_cols = try
740 { $source->_pri_cols }
741 catch {
742 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
743 };
057db5ce 744
745# check if we're updating a single row by PK
746 my $pk_cols_in_where = 0;
747 for my $col (@primary_cols) {
748 $pk_cols_in_where++ if defined $where->{$col};
749 }
750 my @rows;
751
752 if ($pk_cols_in_where == @primary_cols) {
753 my %row_to_update;
754 @row_to_update{@primary_cols} = @{$where}{@primary_cols};
755 @rows = \%row_to_update;
756 } else {
757 my $cursor = $self->select ($source, \@primary_cols, $where, {});
758 @rows = map {
759 my %row; @row{@primary_cols} = @$_; \%row
760 } $cursor->all;
761 }
762
763 for my $row (@rows) {
764 $self->_insert_blobs($source, $blob_cols, $row);
765 }
766}
767
768sub _insert_blobs {
769 my ($self, $source, $blob_cols, $row) = @_;
770 my $dbh = $self->_get_dbh;
771
772 my $table = $source->name;
773
774 my %row = %$row;
9780718f 775 my @primary_cols = try
776 { $source->_pri_cols }
777 catch {
778 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
779 };
057db5ce 780
781 $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
782 if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
783
784 for my $col (keys %$blob_cols) {
785 my $blob = $blob_cols->{$col};
786
787 my %where = map { ($_, $row{$_}) } @primary_cols;
788
789 my $cursor = $self->select ($source, [$col], \%where, {});
790 $cursor->next;
791 my $sth = $cursor->sth;
792
793 if (not $sth) {
057db5ce 794 $self->throw_exception(
795 "Could not find row in table '$table' for blob update:\n"
6298a324 796 . (Dumper \%where)
057db5ce 797 );
798 }
799
9780718f 800 try {
057db5ce 801 do {
802 $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
803 } while $sth->fetch;
804
805 $sth->func('ct_prepare_send') or die $sth->errstr;
806
807 my $log_on_update = $self->_blob_log_on_update;
808 $log_on_update = 1 if not defined $log_on_update;
809
810 $sth->func('CS_SET', 1, {
811 total_txtlen => length($blob),
812 log_on_update => $log_on_update
813 }, 'ct_data_info') or die $sth->errstr;
814
815 $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;
816
817 $sth->func('ct_finish_send') or die $sth->errstr;
9780718f 818 }
819 catch {
057db5ce 820 if ($self->using_freetds) {
821 $self->throw_exception (
9780718f 822 "TEXT/IMAGE operation failed, probably because you are using FreeTDS: $_"
057db5ce 823 );
9780718f 824 }
825 else {
826 $self->throw_exception($_);
057db5ce 827 }
828 }
9780718f 829 finally {
830 $sth->finish if $sth;
831 };
057db5ce 832 }
833}
834
835sub _insert_blobs_array {
836 my ($self, $source, $blob_cols, $cols, $data) = @_;
837
838 for my $i (0..$#$data) {
839 my $datum = $data->[$i];
840
841 my %row;
842 @row{ @$cols } = @$datum;
843
844 my %blob_vals;
845 for my $j (0..$#$cols) {
846 if (exists $blob_cols->[$i][$j]) {
847 $blob_vals{ $cols->[$j] } = $blob_cols->[$i][$j];
848 }
849 }
850
851 $self->_insert_blobs ($source, \%blob_vals, \%row);
852 }
853}
854
855=head2 connect_call_datetime_setup
856
857Used as:
858
859 on_connect_call => 'datetime_setup'
860
8384a713 861In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:
057db5ce 862
863 $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
864 $dbh->do('set dateformat mdy'); # input fmt: 08/13/1979 18:08:55.080
865
These commands are run on connection so that datetime columns work with
L<DBIx::Class::InflateColumn::DateTime>, using L<DateTime::Format::Sybase>,
which you will need to install.
868
869This works for both C<DATETIME> and C<SMALLDATETIME> columns, although
870C<SMALLDATETIME> columns only have minute precision.
871
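A minimal sketch of a datetime column that will inflate once this call and
L<DBIx::Class::InflateColumn::DateTime> are in place (the column name is
illustrative only):

  __PACKAGE__->add_columns(
    created_on => { data_type => 'datetime' },
  );

  # later, with DateTime::Format::Sybase installed:
  my $dt = $row->created_on;    # a DateTime object
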
872=cut
873
874{
875 my $old_dbd_warned = 0;
876
877 sub connect_call_datetime_setup {
878 my $self = shift;
879 my $dbh = $self->_get_dbh;
880
881 if ($dbh->can('syb_date_fmt')) {
882 # amazingly, this works with FreeTDS
883 $dbh->syb_date_fmt('ISO_strict');
884 } elsif (not $old_dbd_warned) {
885 carp "Your DBD::Sybase is too old to support ".
886 "DBIx::Class::InflateColumn::DateTime, please upgrade!";
887 $old_dbd_warned = 1;
888 }
889
890 $dbh->do('SET DATEFORMAT mdy');
891
892 1;
893 }
894}
895
896sub datetime_parser_type { "DateTime::Format::Sybase" }
897
898# ->begin_work and such have no effect with FreeTDS but we run them anyway to
899# let the DBD keep any state it needs to.
900#
901# If they ever do start working, the extra statements will do no harm (because
902# Sybase supports nested transactions.)
903
904sub _dbh_begin_work {
905 my $self = shift;
906
907# bulkLogin=1 connections are always in a transaction, and can only call BEGIN
908# TRAN once. However, we need to make sure there's a $dbh.
909 return if $self->_is_bulk_storage && $self->_dbh && $self->_began_bulk_work;
910
911 $self->next::method(@_);
912
913 if ($self->using_freetds) {
914 $self->_get_dbh->do('BEGIN TRAN');
915 }
916
917 $self->_began_bulk_work(1) if $self->_is_bulk_storage;
918}
919
920sub _dbh_commit {
921 my $self = shift;
922 if ($self->using_freetds) {
923 $self->_dbh->do('COMMIT');
924 }
925 return $self->next::method(@_);
926}
927
928sub _dbh_rollback {
929 my $self = shift;
930 if ($self->using_freetds) {
931 $self->_dbh->do('ROLLBACK');
932 }
933 return $self->next::method(@_);
934}
935
936# savepoint support using ASE syntax
937
938sub _svp_begin {
939 my ($self, $name) = @_;
940
941 $self->_get_dbh->do("SAVE TRANSACTION $name");
942}
943
944# A new SAVE TRANSACTION with the same name releases the previous one.
945sub _svp_release { 1 }
946
947sub _svp_rollback {
948 my ($self, $name) = @_;
949
950 $self->_get_dbh->do("ROLLBACK TRANSACTION $name");
951}
952
9531;
954
955=head1 Schema::Loader Support
956
290da7d6 957As of version C<0.05000>, L<DBIx::Class::Schema::Loader> should work well with
958most (if not all) versions of Sybase ASE.
057db5ce 959
960=head1 FreeTDS
961
This driver supports L<DBD::Sybase> compiled against FreeTDS
(L<http://www.freetds.org/>) to the best of our ability; however, it is
recommended that you recompile L<DBD::Sybase> against the Sybase Open Client
libraries, which are part of the Sybase ASE distribution.
966
967The Open Client FAQ is here:
968L<http://www.isug.com/Sybase_FAQ/ASE/section7.html>.
969
970Sybase ASE for Linux (which comes with the Open Client libraries) may be
971downloaded here: L<http://response.sybase.com/forms/ASE_Linux_Download>.
972
To see if you're using FreeTDS, check C<< $schema->storage->using_freetds >> or run:
974
975 perl -MDBI -le 'my $dbh = DBI->connect($dsn, $user, $pass); print $dbh->{syb_oc_version}'
976
977Some versions of the libraries involved will not support placeholders, in which
978case the storage will be reblessed to
979L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
980
In some configurations, placeholders will work but will throw implicit type
conversion errors for anything that's not expecting a string. In such a case,
the C<auto_cast> option from L<DBIx::Class::Storage::DBI::AutoCast> is set
automatically; you can also enable it explicitly on connection with
L<DBIx::Class::Storage::DBI::AutoCast/connect_call_set_auto_cast>. The type
info for the C<CAST>s is taken from the L<DBIx::Class::ResultSource/data_type>
definitions in your Result classes and is mapped to a Sybase type (if it isn't
one already) using a mapping based on L<SQL::Translator>.
989
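To enable it explicitly, a
L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting along these
lines should work:

  on_connect_call => ['set_auto_cast']
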
48580715 990In other configurations, placeholders will work just as they do with the Sybase
057db5ce 991Open Client libraries.
992
993Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS.
994
995=head1 INSERTS WITH PLACEHOLDERS
996
997With placeholders enabled, inserts are done in a transaction so that there are
998no concurrency issues with getting the inserted identity value using
999C<SELECT MAX(col)>, which is the only way to get the C<IDENTITY> value in this
1000mode.
1001
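From the application's point of view nothing changes; a plain insert still
returns the new identity value (the result source and column names below are
made up):

  my $artist = $schema->resultset('Artist')->create({ name => 'Foo' });
  my $id     = $artist->id;   # fetched via SELECT MAX(artistid) inside the txn
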
1002In addition, they are done on a separate connection so that it's possible to
1003have active cursors when doing an insert.
1004
1005When using C<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars> transactions
1006are disabled, as there are no concurrency issues with C<SELECT @@IDENTITY> as
1007it's a session variable.
1008
1009=head1 TRANSACTIONS
1010
48580715 1011Due to limitations of the TDS protocol, L<DBD::Sybase>, or both, you cannot
1012begin a transaction while there are active cursors, nor can you use multiple
057db5ce 1013active cursors within a transaction. An active cursor is, for example, a
1014L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or
1015C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>.
1016
1017For example, this will not work:
1018
1019 $schema->txn_do(sub {
1020 my $rs = $schema->resultset('Book');
1021 while (my $row = $rs->next) {
1022 $schema->resultset('MetaData')->create({
1023 book_id => $row->id,
1024 ...
1025 });
1026 }
1027 });
1028
1029This won't either:
1030
1031 my $first_row = $large_rs->first;
1032 $schema->txn_do(sub { ... });
1033
1034Transactions done for inserts in C<AutoCommit> mode when placeholders are in use
1035are not affected, as they are done on an extra database handle.
1036
1037Some workarounds:
1038
1039=over 4
1040
1041=item * use L<DBIx::Class::Storage::DBI::Replicated>
1042
1043=item * L<connect|DBIx::Class::Schema/connect> another L<Schema|DBIx::Class::Schema>
1044
=item * load the data from your cursor with L<DBIx::Class::ResultSet/all> (see the sketch below)
1046
1047=back
1048
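For instance, the problematic loop from above can read the cursor eagerly
before the transaction starts:

  my @books = $schema->resultset('Book')->all;   # cursor fully read here

  $schema->txn_do(sub {
    for my $book (@books) {
      $schema->resultset('MetaData')->create({
        book_id => $book->id,
      });
    }
  });
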
1049=head1 MAXIMUM CONNECTIONS
1050
1051The TDS protocol makes separate connections to the server for active statements
1052in the background. By default the number of such connections is limited to 25,
1053on both the client side and the server side.
1054
1055This is a bit too low for a complex L<DBIx::Class> application, so on connection
the client side setting is set to C<256> (see L<DBD::Sybase/maxConnect>). You
1057can override it to whatever setting you like in the DSN.
1058
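For example (a hypothetical DSN; adjust server and database for your site):

  my $dsn = 'dbi:Sybase:server=SYBASE;database=mydb;maxConnect=512';
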
1059See
1060L<http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.ase_15.0.sag1/html/sag1/sag1272.htm>
1061for information on changing the setting on the server side.
1062
1063=head1 DATES
1064
1065See L</connect_call_datetime_setup> to setup date formats
1066for L<DBIx::Class::InflateColumn::DateTime>.
1067
1068=head1 TEXT/IMAGE COLUMNS
1069
1070L<DBD::Sybase> compiled with FreeTDS will B<NOT> allow you to insert or update
1071C<TEXT/IMAGE> columns.
1072
Setting C<< $dbh->{LongReadLen} >> will also not work with FreeTDS; use either:
1074
1075 $schema->storage->dbh->do("SET TEXTSIZE $bytes");
1076
1077or
1078
1079 $schema->storage->set_textsize($bytes);
1080
1081instead.
1082
1083However, the C<LongReadLen> you pass in
8384a713 1084L<connect_info|DBIx::Class::Storage::DBI/connect_info> is used to execute the
1085equivalent C<SET TEXTSIZE> command on connection.
057db5ce 1086
8384a713 1087See L</connect_call_blob_setup> for a
1088L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting you need to work
1089with C<IMAGE> columns.
057db5ce 1090
1091=head1 BULK API
1092
1093The experimental L<DBD::Sybase> Bulk API support is used for
1094L<populate|DBIx::Class::ResultSet/populate> in B<void> context, in a transaction
1095on a separate connection.
1096
1097To use this feature effectively, use a large number of rows for each
L<populate|DBIx::Class::ResultSet/populate> call, e.g.:
1099
1100 while (my $rows = $data_source->get_100_rows()) {
1101 $rs->populate($rows);
1102 }
1103
1104B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns>
1105calls in your C<Result> classes B<must> list columns in database order for this
1106to work. Also, you may have to unset the C<LANG> environment variable before
1107loading your app, if it doesn't match the character set of your database.
1108
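A made-up Result class illustrating the requirement, with columns declared in
the same order as they appear in the database table:

  __PACKAGE__->add_columns(qw/ id name created_on /);
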
1109When inserting IMAGE columns using this method, you'll need to use
1110L</connect_call_blob_setup> as well.
1111
6476fd66 1112=head1 COMPUTED COLUMNS
1113
1114If you have columns such as:
1115
1116 created_dtm AS getdate()
1117
1118represent them in your Result classes as:
1119
1120 created_dtm => {
1121 data_type => undef,
1122 default_value => \'getdate()',
1123 is_nullable => 0,
1124 }
1125
The C<data_type> key must exist and must be C<undef>; empty inserts will then
work on tables with such columns.
1128
1129=head1 TIMESTAMP COLUMNS
1130
1131C<timestamp> columns in Sybase ASE are not really timestamps, see:
1132L<http://dba.fyicenter.com/Interview-Questions/SYBASE/The_timestamp_datatype_in_Sybase_.html>.
1133
1134They should be defined in your Result classes as:
1135
1136 ts => {
1137 data_type => 'timestamp',
1138 is_nullable => 0,
1139 inflate_datetime => 0,
1140 }
1141
The C<< inflate_datetime => 0 >> is necessary if you use
L<DBIx::Class::InflateColumn::DateTime> (as most people do) and still want to
be able to read these values.
1145
1146The values will come back as hexadecimal.
1147
057db5ce 1148=head1 TODO
1149
1150=over
1151
1152=item *
1153
1154Transitions to AutoCommit=0 (starting a transaction) mode by exhausting
1155any active cursors, using eager cursors.
1156
1157=item *
1158
1159Real limits and limited counts using stored procedures deployed on startup.
1160
1161=item *
1162
1163Adaptive Server Anywhere (ASA) support, with possible SQLA::Limit support.
1164
1165=item *
1166
1167Blob update with a LIKE query on a blob, without invalidating the WHERE condition.
1168
1169=item *
1170
1171bulk_insert using prepare_cached (see comments.)
1172
1173=back
1174
1175=head1 AUTHOR
1176
1177See L<DBIx::Class/CONTRIBUTORS>.
1178
1179=head1 LICENSE
1180
1181You may distribute this code under the same terms as Perl itself.
1182
1183=cut
1184# vim:sts=2 sw=2: