package DBIx::Class::Storage::DBI::Sybase::ASE;

use strict;
use warnings;

use base qw/
  DBIx::Class::Storage::DBI::Sybase
  DBIx::Class::Storage::DBI::AutoCast
/;
use mro 'c3';
use Carp::Clan qw/^DBIx::Class/;
use Scalar::Util();
use List::Util();
use Sub::Name();
use Data::Dumper::Concise();

__PACKAGE__->mk_group_accessors('simple' =>
    qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
       _bulk_storage _is_bulk_storage _began_bulk_work
       _bulk_disabled_due_to_coderef_connect_info_warned
       _identity_method/
);

my @also_proxy_to_extra_storages = qw/
  connect_call_set_auto_cast auto_cast connect_call_blob_setup
  connect_call_datetime_setup

  disconnect _connect_info _sql_maker _sql_maker_opts disable_sth_caching
  auto_savepoint unsafe cursor_class debug debugobj schema
/;

=head1 NAME

DBIx::Class::Storage::DBI::Sybase::ASE - Sybase ASE SQL Server support for
DBIx::Class

=head1 SYNOPSIS

This subclass supports L<DBD::Sybase> for real (non-Microsoft) Sybase databases.

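For example (the DSN, credentials and the C<My::Schema> class below are
illustrative placeholders):

  my $schema = My::Schema->connect(
    'dbi:Sybase:server=SYBASE;database=mydb',
    $user,
    $pass,
    { AutoCommit => 1 },
  );
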
=head1 DESCRIPTION

If your version of Sybase does not support placeholders, then your storage will
be reblessed to L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.
You can also enable that driver explicitly; see its documentation for details.

With this driver there is unfortunately no way to get the C<last_insert_id>
without doing a C<SELECT MAX(col)>. This is done safely in a transaction
(locking the table.) See L</INSERTS WITH PLACEHOLDERS>.

A recommended L<DBIx::Class::Storage::DBI/connect_info> setting:

  on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]]

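For instance, this can be passed in the options hashref of
L<connect|DBIx::Class::Schema/connect>, sketched here together with a
C<LongReadLen> sized for your C<TEXT>/C<IMAGE> data (all values are
illustrative):

  my $schema = My::Schema->connect($dsn, $user, $pass, {
    on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]],
    LongReadLen     => 1024 * 1024, # used on connection to issue the equivalent SET TEXTSIZE
  });
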
=head1 METHODS

=cut

sub _rebless {
  my $self = shift;

  my $no_bind_vars = __PACKAGE__ . '::NoBindVars';

  if ($self->using_freetds) {
    carp <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};

You are using FreeTDS with Sybase.

We will do our best to support this configuration, but please consider this
support experimental.

TEXT/IMAGE columns will definitely not work.

You are encouraged to recompile DBD::Sybase with the Sybase Open Client libraries
instead.

See perldoc DBIx::Class::Storage::DBI::Sybase::ASE for more details.

To turn off this warning, set the DBIC_SYBASE_FREETDS_NOWARN environment
variable.
EOF

    if (not $self->_typeless_placeholders_supported) {
      if ($self->_placeholders_supported) {
        $self->auto_cast(1);
      }
      else {
        $self->ensure_class_loaded($no_bind_vars);
        bless $self, $no_bind_vars;
        $self->_rebless;
      }
    }
  }

  elsif (not $self->_get_dbh->{syb_dynamic_supported}) {
    # not necessarily FreeTDS, but no placeholders nevertheless
    $self->ensure_class_loaded($no_bind_vars);
    bless $self, $no_bind_vars;
    $self->_rebless;
  }
  # this is highly unlikely, but we check just in case
  elsif (not $self->_typeless_placeholders_supported) {
    $self->auto_cast(1);
  }
}

sub _init {
  my $self = shift;
  $self->_set_max_connect(256);

# create storage for insert/(update blob) transactions,
# unless this is that storage
  return if $self->_is_extra_storage;

  my $writer_storage = (ref $self)->new;

  $writer_storage->_is_extra_storage(1);
  $writer_storage->connect_info($self->connect_info);
  $writer_storage->auto_cast($self->auto_cast);

  $self->_writer_storage($writer_storage);

# create a bulk storage unless connect_info is a coderef
  return if ref($self->_dbi_connect_info->[0]) eq 'CODE';

  my $bulk_storage = (ref $self)->new;

  $bulk_storage->_is_extra_storage(1);
  $bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
  $bulk_storage->connect_info($self->connect_info);

# this is why we need a separate connection (and can't use a coderef
# connect_info) - the bulk API requires the bulkLogin=1 DSN attribute
  $bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';

  $self->_bulk_storage($bulk_storage);
}

for my $method (@also_proxy_to_extra_storages) {
  no strict 'refs';
  no warnings 'redefine';

  my $replaced = __PACKAGE__->can($method);

  *{$method} = Sub::Name::subname $method => sub {
    my $self = shift;
    $self->_writer_storage->$replaced(@_) if $self->_writer_storage;
    $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage;
    return $self->$replaced(@_);
  };
}

sub disconnect {
  my $self = shift;

# Even though we call $sth->finish for uses of the bulk API, there's still an
# "active statement" warning on disconnect, which we throw away here.
# This is due to the bug described in insert_bulk.
# Currently a noop because 'prepare' is used instead of 'prepare_cached'.
  local $SIG{__WARN__} = sub {
    warn $_[0] unless $_[0] =~ /active statement/i;
  } if $self->_is_bulk_storage;

# so that next transaction gets a dbh
  $self->_began_bulk_work(0) if $self->_is_bulk_storage;

  $self->next::method;
}

# Set up session settings for Sybase databases on connection.
#
# Make sure we have CHAINED mode turned on if AutoCommit is off in non-FreeTDS
# DBD::Sybase (since we don't know how DBD::Sybase was compiled.) If, however,
# we're using FreeTDS, CHAINED mode turns on an implicit transaction, which we
# only want when AutoCommit is off.
#
# Also issue SET TEXTSIZE for FreeTDS, because LongReadLen doesn't work there.
sub _run_connection_actions {
  my $self = shift;

  if ($self->_is_bulk_storage) {
# this should be cleared on every reconnect
    $self->_began_bulk_work(0);
    return;
  }

  if (not $self->using_freetds) {
    $self->_dbh->{syb_chained_txn} = 1;
  } else {
    # based on LongReadLen in connect_info
    $self->set_textsize;

    if ($self->_dbh_autocommit) {
      $self->_dbh->do('SET CHAINED OFF');
    } else {
      $self->_dbh->do('SET CHAINED ON');
    }
  }

  $self->next::method(@_);
}

=head2 connect_call_blob_setup

Used as:

  on_connect_call => [ [ 'blob_setup', log_on_update => 0 ] ]

Does C<< $dbh->{syb_binary_images} = 1; >> to return C<IMAGE> data as raw binary
instead of as a hex string.

Recommended.

Also sets the C<log_on_update> value for blob write operations. The default is
C<1>, but C<0> is better if your database is configured for it.

See
L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.

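With the call in place, C<IMAGE> data can be written through the normal
L<DBIx::Class> API. A rough sketch (the result class and column names are
made up for illustration):

  open my $fh, '<:raw', 'photo.jpg' or die $!;
  my $bytes = do { local $/; <$fh> };

  $schema->resultset('Photo')->create({
    caption => 'A photo',
    image   => $bytes,  # IMAGE column, sent via the blob API
  });
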
=cut

sub connect_call_blob_setup {
  my $self = shift;
  my %args = @_;
  my $dbh = $self->_dbh;
  $dbh->{syb_binary_images} = 1;

  $self->_blob_log_on_update($args{log_on_update})
    if exists $args{log_on_update};
}

sub _is_lob_type {
  my $self = shift;
  my $type = shift;
  $type && $type =~ /(?:text|image|lob|bytea|binary|memo)/i;
}

sub _is_lob_column {
  my ($self, $source, $column) = @_;

  return $self->_is_lob_type($source->column_info($column)->{data_type});
}

sub _prep_for_execute {
  my $self = shift;
  my ($op, $extra_bind, $ident, $args) = @_;

  my ($sql, $bind) = $self->next::method (@_);

  my $table = Scalar::Util::blessed($ident) ? $ident->from : $ident;

  my $bind_info = $self->_resolve_column_info(
    $ident, [map $_->[0], @{$bind}]
  );
  my $bound_identity_col = List::Util::first
    { $bind_info->{$_}{is_auto_increment} }
    (keys %$bind_info)
  ;
  my $identity_col = Scalar::Util::blessed($ident) &&
    List::Util::first
    { $ident->column_info($_)->{is_auto_increment} }
    $ident->columns
  ;

  if (($op eq 'insert' && $bound_identity_col) ||
      ($op eq 'update' && exists $args->[0]{$identity_col})) {
    $sql = join ("\n",
      $self->_set_table_identity_sql($op => $table, 'on'),
      $sql,
      $self->_set_table_identity_sql($op => $table, 'off'),
    );
  }

  if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
      (not $self->{insert_bulk})) {
    $sql =
      "$sql\n" .
      $self->_fetch_identity_sql($ident, $identity_col);
  }

  return ($sql, $bind);
}

sub _set_table_identity_sql {
  my ($self, $op, $table, $on_off) = @_;

  return sprintf 'SET IDENTITY_%s %s %s',
    uc($op), $self->sql_maker->_quote($table), uc($on_off);
}

# Stolen from SQLT, with some modifications. This is a makeshift
# solution before a sane type-mapping library is available, thus
# the 'our' for easy overrides.
our %TYPE_MAPPING = (
  number    => 'numeric',
  money     => 'money',
  varchar   => 'varchar',
  varchar2  => 'varchar',
  timestamp => 'datetime',
  text      => 'varchar',
  real      => 'double precision',
  comment   => 'text',
  bit       => 'bit',
  tinyint   => 'smallint',
  float     => 'double precision',
  serial    => 'numeric',
  bigserial => 'numeric',
  boolean   => 'varchar',
  long      => 'varchar',
);

sub _native_data_type {
  my ($self, $type) = @_;

  $type = lc $type;
  $type =~ s/\s* identity//x;

  return uc($TYPE_MAPPING{$type} || $type);
}
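# For instance (a sketch of the mapping above): 'timestamp' becomes 'DATETIME',
# 'int identity' collapses to 'INT', and unknown types pass through uppercased.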

sub _fetch_identity_sql {
  my ($self, $source, $col) = @_;

  return sprintf ("SELECT MAX(%s) FROM %s",
    map { $self->sql_maker->_quote ($_) } ($col, $source->from)
  );
}

sub _execute {
  my $self = shift;
  my ($op) = @_;

  my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);

  if ($op eq 'insert') {
    $self->_identity($sth->fetchrow_array);
    $sth->finish;
  }

  return wantarray ? ($rv, $sth, @bind) : $rv;
}

sub last_insert_id { shift->_identity }

# handles TEXT/IMAGE and transaction for last_insert_id
sub insert {
  my $self = shift;
  my ($source, $to_insert) = @_;

  my $identity_col = (List::Util::first
    { $source->column_info($_)->{is_auto_increment} }
    $source->columns) || '';

  # check for empty insert
  # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
  # try to insert explicit 'DEFAULT's instead (except for identity)
  if (not %$to_insert) {
    for my $col ($source->columns) {
      next if $col eq $identity_col;
      $to_insert->{$col} = \'DEFAULT';
    }
  }

  my $blob_cols = $self->_remove_blob_cols($source, $to_insert);

  # do we need the horrific SELECT MAX(COL) hack?
  my $dumb_last_insert_id =
    $identity_col
    && (not exists $to_insert->{$identity_col})
    && ($self->_identity_method||'') ne '@@IDENTITY';

  my $next = $self->next::can;

  # we are already in a transaction, or there are no blobs
  # and we don't need the PK - just (try to) do it
  if ($self->{transaction_depth}
        || (!$blob_cols && !$dumb_last_insert_id)
  ) {
    return $self->_insert (
      $next, $source, $to_insert, $blob_cols, $identity_col
    );
  }

  # otherwise use the _writer_storage to do the insert+transaction on another
  # connection
  my $guard = $self->_writer_storage->txn_scope_guard;

  my $updated_cols = $self->_writer_storage->_insert (
    $next, $source, $to_insert, $blob_cols, $identity_col
  );

  $self->_identity($self->_writer_storage->_identity);

  $guard->commit;

  return $updated_cols;
}

sub _insert {
  my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;

  my $updated_cols = $self->$next ($source, $to_insert);

  my $final_row = {
    ($identity_col ?
      ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
    %$to_insert,
    %$updated_cols,
  };

  $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;

  return $updated_cols;
}

sub update {
  my $self = shift;
  my ($source, $fields, $where, @rest) = @_;

  my $wantarray = wantarray;

  my $blob_cols = $self->_remove_blob_cols($source, $fields);

  my $table = $source->name;

  my $identity_col = List::Util::first
    { $source->column_info($_)->{is_auto_increment} }
    $source->columns;

  my $is_identity_update = $identity_col && defined $fields->{$identity_col};

  return $self->next::method(@_) unless $blob_cols;

# If there are any blobs in $where, Sybase will return a descriptive error
# message.
# XXX blobs can still be used with a LIKE query, and this should be handled.

# update+blob update(s) done atomically on separate connection
  $self = $self->_writer_storage;

  my $guard = $self->txn_scope_guard;

# First update the blob columns to be updated to '' (taken from $fields, where
# it was originally put by _remove_blob_cols).
  my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;

# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.

  $self->next::method($source, \%blobs_to_empty, $where, @rest);

# Now update the blobs before the other columns in case the update of other
# columns makes the search condition invalid.
  $self->_update_blobs($source, $blob_cols, $where);

  my @res;
  if (%$fields) {
    if ($wantarray) {
      @res = $self->next::method(@_);
    }
    elsif (defined $wantarray) {
      $res[0] = $self->next::method(@_);
    }
    else {
      $self->next::method(@_);
    }
  }

  $guard->commit;

  return $wantarray ? @res : $res[0];
}

sub insert_bulk {
  my $self = shift;
  my ($source, $cols, $data) = @_;

  my $identity_col = List::Util::first
    { $source->column_info($_)->{is_auto_increment} }
    $source->columns;

  my $is_identity_insert = (List::Util::first
    { $_ eq $identity_col }
    @{$cols}
  ) ? 1 : 0;

  my @source_columns = $source->columns;

  my $use_bulk_api =
    $self->_bulk_storage &&
    $self->_get_dbh->{syb_has_blk};

  if ((not $use_bulk_api)
        &&
      (ref($self->_dbi_connect_info->[0]) eq 'CODE')
        &&
      (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
    carp <<'EOF';
Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
regular array inserts.
EOF
    $self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
  }

  if (not $use_bulk_api) {
    my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data);

# _execute_array uses a txn anyway, but it ends too early in case we need to
# select max(col) to get the identity for inserting blobs.
    ($self, my $guard) = $self->{transaction_depth} == 0 ?
      ($self->_writer_storage, $self->_writer_storage->txn_scope_guard)
      :
      ($self, undef);

    local $self->{insert_bulk} = 1;

    $self->next::method(@_);

    if ($blob_cols) {
      if ($is_identity_insert) {
        $self->_insert_blobs_array ($source, $blob_cols, $cols, $data);
      }
      else {
        my @cols_with_identities = (@$cols, $identity_col);

        ## calculate identities
        # XXX This assumes identities always increase by 1, which may or may not
        # be true.
        my ($last_identity) =
          $self->_dbh->selectrow_array (
            $self->_fetch_identity_sql($source, $identity_col)
          );
        my @identities = (($last_identity - @$data + 1) .. $last_identity);

        my @data_with_identities = map [@$_, shift @identities], @$data;

        $self->_insert_blobs_array (
          $source, $blob_cols, \@cols_with_identities, \@data_with_identities
        );
      }
    }

    $guard->commit if $guard;

    return;
  }

# otherwise, use the bulk API

# rearrange @$data so that columns are in database order
  my %orig_idx;
  @orig_idx{@$cols} = 0..$#$cols;

  my %new_idx;
  @new_idx{@source_columns} = 0..$#source_columns;

  my @new_data;
  for my $datum (@$data) {
    my $new_datum = [];
    for my $col (@source_columns) {
# identity data will be 'undef' if not $is_identity_insert
# columns with defaults will also be 'undef'
      $new_datum->[ $new_idx{$col} ] =
        exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
    }
    push @new_data, $new_datum;
  }

# bcp identity index is 1-based
  my $identity_idx = exists $new_idx{$identity_col} ?
    $new_idx{$identity_col} + 1 : 0;

## Set a client-side conversion error handler, straight from DBD::Sybase docs.
# This ignores any data conversion errors detected by the client side libs, as
# they are usually harmless.
  my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
    Sub::Name::subname insert_bulk => sub {
      my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;

      return 1 if $errno == 36;

      carp
        "Layer: $layer, Origin: $origin, Severity: $severity, Error: $errno" .
        ($errmsg ? "\n$errmsg" : '') .
        ($osmsg ? "\n$osmsg" : '') .
        ($blkmsg ? "\n$blkmsg" : '');

      return 0;
  });

  eval {
    my $bulk = $self->_bulk_storage;

    my $guard = $bulk->txn_scope_guard;

## XXX get this to work instead of our own $sth
## will require SQLA or *Hacks changes for ordered columns
#    $bulk->next::method($source, \@source_columns, \@new_data, {
#      syb_bcp_attribs => {
#        identity_flag => $is_identity_insert,
#        identity_column => $identity_idx,
#      }
#    });
    my $sql = 'INSERT INTO ' .
      $bulk->sql_maker->_quote($source->name) . ' (' .
# colname list is ignored for BCP, but does no harm
      (join ', ', map $bulk->sql_maker->_quote($_), @source_columns) . ') '.
      ' VALUES ('. (join ', ', ('?') x @source_columns) . ')';

## XXX there's a bug in the DBD::Sybase bulk support that makes $sth->finish for
## a prepare_cached statement ineffective. Replace with ->sth when fixed, or
## better yet the version above. Should be fixed in DBD::Sybase .
    my $sth = $bulk->_get_dbh->prepare($sql,
#      'insert', # op
      {
        syb_bcp_attribs => {
          identity_flag => $is_identity_insert,
          identity_column => $identity_idx,
        }
      }
    );

    my @bind = do {
      my $idx = 0;
      map [ $_, $idx++ ], @source_columns;
    };

    $self->_execute_array(
      $source, $sth, \@bind, \@source_columns, \@new_data, sub {
        $guard->commit
      }
    );

    $bulk->_query_end($sql);
  };

  my $exception = $@;
  DBD::Sybase::set_cslib_cb($orig_cslib_cb);

  if ($exception =~ /-Y option/) {
    carp <<"EOF";

Sybase bulk API operation failed due to character set incompatibility, reverting
to regular array inserts:

*** Try unsetting the LANG environment variable.

$exception
EOF
    $self->_bulk_storage(undef);
    unshift @_, $self;
    goto \&insert_bulk;
  }
  elsif ($exception) {
# rollback makes the bulkLogin connection unusable
    $self->_bulk_storage->disconnect;
    $self->throw_exception($exception);
  }
}

sub _dbh_execute_array {
  my ($self, $sth, $tuple_status, $cb) = @_;

  my $rv = $self->next::method($sth, $tuple_status);
  $cb->() if $cb;

  return $rv;
}

# Make sure blobs are not bound as placeholders, and return any non-empty ones
# as a hash.
sub _remove_blob_cols {
  my ($self, $source, $fields) = @_;

  my %blob_cols;

  for my $col (keys %$fields) {
    if ($self->_is_lob_column($source, $col)) {
      my $blob_val = delete $fields->{$col};
      if (not defined $blob_val) {
        $fields->{$col} = \'NULL';
      }
      else {
        $fields->{$col} = \"''";
        $blob_cols{$col} = $blob_val unless $blob_val eq '';
      }
    }
  }

  return %blob_cols ? \%blob_cols : undef;
}

# same for insert_bulk
sub _remove_blob_cols_array {
  my ($self, $source, $cols, $data) = @_;

  my @blob_cols;

  for my $i (0..$#$cols) {
    my $col = $cols->[$i];

    if ($self->_is_lob_column($source, $col)) {
      for my $j (0..$#$data) {
        my $blob_val = delete $data->[$j][$i];
        if (not defined $blob_val) {
          $data->[$j][$i] = \'NULL';
        }
        else {
          $data->[$j][$i] = \"''";
          $blob_cols[$j][$i] = $blob_val
            unless $blob_val eq '';
        }
      }
    }
  }

  return @blob_cols ? \@blob_cols : undef;
}

sub _update_blobs {
  my ($self, $source, $blob_cols, $where) = @_;

  my (@primary_cols) = $source->primary_columns;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
    unless @primary_cols;

# check if we're updating a single row by PK
  my $pk_cols_in_where = 0;
  for my $col (@primary_cols) {
    $pk_cols_in_where++ if defined $where->{$col};
  }
  my @rows;

  if ($pk_cols_in_where == @primary_cols) {
    my %row_to_update;
    @row_to_update{@primary_cols} = @{$where}{@primary_cols};
    @rows = \%row_to_update;
  } else {
    my $cursor = $self->select ($source, \@primary_cols, $where, {});
    @rows = map {
      my %row; @row{@primary_cols} = @$_; \%row
    } $cursor->all;
  }

  for my $row (@rows) {
    $self->_insert_blobs($source, $blob_cols, $row);
  }
}

sub _insert_blobs {
  my ($self, $source, $blob_cols, $row) = @_;
  my $dbh = $self->_get_dbh;

  my $table = $source->name;

  my %row = %$row;
  my (@primary_cols) = $source->primary_columns;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
    unless @primary_cols;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
    if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);

  for my $col (keys %$blob_cols) {
    my $blob = $blob_cols->{$col};

    my %where = map { ($_, $row{$_}) } @primary_cols;

    my $cursor = $self->select ($source, [$col], \%where, {});
    $cursor->next;
    my $sth = $cursor->sth;

    if (not $sth) {

      $self->throw_exception(
          "Could not find row in table '$table' for blob update:\n"
        . Data::Dumper::Concise::Dumper (\%where)
      );
    }

    eval {
      do {
        $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
      } while $sth->fetch;

      $sth->func('ct_prepare_send') or die $sth->errstr;

      my $log_on_update = $self->_blob_log_on_update;
      $log_on_update = 1 if not defined $log_on_update;

      $sth->func('CS_SET', 1, {
        total_txtlen => length($blob),
        log_on_update => $log_on_update
      }, 'ct_data_info') or die $sth->errstr;

      $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;

      $sth->func('ct_finish_send') or die $sth->errstr;
    };
    my $exception = $@;
    $sth->finish if $sth;
    if ($exception) {
      if ($self->using_freetds) {
        $self->throw_exception (
          'TEXT/IMAGE operation failed, probably because you are using FreeTDS: '
          . $exception
        );
      } else {
        $self->throw_exception($exception);
      }
    }
  }
}

sub _insert_blobs_array {
  my ($self, $source, $blob_cols, $cols, $data) = @_;

  for my $i (0..$#$data) {
    my $datum = $data->[$i];

    my %row;
    @row{ @$cols } = @$datum;

    my %blob_vals;
    for my $j (0..$#$cols) {
      if (exists $blob_cols->[$i][$j]) {
        $blob_vals{ $cols->[$j] } = $blob_cols->[$i][$j];
      }
    }

    $self->_insert_blobs ($source, \%blob_vals, \%row);
  }
}

=head2 connect_call_datetime_setup

Used as:

  on_connect_call => 'datetime_setup'

In L<DBIx::Class::Storage::DBI/connect_info> to set:

  $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
  $dbh->do('set dateformat mdy');   # input fmt:  08/13/1979 18:08:55.080

These commands are run on connection, for use with
L<DBIx::Class::InflateColumn::DateTime> via L<DateTime::Format::Sybase>, which
you will need to install.

This works for both C<DATETIME> and C<SMALLDATETIME> columns, although
C<SMALLDATETIME> columns only have minute precision.

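A sketch of typical use, assuming a result class (hypothetical here) that
declares a C<datetime> column:

  package My::Schema::Result::Event;
  use base 'DBIx::Class::Core';
  __PACKAGE__->load_components('InflateColumn::DateTime');
  __PACKAGE__->table('event');
  __PACKAGE__->add_columns(
    id        => { data_type => 'int', is_auto_increment => 1 },
    starts_at => { data_type => 'datetime' },
  );
  __PACKAGE__->set_primary_key('id');

  # later, with datetime_setup enabled on connection:
  my $dt = $schema->resultset('Event')->first->starts_at; # a DateTime object
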
=cut

{
  my $old_dbd_warned = 0;

  sub connect_call_datetime_setup {
    my $self = shift;
    my $dbh = $self->_get_dbh;

    if ($dbh->can('syb_date_fmt')) {
      # amazingly, this works with FreeTDS
      $dbh->syb_date_fmt('ISO_strict');
    } elsif (not $old_dbd_warned) {
      carp "Your DBD::Sybase is too old to support ".
        "DBIx::Class::InflateColumn::DateTime, please upgrade!";
      $old_dbd_warned = 1;
    }

    $dbh->do('SET DATEFORMAT mdy');

    1;
  }
}

sub datetime_parser_type { "DateTime::Format::Sybase" }

# ->begin_work and such have no effect with FreeTDS but we run them anyway to
# let the DBD keep any state it needs to.
#
# If they ever do start working, the extra statements will do no harm (because
# Sybase supports nested transactions.)

sub _dbh_begin_work {
  my $self = shift;

# bulkLogin=1 connections are always in a transaction, and can only call BEGIN
# TRAN once. However, we need to make sure there's a $dbh.
  return if $self->_is_bulk_storage && $self->_dbh && $self->_began_bulk_work;

  $self->next::method(@_);

  if ($self->using_freetds) {
    $self->_get_dbh->do('BEGIN TRAN');
  }

  $self->_began_bulk_work(1) if $self->_is_bulk_storage;
}

sub _dbh_commit {
  my $self = shift;
  if ($self->using_freetds) {
    $self->_dbh->do('COMMIT');
  }
  return $self->next::method(@_);
}

sub _dbh_rollback {
  my $self = shift;
  if ($self->using_freetds) {
    $self->_dbh->do('ROLLBACK');
  }
  return $self->next::method(@_);
}

# savepoint support using ASE syntax

sub _svp_begin {
  my ($self, $name) = @_;

  $self->_get_dbh->do("SAVE TRANSACTION $name");
}

# A new SAVE TRANSACTION with the same name releases the previous one.
sub _svp_release { 1 }

sub _svp_rollback {
  my ($self, $name) = @_;

  $self->_get_dbh->do("ROLLBACK TRANSACTION $name");
}

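# A sketch of how these are exercised: with auto_savepoint => 1 in connect_info,
# a nested txn_do wraps its inner block in a savepoint, so a failure there rolls
# back to the savepoint instead of aborting the outer transaction (names below
# are illustrative):
#
#   $schema->txn_do(sub {
#     $schema->resultset('Order')->create({ ... });
#     eval {
#       $schema->txn_do(sub { $schema->resultset('AuditLog')->create({ ... }) });
#     };
#   });
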
1;

=head1 Schema::Loader Support

There is an experimental branch of L<DBIx::Class::Schema::Loader> that will
allow you to dump a schema from most (if not all) versions of Sybase.

It is available via subversion from:

  http://dev.catalyst.perl.org/repos/bast/branches/DBIx-Class-Schema-Loader/current/

=head1 FreeTDS

This driver supports L<DBD::Sybase> compiled against FreeTDS
(L<http://www.freetds.org/>) to the best of our ability; however, it is
recommended that you recompile L<DBD::Sybase> against the Sybase Open Client
libraries, which are part of the Sybase ASE distribution.

The Open Client FAQ is here:
L<http://www.isug.com/Sybase_FAQ/ASE/section7.html>.

Sybase ASE for Linux (which comes with the Open Client libraries) may be
downloaded here: L<http://response.sybase.com/forms/ASE_Linux_Download>.

To see if you're using FreeTDS, check C<< $schema->storage->using_freetds >>, or run:

  perl -MDBI -le 'my $dbh = DBI->connect($dsn, $user, $pass); print $dbh->{syb_oc_version}'

Some versions of the libraries involved will not support placeholders, in which
case the storage will be reblessed to
L<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>.

In some configurations, placeholders will work but will throw implicit type
conversion errors for anything that's not expecting a string. In such a case,
the C<auto_cast> option from L<DBIx::Class::Storage::DBI::AutoCast> is set
automatically; you can also enable it explicitly on connection with
L<DBIx::Class::Storage::DBI::AutoCast/connect_call_set_auto_cast>. The type
info for the C<CAST>s is taken from the L<DBIx::Class::ResultSource/data_type>
definitions in your Result classes, and is mapped to a Sybase type (if it isn't
already) using a mapping based on L<SQL::Translator>.

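For example, a sketch of the corresponding connect_info entry (combine it with
any other C<on_connect_call> entries you already use):

  on_connect_call => ['set_auto_cast']
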
In other configurations, placeholders will work just as they do with the Sybase
Open Client libraries.

Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS.

=head1 INSERTS WITH PLACEHOLDERS

With placeholders enabled, inserts are done in a transaction so that there are
no concurrency issues with getting the inserted identity value using
C<SELECT MAX(col)>, which is the only way to get the C<IDENTITY> value in this
mode.

In addition, they are done on a separate connection so that it's possible to
have active cursors when doing an insert.

When using C<DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars>, transactions
are disabled, as there are no concurrency issues with C<SELECT @@IDENTITY>,
which is a session variable.

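From the application's point of view nothing changes; a create in
C<AutoCommit> mode still returns a row with its identity value filled in
(the result class and column here are illustrative):

  my $artist = $schema->resultset('Artist')->create({ name => 'Foo' });
  my $id = $artist->id; # fetched via SELECT MAX(...) inside the insert transaction
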
=head1 TRANSACTIONS

Due to limitations of the TDS protocol, L<DBD::Sybase>, or both, you cannot
begin a transaction while there are active cursors, nor can you use multiple
active cursors within a transaction. An active cursor is, for example, a
L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or
C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>.

For example, this will not work:

  $schema->txn_do(sub {
    my $rs = $schema->resultset('Book');
    while (my $row = $rs->next) {
      $schema->resultset('MetaData')->create({
        book_id => $row->id,
        ...
      });
    }
  });

This won't either:

  my $first_row = $large_rs->first;
  $schema->txn_do(sub { ... });

Transactions done for inserts in C<AutoCommit> mode when placeholders are in use
are not affected, as they are done on an extra database handle.

Some workarounds:

=over 4

=item * use L<DBIx::Class::Storage::DBI::Replicated>

=item * L<connect|DBIx::Class::Schema/connect> another L<Schema|DBIx::Class::Schema>

=item * load the data from your cursor with L<DBIx::Class::ResultSet/all> (see the example below)

=back
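
For instance, the first example above could be rewritten to exhaust the cursor
up front (a sketch; this loads the whole resultset into memory):

  my @books = $schema->resultset('Book')->all;

  $schema->txn_do(sub {
    for my $book (@books) {
      $schema->resultset('MetaData')->create({
        book_id => $book->id,
        ...
      });
    }
  });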

=head1 MAXIMUM CONNECTIONS

The TDS protocol makes separate connections to the server for active statements
in the background. By default the number of such connections is limited to 25,
on both the client side and the server side.

This is a bit too low for a complex L<DBIx::Class> application, so on connection
the client side setting is set to C<256> (see L<DBD::Sybase/maxConnect>.) You
can override it to whatever setting you like in the DSN.

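For example (a sketch; the server name and limit are placeholders):

  my $schema = My::Schema->connect(
    'dbi:Sybase:server=SYBASE;maxConnect=512', $user, $pass,
  );
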
See
L<http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.ase_15.0.sag1/html/sag1/sag1272.htm>
for information on changing the setting on the server side.

=head1 DATES

See L</connect_call_datetime_setup> to set up date formats
for L<DBIx::Class::InflateColumn::DateTime>.

=head1 TEXT/IMAGE COLUMNS

L<DBD::Sybase> compiled with FreeTDS will B<NOT> allow you to insert or update
C<TEXT/IMAGE> columns.

Setting C<< $dbh->{LongReadLen} >> will also not work with FreeTDS; use either:

  $schema->storage->dbh->do("SET TEXTSIZE $bytes");

or

  $schema->storage->set_textsize($bytes);

instead.

However, the C<LongReadLen> you pass in
L<DBIx::Class::Storage::DBI/connect_info> is used to execute the equivalent
C<SET TEXTSIZE> command on connection.

See L</connect_call_blob_setup> for a L<DBIx::Class::Storage::DBI/connect_info>
setting you need to work with C<IMAGE> columns.

=head1 BULK API

The experimental L<DBD::Sybase> Bulk API support is used for
L<populate|DBIx::Class::ResultSet/populate> in B<void> context, in a transaction
on a separate connection.

To use this feature effectively, use a large number of rows for each
L<populate|DBIx::Class::ResultSet/populate> call, e.g.:

  while (my $rows = $data_source->get_100_rows()) {
    $rs->populate($rows);
  }

B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns>
calls in your C<Result> classes B<must> list columns in database order for this
to work. Also, you may have to unset the C<LANG> environment variable before
loading your app, if it doesn't match the character set of your database.

When inserting IMAGE columns using this method, you'll need to use
L</connect_call_blob_setup> as well.

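Putting it together, a void-context populate sketch (the result class, columns
and rows are illustrative):

  $schema->resultset('Book')->populate([
    [ qw/id title author/ ],
    [ 1, 'Title One', 'Author One' ],
    [ 2, 'Title Two', 'Author Two' ],
  ]);
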
=head1 TODO

=over

=item *

Transitions to AutoCommit=0 (starting a transaction) mode by exhausting
any active cursors, using eager cursors.

=item *

Real limits and limited counts using stored procedures deployed on startup.

=item *

Adaptive Server Anywhere (ASA) support, with possible SQLA::Limit support.

=item *

Blob update with a LIKE query on a blob, without invalidating the WHERE condition.

=item *

bulk_insert using prepare_cached (see comments.)

=back

=head1 AUTHOR

See L<DBIx::Class/CONTRIBUTORS>.

=head1 LICENSE

You may distribute this code under the same terms as Perl itself.

=cut
# vim:sts=2 sw=2: