Revision history for DBIx::Class
+ - add support for binding BYTEA and similar parameters (w/Pg impl)
- add support to Ordered for multiple ordering columns
- mark DB.pm and compose_connection as deprecated
- switch tests to compose_namespace
typester: Daisuke Murase <typester@cpan.org>
+victori: Victor Igumnov <victori@cpan.org>
+
wdh: Will Hawes
willert: Sebastian Willert <willert@cpan.org>
--- /dev/null
+package DBIx::Class::FileColumn;
+
+use strict;
+use warnings;
+use base 'DBIx::Class';
+use File::Copy;
+use File::Path;
+use File::Spec;
+use IO::File;
+
+# Hook run when a row is inflated from storage: after the normal inflation,
+# re-inflate every is_file_column value back into its hash form (see
+# _inflate_file_column below).
+sub inflate_result {
+ my $self = shift;
+ # $ret is the row object produced by the next inflate_result in the chain
+ my $ret = $self->next::method(@_);
+
+ $self->_inflate_file_column($ret);
+ return $ret;
+}
+
+# Wrap insert: pull the { handle, filename } hashes out of the file columns
+# (leaving just the filename for the DB), do the real insert, then copy the
+# uploaded files to disk under the new row's id.
+sub insert {
+ my ( $self, @rest ) = @_;
+
+ my ( $file, @column_names ) = $self->_load_file_column_information;
+ my $ret = $self->next::method(@rest);
+ $self->_save_file_column( $file, $ret, @column_names );
+ return $ret;
+}
+
+# Wrap update: identical flow to insert() — extract file data, run the real
+# update, then (re)write the files on disk for this row's id.
+sub update {
+ my ($self, @rest ) = @_;
+
+ my ( $file, @column_names ) = $self->_load_file_column_information;
+ my $ret = $self->next::method(@rest);
+ $self->_save_file_column( $file, $ret, @column_names );
+ return $ret;
+}
+
+# Wrap delete: remove this row's per-id directory under each file column's
+# file_column_path from the filesystem before deleting the row itself.
+sub delete {
+ my ( $self, @rest ) = @_;
+
+ my @column_names = $self->columns;
+ for (@column_names) {
+ if ( $self->column_info($_)->{is_file_column} ) {
+ # files are stored under <file_column_path>/<row id>/
+ my $path =
+ File::Spec->catdir( $self->column_info($_)->{file_column_path},
+ $self->id );
+ # rmtree is exported by File::Path; verbose and safe flags off
+ rmtree( [$path], 0, 0 );
+ }
+ }
+
+ my $ret = $self->next::method(@rest);
+
+ return $ret;
+}
+
+# Inverse of _load_file_column_information: for each is_file_column column,
+# turn the stored filename back into { handle => IO::File, filename => ... }
+# by opening <file_column_path>/<row id>/<filename> for reading.
+sub _inflate_file_column {
+ my $self = shift;
+ my $ret = shift;
+
+ my @column_names = $self->columns;
+ for(@column_names) {
+ if ( $ret->column_info($_)->{is_file_column} ) {
+ # make sure everything checks out
+ unless (defined $ret->$_) {
+ # if something is wrong set it to undef
+ $ret->$_(undef);
+ next;
+ }
+ my $fs_file =
+ File::Spec->catfile( $ret->column_info($_)->{file_column_path},
+ $ret->id, $ret->$_ );
+ # NOTE(review): IO::File->new returns undef if the file is missing on
+ # disk, so {handle} may be undef here — confirm callers cope with that
+ $ret->$_({handle => new IO::File($fs_file, "r"), filename => $ret->$_});
+ }
+ }
+}
+
+# Collect, for every is_file_column column, the { handle, filename } hash the
+# caller supplied, and replace the column's value with just the filename so
+# the database stores a plain string.  Returns the collected hash-of-hashes
+# (keyed by column name) plus the full column list, for _save_file_column.
+sub _load_file_column_information {
+    my $self = shift;
+
+    my $file;
+    my @column_names;
+
+    @column_names = $self->columns;
+    for (@column_names) {
+        if ( $self->column_info($_)->{is_file_column} ) {
+            # make sure everything checks out
+            unless ((defined $self->$_) ||
+             (defined $self->$_->{filename} && defined $self->$_->{handle})) {
+                # if something is wrong set it to undef
+                $self->$_(undef);
+                next;
+            }
+            # stash the { handle => ..., filename => ... } hash, then store
+            # only the bare filename in the column itself
+            $file->{$_} = $self->$_;
+            $self->$_( $self->$_->{filename} );
+        }
+    }
+
+    return ( $file, @column_names );
+}
+
+# Copy each uploaded file (collected by _load_file_column_information) to
+# <file_column_path>/<row id>/<filename> on disk, creating the directory if
+# needed, then fire the overridable _file_column_callback for that column.
+sub _save_file_column {
+    my ( $self, $file, $ret, @column_names ) = @_;
+
+    for (@column_names) {
+        if ( $ret->column_info($_)->{is_file_column} ) {
+            next unless (defined $ret->$_);
+            my $file_path =
+              File::Spec->catdir( $ret->column_info($_)->{file_column_path},
+                $ret->id );
+            # mkpath is exported by File::Path
+            mkpath [$file_path];
+
+            my $outfile =
+              File::Spec->catfile( $file_path, $file->{$_}->{filename} );
+            File::Copy::copy( $file->{$_}->{handle}, $outfile );
+
+            $self->_file_column_callback($file->{$_},$ret,$_);
+        }
+    }
+}
+
+=head1 METHODS
+
+=cut
+
+
+=head2 _file_column_callback ($file,$ret,$target)
+
+method made to be overridden for callback purposes.
+
+=cut
+
+# Deliberate no-op hook, invoked once per file column after the file has been
+# copied to disk; subclasses override it for post-save processing (see POD).
+sub _file_column_callback {
+ my ($self,$file,$ret,$target) = @_;
+}
+
+=head1 NAME
+
+DBIx::Class::FileColumn - map files from the database to the filesystem.
+
+=head1 DESCRIPTION
+
+FileColumn
+
+=head1 SYNOPSIS
+
+In your L<DBIx::Class> table class:
+
+ __PACKAGE__->load_components( "PK::Auto", "FileColumn", "Core" );
+
+ # define your columns
+ __PACKAGE__->add_columns(
+ "id",
+ {
+ data_type => "integer",
+ is_auto_increment => 1,
+ is_nullable => 0,
+ size => 4,
+ },
+ "filename",
+ {
+ data_type => "varchar",
+ is_file_column => 1,
+ file_column_path =>'/tmp/uploaded_files',
+ # or for a Catalyst application
+ # file_column_path => MyApp->path_to('root','static','files'),
+ default_value => undef,
+ is_nullable => 1,
+ size => 255,
+ },
+ );
+
+
+In your L<Catalyst::Controller> class:
+
+FileColumn expects a hash reference containing an L<IO::File> object under the C<handle> key and the file's name under the C<filename> key.
+
+ my $entry = $c->model('MyAppDB::Articles')->create({
+ subject => 'blah',
+ filename => {
+ handle => $c->req->upload('myupload')->fh,
+ filename => $c->req->upload('myupload')->basename
+ },
+ body => '....'
+ });
+ $c->stash->{entry}=$entry;
+
+
+And Place the following in your TT template
+
+ Article Subject: [% entry.subject %]
+ Uploaded File:
+ <a href="/static/files/[% entry.id %]/[% entry.filename.filename %]">File</a>
+ Body: [% entry.body %]
+
+The file will be stored on the filesystem for later retrieval.
+Calling delete on your resultset will delete the file from the filesystem.
+Retrieval of the record automatically inflates the column back into a hash containing the IO::File handle and the filename.
+
+=head1 AUTHOR
+
+Victor Igumnov
+
+=head1 LICENSE
+
+This library is free software, you can redistribute it and/or modify
+it under the same terms as Perl itself.
+
+=cut
+
+1;
unless ref $values eq 'HASH';
my $cond = $self->_cond_for_update_delete;
-
+
return $self->result_source->storage->update(
- $self->result_source->from, $values, $cond
+ $self->result_source, $values, $cond
);
}
my $cond = $self->_cond_for_update_delete;
- $self->result_source->storage->delete($self->result_source->from, $cond);
+ $self->result_source->storage->delete($self->result_source, $cond);
return 1;
}
if $self->can('result_source_instance');
$self->throw_exception("No result_source set on this object; can't insert")
unless $source;
- #use Data::Dumper; warn Dumper($self);
+
# Check if we stored uninserted relobjs here in new()
$source->storage->txn_begin if(!$self->{_rel_in_storage});
my $ident_cond = $self->ident_condition;
$self->throw_exception("Cannot safely update a row in a PK-less table")
if ! keys %$ident_cond;
+
if ($upd) {
foreach my $key (keys %$upd) {
if (ref $upd->{$key}) {
my %to_update = $self->get_dirty_columns;
return $self unless keys %to_update;
my $rows = $self->result_source->storage->update(
- $self->result_source->from, \%to_update, $self->{_orig_ident} || $ident_cond);
+ $self->result_source, \%to_update,
+ $self->{_orig_ident} || $ident_cond
+ );
if ($rows == 0) {
$self->throw_exception( "Can't update ${self}: row not found" );
} elsif ($rows > 1) {
unless exists $self->{_column_data}{$column};
}
$self->result_source->storage->delete(
- $self->result_source->from, $ident_cond);
+ $self->result_source, $ident_cond);
$self->in_storage(undef);
} else {
$self->throw_exception("Can't do class delete without a ResultSource instance")
}
return @created;
}
- $self->storage->insert_bulk($self->source($name)->from, \@names, $data);
+ $self->storage->insert_bulk($self->source($name), \@names, $data);
}
=head2 exception_action
use DBIx::Class::Storage::DBI::Cursor;
use DBIx::Class::Storage::Statistics;
use IO::File;
+use Scalar::Util 'blessed';
__PACKAGE__->mk_group_accessors(
'simple' =>
sub _sql_maker_args {
my ($self) = @_;
- return ( limit_dialect => $self->dbh, %{$self->_sql_maker_opts} );
+ return ( bindtype=>'columns', limit_dialect => $self->dbh, %{$self->_sql_maker_opts} );
}
sub sql_maker {
my ($self, $op, $extra_bind, $ident, @args) = @_;
my ($sql, @bind) = $self->sql_maker->$op($ident, @args);
- unshift(@bind, @$extra_bind) if $extra_bind;
+ unshift(@bind,
+ map { ref $_ eq 'ARRAY' ? $_ : [ '!!dummy', $_ ] } @$extra_bind)
+ if $extra_bind;
@bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args
return ($sql, @bind);
}
sub _execute {
- my $self = shift;
-
- my ($sql, @bind) = $self->_prep_for_execute(@_);
-
+ my ($self, $op, $extra_bind, $ident, $bind_attributes, @args) = @_;
+
+ if( blessed($ident) && $ident->isa("DBIx::Class::ResultSource") )
+ {
+ $ident = $ident->from();
+ }
+
+ my ($sql, @bind) = $self->sql_maker->$op($ident, @args);
+ unshift(@bind,
+ map { ref $_ eq 'ARRAY' ? $_ : [ '!!dummy', $_ ] } @$extra_bind)
+ if $extra_bind;
if ($self->debug) {
- my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind;
+ my @debug_bind =
+ map { defined ($_ && $_->[1]) ? qq{'$_->[1]'} : q{'NULL'} } @bind;
$self->debugobj->query_start($sql, @debug_bind);
}
+ my $sth = eval { $self->sth($sql,$op) };
- my $sth = $self->sth($sql);
+ if (!$sth || $@) {
+ $self->throw_exception(
+ 'no sth generated via sql (' . ($@ || $self->_dbh->errstr) . "): $sql"
+ );
+ }
my $rv;
if ($sth) {
my $time = time();
- $rv = eval { $sth->execute(@bind) };
+
+ $rv = eval {
+
+ my $placeholder_index = 1;
+ foreach my $bound (@bind) {
+
+ my $attributes = {};
+ my($column_name, @data) = @$bound;
+
+ if( $bind_attributes ) {
+ $attributes = $bind_attributes->{$column_name}
+ if defined $bind_attributes->{$column_name};
+ }
+
+ foreach my $data (@data)
+ {
+ $data = ref $data ? ''.$data : $data; # stringify args
+
+ $sth->bind_param($placeholder_index, $data, $attributes);
+ $placeholder_index++;
+ }
+ }
+ $sth->execute();
+ };
+
if ($@ || !$rv) {
$self->throw_exception("Error executing '$sql': ".($@ || $sth->errstr));
}
$self->throw_exception("'$sql' did not generate a statement.");
}
if ($self->debug) {
- my @debug_bind = map { defined $_ ? qq{`$_'} : q{`NULL'} } @bind;
- $self->debugobj->query_end($sql, @debug_bind);
+ my @debug_bind =
+ map { defined ($_ && $_->[1]) ? qq{'$_->[1]'} : q{'NULL'} } @bind;
+ $self->debugobj->query_end($sql, @debug_bind);
}
return (wantarray ? ($rv, $sth, @bind) : $rv);
}
sub insert {
- my ($self, $ident, $to_insert) = @_;
+ my ($self, $source, $to_insert) = @_;
+
+ my $ident = $source->from;
+ my $bind_attributes = $self->source_bind_attributes($source);
+
$self->throw_exception(
"Couldn't insert ".join(', ',
map "$_ => $to_insert->{$_}", keys %$to_insert
)." into ${ident}"
- ) unless ($self->_execute('insert' => [], $ident, $to_insert));
+ ) unless ($self->_execute('insert' => [], $source, $bind_attributes, $to_insert));
return $to_insert;
}
## scalar refs, or at least, all the same type as the first set, the statement is
## only prepped once.
sub insert_bulk {
- my ($self, $table, $cols, $data) = @_;
+ my ($self, $source, $cols, $data) = @_;
my %colvalues;
+ my $table = $source->from;
@colvalues{@$cols} = (0..$#$cols);
my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues);
-# print STDERR "BIND".Dumper(\@bind);
-
+
if ($self->debug) {
- my @debug_bind = map { defined $_ ? qq{'$_'} : q{'NULL'} } @bind;
+ my @debug_bind = map { defined $_->[1] ? qq{$_->[1]} : q{'NULL'} } @bind;
$self->debugobj->query_start($sql, @debug_bind);
}
my $sth = $self->sth($sql);
# @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args
my $rv;
+
## This must be an arrayref, else nothing works!
+
my $tuple_status = [];
-# use Data::Dumper;
-# print STDERR Dumper($data);
+
+ ##use Data::Dumper;
+ ##print STDERR Dumper( $data, $sql, [@bind] );
+
if ($sth) {
+
my $time = time();
- $rv = eval { $sth->execute_array({ ArrayTupleFetch => sub { my $values = shift @$data; return if !$values; return [ @{$values}[@bind] ]},
- ArrayTupleStatus => $tuple_status }) };
-# print STDERR Dumper($tuple_status);
-# print STDERR "RV: $rv\n";
+
+ #$rv = eval {
+ #
+ # $sth->execute_array({
+
+ # ArrayTupleFetch => sub {
+
+ # my $values = shift @$data;
+ # return if !$values;
+ # return [ @{$values}[@bind] ];
+ # },
+
+ # ArrayTupleStatus => $tuple_status,
+ # })
+ #};
+
+ ## Get the bind_attributes, if any exist
+ my $bind_attributes = $self->source_bind_attributes($source);
+
+ ## Bind the values and execute
+ $rv = eval {
+
+ my $placeholder_index = 1;
+
+ foreach my $bound (@bind) {
+
+ my $attributes = {};
+ my ($column_name, $data_index) = @$bound;
+
+ if( $bind_attributes ) {
+ $attributes = $bind_attributes->{$column_name}
+ if defined $bind_attributes->{$column_name};
+ }
+
+ my @data = map { $_->[$data_index] } @$data;
+
+ $sth->bind_param_array( $placeholder_index, [@data], $attributes );
+ $placeholder_index++;
+ }
+ $sth->execute_array( {ArrayTupleStatus => $tuple_status} );
+
+ };
+
if ($@ || !defined $rv) {
my $errors = '';
foreach my $tuple (@$tuple_status)
}
sub update {
- return shift->_execute('update' => [], @_);
+ # update() now receives the ResultSource (not a raw table name) so that
+ # per-column bind attributes can be computed and handed to _execute
+ my $self = shift @_;
+ my $source = shift @_;
+ my $bind_attributes = $self->source_bind_attributes($source);
+
+ return $self->_execute('update' => [], $source, $bind_attributes, @_);
}
+
sub delete {
- return shift->_execute('delete' => [], @_);
+ # delete() takes the ResultSource like update(), but passes an empty set of
+ # bind attributes — deletes carry no column data that would need them
+ my $self = shift @_;
+ my $source = shift @_;
+
+ my $bind_attrs = {}; ## If ever it's needed...
+
+ return $self->_execute('delete' => [], $source, $bind_attrs, @_);
}
sub _select {
($order ? (order_by => $order) : ())
};
}
- my @args = ('select', $attrs->{bind}, $ident, $select, $condition, $order);
+ my $bind_attrs = {}; ## Future support
+ my @args = ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $condition, $order);
if ($attrs->{software_limit} ||
$self->sql_maker->_default_limit_syntax eq "GenericSubQ") {
$attrs->{software_limit} = 1;
return $self->_execute(@args);
}
+# Build a hash of column name => DBI bind attributes for the given
+# ResultSource, by asking bind_attribute_by_data_type() about each column's
+# declared data_type.  Columns without a data_type, or whose type needs no
+# special handling, get no entry.
+sub source_bind_attributes {
+ my ($self, $source) = @_;
+
+ my $bind_attributes;
+ foreach my $column ($source->columns) {
+
+ my $data_type = $source->column_info($column)->{data_type} || '';
+ $bind_attributes->{$column} = $self->bind_attribute_by_data_type($data_type)
+ if $data_type;
+ }
+
+ return $bind_attributes;
+}
+
=head2 select
=over 4
sub sqlt_type { shift->dbh->{Driver}->{Name} }
+=head2 bind_attribute_by_data_type
+
+Given a datatype from column info, returns a database specific bind attribute for
+$dbh->bind_param($val,$attribute) or nothing if we will let the database planner
+just handle it.
+
+Generally only needed for special case column types, like bytea in postgres.
+
+=cut
+
+# Default: no special bind attribute for any data type; database-specific
+# storage subclasses (e.g. the Pg storage, for bytea) override this.
+sub bind_attribute_by_data_type {
+ return;
+}
+
=head2 create_ddl_dir (EXPERIMENTAL)
=over 4
use strict;
use warnings;
-use DBD::Pg;
+use DBD::Pg qw(:pg_types);
use base qw/DBIx::Class::Storage::DBI/;
sub datetime_parser_type { return "DateTime::Format::Pg"; }
+# Pg-specific bind attributes: bytea columns must be bound with the
+# { pg_type => PG_BYTEA } attribute or DBD::Pg will mangle binary data.
+sub bind_attribute_by_data_type {
+ my ($self,$data_type) = @_;
+
+ my $bind_attributes = {
+ bytea => { pg_type => DBD::Pg::PG_BYTEA },
+ };
+
+ if( defined $bind_attributes->{$data_type} ) {
+ return $bind_attributes->{$data_type};
+ }
+ else {
+ # bare return (empty list), so callers storing the result get undef and
+ # source_bind_attributes records no attribute for this column
+ return;
+ }
+}
+
1;
=head1 NAME
--- /dev/null
+# Tests for DBIx::Class::FileColumn: a bad (non-hash) value must die, a
+# proper { handle, filename } hash must be accepted.
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use IO::File;
+
+my $schema = DBICTest->init_schema();
+
+plan tests => 2;
+
+
+# a plain string is not a valid file-column value; create() should die
+eval { $schema->resultset('FileColumn')->create({file=>'wrong set'}) };
+ok($@, 'FileColumn checking for checks against bad sets');
+# NOTE(review): 't/96file_column.pm' looks like it should be
+# 't/96file_column.t' (this test file) — confirm the fixture path exists
+my $fh = new IO::File('t/96file_column.pm','r');
+eval { $schema->resultset('FileColumn')->create({file => {handle => $fh, filename =>'96file_column.pm'}})};
+ok(!$@,'FileColumn checking if file handled properly.');
--- /dev/null
+# Round-trip test for Pg bytea bind attributes: create a row with a large
+# binary-ish string in a bytea column and read it back intact.
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my ($dsn, $dbuser, $dbpass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
+
+# fall back to a local postgres if the env vars are unset
+$dsn = 'dbi:Pg:dbname=postgres;host=localhost' unless $dsn;
+$dbuser = 'postgres' unless $dbuser;
+$dbpass = 'postgres' unless $dbpass;
+
+# NOTE(review): because defaults are assigned above, $dsn && $dbuser is
+# always true and this skip_all can never fire — confirm that is intended
+plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
+ unless ($dsn && $dbuser);
+
+plan tests => 3;
+
+DBICTest::Schema->compose_connection('PGTest' => $dsn, $dbuser, $dbpass);
+
+my $dbh = PGTest->schema->storage->dbh;
+
+$dbh->do(qq[
+
+ CREATE TABLE artist
+ (
+ artistid serial NOT NULL PRIMARY KEY,
+ media bytea NOT NULL,
+ name varchar NULL
+ );
+],{ RaiseError => 1, PrintError => 1 });
+
+
+PGTest::Artist->load_components(qw/
+
+ PK::Auto
+ Core
+/);
+
+# declare the bytea column so bind_attribute_by_data_type kicks in
+PGTest::Artist->add_columns(
+
+ "media", {
+
+ data_type => "bytea",
+ is_nullable => 0,
+ },
+);
+
+# test primary key handling
+my $big_long_string = 'abcd' x 250000;
+
+my $new = PGTest::Artist->create({ media => $big_long_string });
+
+ok($new->artistid, "Created a blob row");
+is($new->media, $big_long_string, "Set the blob correctly.");
+
+my $rs = PGTest::Artist->find({artistid=>$new->artistid});
+
+is($rs->get_column('media'), $big_long_string, "Created the blob correctly.");
+
+$dbh->do("DROP TABLE artist");
+
+
+
use lib 't/lib';
use_ok('DBICTest');
-my $schema = DBICTest->init_schema();
-DBICTest::CD->load_components(qw/CDBICompat CDBICompat::Pager/);
+DBICTest::Schema::CD->load_components(qw/CDBICompat CDBICompat::Pager/);
-DBICTest::CD->result_source_instance->schema($schema);
+my $schema = DBICTest->init_schema(compose_connection => 1);
+
+DBICTest::CD->result_source_instance->schema->storage($schema->storage);
my ( $pager, $it ) = DBICTest::CD->page(
{},
my $dbuser = $ENV{"DBICTEST_DBUSER"} || '';
my $dbpass = $ENV{"DBICTEST_DBPASS"} || '';
- my $schema = DBICTest::Schema->compose_namespace('DBICTest')
+ my $compose_method = ($args{compose_connection}
+ ? 'compose_connection'
+ : 'compose_namespace');
+
+ my $schema = DBICTest::Schema->$compose_method('DBICTest')
->connect($dsn, $dbuser, $dbpass);
$schema->storage->on_connect_do(['PRAGMA synchronous = OFF']);
if ( !$args{no_deploy} ) {
Artist
Employee
CD
+ FileColumn
Link
Bookmark
#dummy
__PACKAGE__->add_columns(
'artistid' => {
data_type => 'integer',
- is_auto_increment => 1
+ is_auto_increment => 1,
},
'name' => {
data_type => 'varchar',
--- /dev/null
+# Test result class exercising the FileColumn component.  The package name is
+# split across two lines to hide it from the PAUSE indexer.
+package
+DBICTest::Schema::FileColumn;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Core/;
+
+__PACKAGE__->load_components(qw/FileColumn/);
+
+__PACKAGE__->table('file_columns');
+
+# 'file' is the file-backed column; uploads land under /tmp/<row id>/
+__PACKAGE__->add_columns(
+ id => { data_type => 'integer', is_auto_increment => 1 },
+ file => { data_type => 'varchar', is_file_column => 1, file_column_path => '/tmp', size=>255 }
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;
);
--
+-- Table: file_columns
+--
+CREATE TABLE file_columns (
+ id INTEGER PRIMARY KEY NOT NULL,
+ file varchar(255)
+);
+
+--
-- Table: tags
--
CREATE TABLE tags (