- add accessors for unique constraint names and columns to
ResultSource/ResultSourceProxy
- rework ResultSet::find() to search unique constraints
+ - CDBICompat: modify retrieve to fix column casing when ColumnCase is
+ loaded
+ - CDBICompat: override find_or_create to fix column casing when
+ ColumnCase is loaded
0.06002
+ - grab $self->dbh once per function in Storage::DBI
+ - nuke ResultSource caching of ->resultset for consistency reasons
- fix for -and conditions when updating or deleting on a ResultSet
-0.06001
+0.06001 2006-04-08 21:48:43
- minor fix to update in case of undefined rels
- fixes for cascade delete
- substantial improvements and fixes to deploy
- bugfix to Cursor to avoid error during DESTROY
- transaction DBI operations now in debug trace output
-0.06000
+0.06000 2006-03-25 18:03:46
- Lots of documentation improvements
- Minor tweak to related_resultset to prevent it storing a searched rs
- Fixup to columns_info_for when database returns type(size)
- Made do_txn respect void context (on the off-chance somebody cares)
- Fix exception text for nonexistent key in ResultSet::find()
-0.05999_04
+0.05999_04 2006-03-18 19:20:49
- Fix for delete on full-table resultsets
- Removed caching on count() and added _count for pager()
- ->connection does nothing if ->storage defined and no args
return $class->next::method(lc($col));
}
+# _build_query
+#
+# Build a query hash for find, et al. Overrides Retrieve::_build_query.
+
+sub _build_query {
+ my ($self, $query) = @_;
+
+ my %new_query;
+ $new_query{lc $_} = $query->{$_} for keys %$query;
+
+ return \%new_query;
+}
+
sub _mk_group_accessors {
my ($class, $type, $group, @fields) = @_;
#warn join(', ', map { ref $_ ? (@$_) : ($_) } @fields);
use warnings FATAL => 'all';
-sub retrieve {
- die "No args to retrieve" unless @_ > 1;
- shift->find(@_);
+sub retrieve {
+ my $self = shift;
+ die "No args to retrieve" unless @_ > 0;
+
+ my @cols = $self->primary_columns;
+
+ my $query;
+ if (ref $_[0] eq 'HASH') {
+ $query = { %{$_[0]} };
+ }
+ elsif (@_ == @cols) {
+ $query = {};
+ @{$query}{@cols} = @_;
+ }
+ else {
+ $query = {@_};
+ }
+
+ $query = $self->_build_query($query);
+ $self->find($query);
+}
+
+sub find_or_create {
+ my $self = shift;
+ my $query = ref $_[0] eq 'HASH' ? shift : {@_};
+
+ $query = $self->_build_query($query);
+ $self->next::method($query);
+}
+
+# _build_query
+#
+# Build a query hash. Defaults to a no-op; ColumnCase overrides.
+
+sub _build_query {
+ my ($self, $query) = @_;
+
+ return $query;
}
sub retrieve_from_sql {
my ($class, $target, @to_inject) = @_;
{
no strict 'refs';
- unshift(@{"${target}::ISA"}, grep { $target ne $_ && !$target->isa($_)} @to_inject);
+ my %seen;
+ unshift( @{"${target}::ISA"},
+ grep { !$seen{ $_ }++ && $target ne $_ && !$target->isa($_) }
+ @to_inject
+ );
}
# Yes, this is hack. But it *does* work. Please don't submit tickets about
},
);
- $translator->parser('DBIx::Class');
- $translator->producer('DBIx::Class::File');
+ $translator->parser('SQL::Translator::Parser::DBIx::Class');
+ $translator->producer('SQL::Translator::Producer::DBIx::Class::File');
my $output = $translator->translate(@args) or die
"Error: " . $translator->error;
$schema->resultset('Actor')->roles();
$schema->resultset('Role')->search_related('actors', { Name => 'Fred' });
- $schema->resultset('ActorRole')->add_to_role({ Name => 'Sherlock Holmes'});
+ $schema->resultset('ActorRole')->add_to_roles({ Name => 'Sherlock Holmes'});
See L<DBIx::Class::Manual::Cookbook> for more.
'My::DBIC::Schema::Actor' );
My::DBIC::Schema::Actor->many_to_many( roles => 'actor_roles',
- 'My::DBIC::Schema::Roles' );
+ 'role' );
...
sub search {
my $self = shift;
-
- my $rs;
- if( @_ ) {
- my $attrs = { %{$self->{attrs}} };
- my $having = delete $attrs->{having};
- $attrs = { %$attrs, %{ pop(@_) } } if @_ > 1 and ref $_[$#_] eq 'HASH';
-
- my $where = (@_
- ? ((@_ == 1 || ref $_[0] eq "HASH")
- ? shift
- : ((@_ % 2)
- ? $self->throw_exception(
- "Odd number of arguments to search")
- : {@_}))
- : undef());
- if (defined $where) {
- $attrs->{where} = (defined $attrs->{where}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $where, $attrs->{where} ] }
- : $where);
- }
-
- if (defined $having) {
- $attrs->{having} = (defined $attrs->{having}
- ? { '-and' =>
- [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
- $having, $attrs->{having} ] }
- : $having);
- }
+ my $attrs = { %{$self->{attrs}} };
+ my $having = delete $attrs->{having};
+ $attrs = { %$attrs, %{ pop(@_) } } if @_ > 1 and ref $_[$#_] eq 'HASH';
+
+ my $where = (@_
+ ? ((@_ == 1 || ref $_[0] eq "HASH")
+ ? shift
+ : ((@_ % 2)
+ ? $self->throw_exception(
+ "Odd number of arguments to search")
+ : {@_}))
+ : undef());
+ if (defined $where) {
+ $attrs->{where} = (defined $attrs->{where}
+ ? { '-and' =>
+ [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
+ $where, $attrs->{where} ] }
+ : $where);
+ }
- $rs = (ref $self)->new($self->result_source, $attrs);
+ if (defined $having) {
+ $attrs->{having} = (defined $attrs->{having}
+ ? { '-and' =>
+ [ map { ref $_ eq 'ARRAY' ? [ -or => $_ ] : $_ }
+ $having, $attrs->{having} ] }
+ : $having);
}
- else {
- $rs = $self;
- $rs->reset;
+
+ my $rs = (ref $self)->new($self->result_source, $attrs);
+
+ my $rows = $self->get_cache;
+ if( @{$rows} ) {
+ $rs->set_cache($rows);
}
+
return (wantarray ? $rs->all : $rs);
}
? $self->result_source->unique_constraint_columns($attrs->{key})
: $self->result_source->primary_columns;
- my %hash;
+ my $hash;
if (ref $_[0] eq 'HASH') {
- %hash = %{ $_[0] };
+ $hash = { %{$_[0]} };
}
elsif (@_ == @cols) {
- @hash{@cols} = @_;
+ $hash = {};
+ @{$hash}{@cols} = @_;
}
else {
- # Hack for CDBI queries
- %hash = @_;
+ $self->throw_exception(
+ "Arguments to find must be a hashref or match the number of columns in the "
+      . (exists $attrs->{key} ? "$attrs->{key} unique constraint" : "primary key")
+ );
}
# Check the hash we just parsed against our source's unique constraints
"Can't find unless a primary key or unique constraint is defined"
) unless @constraint_names;
- my @unique_hashes;
+ my @unique_queries;
foreach my $name (@constraint_names) {
my @unique_cols = $self->result_source->unique_constraint_columns($name);
- my %unique_hash = $self->_unique_hash(\%hash, \@unique_cols);
+ my $unique_query = $self->_build_unique_query($hash, \@unique_cols);
# Add the ResultSet's alias
- foreach my $key (grep { ! m/\./ } keys %unique_hash) {
- $unique_hash{"$self->{attrs}{alias}.$key"} = delete $unique_hash{$key};
+ foreach my $key (grep { ! m/\./ } keys %$unique_query) {
+ $unique_query->{"$self->{attrs}{alias}.$key"} = delete $unique_query->{$key};
}
- push @unique_hashes, \%unique_hash if %unique_hash;
+ push @unique_queries, $unique_query if %$unique_query;
}
# Handle cases where the ResultSet already defines the query
- my $query = @unique_hashes ? \@unique_hashes : undef;
+ my $query = @unique_queries ? \@unique_queries : undef;
# Run the query
if (keys %$attrs) {
}
}
-# _unique_hash
+# _build_unique_query
#
-# Constrain the specified hash based on the specified column names.
+# Constrain the specified query hash based on the specified column names.
-sub _unique_hash {
- my ($self, $hash, $unique_cols) = @_;
-
- # Ugh, CDBI lowercases column names
- if (exists $INC{'DBIx/Class/CDBICompat/ColumnCase.pm'}) {
- foreach my $key (keys %$hash) {
- $hash->{lc $key} = delete $hash->{$key};
- }
- }
+sub _build_unique_query {
+ my ($self, $query, $unique_cols) = @_;
- my %unique_hash =
- map { $_ => $hash->{$_} }
- grep { exists $hash->{$_} }
+ my %unique_query =
+ map { $_ => $query->{$_} }
+ grep { exists $query->{$_} }
@$unique_cols;
- return %unique_hash;
+ return \%unique_query;
}
=head2 search_related
'resultset does not take any arguments. If you want another resultset, '.
'call it on the schema instead.'
) if scalar @_;
- return $self->{_resultset}
- if ref $self->{_resultset} eq $self->resultset_class;
- return $self->{_resultset} = $self->resultset_class->new(
+
+ # disabled until we can figure out a way to do it without consistency issues
+ #
+ #return $self->{_resultset}
+ # if ref $self->{_resultset} eq $self->resultset_class;
+ #return $self->{_resultset} =
+
+ return $self->resultset_class->new(
$self, $self->{resultset_attributes}
);
}
sub txn_begin {
my $self = shift;
- if (($self->{transaction_depth}++ == 0) and ($self->dbh->{AutoCommit})) {
- $self->debugfh->print("BEGIN WORK\n")
- if ($self->debug);
- $self->dbh->begin_work;
+ if ($self->{transaction_depth}++ == 0) {
+ my $dbh = $self->dbh;
+ if ($dbh->{AutoCommit}) {
+ $self->debugfh->print("BEGIN WORK\n")
+ if ($self->debug);
+ $dbh->begin_work;
+ }
}
}
sub txn_commit {
my $self = shift;
if ($self->{transaction_depth} == 0) {
- unless ($self->dbh->{AutoCommit}) {
+ my $dbh = $self->dbh;
+ unless ($dbh->{AutoCommit}) {
$self->debugfh->print("COMMIT\n")
if ($self->debug);
- $self->dbh->commit;
+ $dbh->commit;
}
}
else {
eval {
if ($self->{transaction_depth} == 0) {
- unless ($self->dbh->{AutoCommit}) {
+ my $dbh = $self->dbh;
+ unless ($dbh->{AutoCommit}) {
$self->debugfh->print("ROLLBACK\n")
if ($self->debug);
- $self->dbh->rollback;
+ $dbh->rollback;
}
}
else {
sub columns_info_for {
my ($self, $table) = @_;
- if ($self->dbh->can('column_info')) {
+ my $dbh = $self->dbh;
+
+ if ($dbh->can('column_info')) {
my %result;
- my $old_raise_err = $self->dbh->{RaiseError};
- my $old_print_err = $self->dbh->{PrintError};
- $self->dbh->{RaiseError} = 1;
- $self->dbh->{PrintError} = 0;
+ my $old_raise_err = $dbh->{RaiseError};
+ my $old_print_err = $dbh->{PrintError};
+ $dbh->{RaiseError} = 1;
+ $dbh->{PrintError} = 0;
eval {
- my $sth = $self->dbh->column_info( undef, undef, $table, '%' );
+ my $sth = $dbh->column_info( undef, undef, $table, '%' );
$sth->execute();
while ( my $info = $sth->fetchrow_hashref() ){
my %column_info;
$result{$info->{COLUMN_NAME}} = \%column_info;
}
};
- $self->dbh->{RaiseError} = $old_raise_err;
- $self->dbh->{PrintError} = $old_print_err;
+ $dbh->{RaiseError} = $old_raise_err;
+ $dbh->{PrintError} = $old_print_err;
return \%result if !$@;
}
my %result;
- my $sth = $self->dbh->prepare("SELECT * FROM $table WHERE 1=0");
+ my $sth = $dbh->prepare("SELECT * FROM $table WHERE 1=0");
$sth->execute;
my @columns = @{$sth->{NAME_lc}};
for my $i ( 0 .. $#columns ){
my %column_info;
my $type_num = $sth->{TYPE}->[$i];
my $type_name;
- if(defined $type_num && $self->dbh->can('type_info')) {
- my $type_info = $self->dbh->type_info($type_num);
+ if(defined $type_num && $dbh->can('type_info')) {
+ my $type_info = $dbh->type_info($type_num);
$type_name = $type_info->{TYPE_NAME} if $type_info;
}
$column_info{data_type} = $type_name ? $type_name : $type_num;
use lib qw(t/lib);
use DBICTest::ForeignComponent;
-plan tests => 1;
+plan tests => 2;
# Tests if foreign component was loaded by calling foreign's method
ok( DBICTest::ForeignComponent->foreign_test_method, 'foreign component' );
+# Test for inject_base to filter out duplicates
+{ package DBICTest::_InjectBaseTest;
+ use base qw/ DBIx::Class /;
+}
+DBICTest::_InjectBaseTest->inject_base( 'DBICTest::_InjectBaseTest', qw/
+ DBICTest::_InjectBaseTest::A
+ DBICTest::_InjectBaseTest::B
+ DBICTest::_InjectBaseTest::B
+ DBICTest::_InjectBaseTest::C
+/);
+is_deeply( \@DBICTest::_InjectBaseTest::ISA,
+ [qw/
+ DBICTest::_InjectBaseTest::A
+ DBICTest::_InjectBaseTest::B
+ DBICTest::_InjectBaseTest::C
+ DBIx::Class
+ /],
+ 'inject_base filters duplicates'
+);