From: Matt S Trout
Date: Mon, 16 Jan 2006 04:06:56 +0000 (+0000)
Subject: Changed inflate_result API to include ResultSource, added update and update_all to...
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=c01ab17249845f2df2cde39a0381971fc64479e4;p=dbsrgits%2FDBIx-Class-Historic.git

Changed inflate_result API to include ResultSource, added update and update_all to ResultSet
---

diff --git a/lib/DBIx/Class/CDBICompat/ImaDBI.pm b/lib/DBIx/Class/CDBICompat/ImaDBI.pm
index 4651263..9d29486 100644
--- a/lib/DBIx/Class/CDBICompat/ImaDBI.pm
+++ b/lib/DBIx/Class/CDBICompat/ImaDBI.pm
@@ -91,7 +91,7 @@ sub sth_to_objects {
   my ($class, $sth) = @_;
   my @ret;
   while (my $row = $sth->fetchrow_hashref) {
-    push(@ret, $class->inflate_result($row));
+    push(@ret, $class->inflate_result($class->result_source, $row));
   }
   return @ret;
 }
diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm
index 881033e..5d5e811 100644
--- a/lib/DBIx/Class/ResultSet.pm
+++ b/lib/DBIx/Class/ResultSet.pm
@@ -266,7 +266,8 @@ sub _construct_object {
       $me{$col} = shift @row;
     }
   }
-  my $new = $self->{source}->result_class->inflate_result(\%me, \%pre);
+  my $new = $self->{source}->result_class->inflate_result(
+              $self->{source}, \%me, \%pre);
   $new = $self->{attrs}{record_filter}->($new)
     if exists $self->{attrs}{record_filter};
   return $new;
@@ -344,19 +345,59 @@ sub first {
   return $_[0]->reset->next;
 }
 
+=head2 update(\%values)
+
+Sets the specified columns in the resultset to the supplied values
+
+=cut
+
+sub update {
+  my ($self, $values) = @_;
+  die "Values for update must be a hash" unless ref $values eq 'HASH';
+  return $self->{source}->storage->update(
+           $self->{source}->from, $values, $self->{cond});
+}
+
+=head2 update_all(\%values)
+
+Fetches all objects and updates them one at a time. ->update_all will run
+cascade triggers, ->update will not.
+
+=cut
+
+sub update_all {
+  my ($self, $values) = @_;
+  die "Values for update must be a hash" unless ref $values eq 'HASH';
+  foreach my $obj ($self->all) {
+    $obj->set_columns($values)->update;
+  }
+  return 1;
+}
+
 =head2 delete
 
-Deletes all elements in the resultset.
+Deletes the contents of the resultset from its result source.
 
 =cut
 
 sub delete {
   my ($self) = @_;
-  $_->delete for $self->all;
+  $self->{source}->storage->delete($self->{source}->from, $self->{cond});
   return 1;
 }
 
-*delete_all = \&delete; # Yeah, yeah, yeah ...
+=head2 delete_all
+
+Fetches all objects and deletes them one at a time. ->delete_all will run
+cascade triggers, ->delete will not.
+
+=cut
+
+sub delete_all {
+  my ($self) = @_;
+  $_->delete for $self->all;
+  return 1;
+}
 
 =head2 pager
 
diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm
index d325254..f2bd3e9 100644
--- a/lib/DBIx/Class/Row.pm
+++ b/lib/DBIx/Class/Row.pm
@@ -88,7 +88,7 @@ sub update {
   my ($self, $upd) = @_;
   $self->throw( "Not in database" ) unless $self->in_storage;
   my %to_update = $self->get_dirty_columns;
-  return -1 unless keys %to_update;
+  return $self unless keys %to_update;
   my $rows = $self->result_source->storage->update(
                $self->result_source->from, \%to_update, $self->ident_condition);
   if ($rows == 0) {
@@ -114,20 +114,16 @@ sub delete {
   my $self = shift;
   if (ref $self) {
     $self->throw( "Not in database" ) unless $self->in_storage;
-    #warn $self->_ident_cond.' '.join(', ', $self->_ident_values);
     $self->result_source->storage->delete(
       $self->result_source->from, $self->ident_condition);
     $self->in_storage(undef);
-    #$self->store_column($_ => undef) for $self->primary_columns;
-      # Should probably also arrange to trash PK if auto
-      # but if we do, post-delete cascade triggers fail :/
   } else {
     my $attrs = { };
     if (@_ > 1 && ref $_[$#_] eq 'HASH') {
       $attrs = { %{ pop(@_) } };
     }
     my $query = (ref $_[0] eq 'HASH' ? $_[0] : {@_});
-    $self->storage->delete($self->_table_name, $query);
+    $self->result_source->resultset->search(@_)->delete;
   }
   return $self;
 }
@@ -210,6 +206,7 @@ sub set_columns {
   while (my ($col,$val) = each %$data) {
     $self->set_column($col,$val);
   }
+  return $self;
 }
 
 =head2 copy
 
@@ -220,6 +217,13 @@ Inserts a new row with the specified changes.
 
 =cut
 
+sub copy {
+  my ($self, $changes) = @_;
+  my $new = bless({ _column_data => { %{$self->{_column_data}}} }, ref $self);
+  $new->set_column($_ => $changes->{$_}) for keys %$changes;
+  return $new->insert;
+}
+
 =head2 store_column
 
   $obj->store_column($col => $val);
@@ -239,24 +243,28 @@ sub store_column {
 
 =head2 inflate_result
 
-  Class->inflate_result(\%me, \%prefetch?)
+  Class->inflate_result($result_source, \%me, \%prefetch?)
 
 Called by ResultSet to inflate a result from storage
 
 =cut
 
 sub inflate_result {
-  my ($class, $me, $prefetch) = @_;
+  my ($class, $source, $me, $prefetch) = @_;
   #use Data::Dumper; print Dumper(@_);
-  my $new = bless({ _column_data => $me, _in_storage => 1 },
-                  ref $class || $class);
+  my $new = bless({ result_source => $source,
+                    _column_data => $me,
+                    _in_storage => 1
+                  },
+                  ref $class || $class);
   my $schema;
   PRE: foreach my $pre (keys %{$prefetch||{}}) {
     my $rel_obj = $class->relationship_info($pre);
     die "Can't prefetch non-eistant relationship ${pre}" unless $rel_obj;
-    $schema ||= $new->result_source->schema;
+    $schema ||= $source->schema;
     my $pre_class = $schema->class($rel_obj->{class});
-    my $fetched = $pre_class->inflate_result(@{$prefetch->{$pre}});
+    my $fetched = $pre_class->inflate_result(
+                    $schema->source($pre_class), @{$prefetch->{$pre}});
     $class->throw("No accessor for prefetched $pre")
       unless defined $rel_obj->{attrs}{accessor};
     PRIMARY: foreach my $pri ($rel_obj->{class}->primary_columns) {
@@ -276,13 +284,6 @@ sub inflate_result {
   return $new;
 }
 
-sub copy {
-  my ($self, $changes) = @_;
-  my $new = bless({ _column_data => { %{$self->{_column_data}}} }, ref $self);
-  $new->set_column($_ => $changes->{$_}) for keys %$changes;
-  return $new->insert;
-}
-
 =head2 insert_or_update
 
   $obj->insert_or_update
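A minimal usage sketch of the resultset-level methods this commit introduces, based only on the POD added above; how $rs is obtained and the column name used here (active) are hypothetical, not something the patch itself shows:

```perl
use strict;
use warnings;

# Assume $rs is a DBIx::Class::ResultSet already restricted by some
# search condition, e.g. my $rs = $artist_rs->search({ label => 'Shadowcat' });

# ->update issues a single UPDATE against the resultset's WHERE condition;
# it dies unless the values are passed as a hash reference.
$rs->update({ active => 0 });

# ->update_all fetches every row and updates the objects one at a time,
# so cascade triggers run (at the cost of one query per row).
$rs->update_all({ active => 0 });

# ->delete now issues a single DELETE against the condition, while
# ->delete_all keeps the per-row behaviour and fires cascade triggers.
$rs->delete;
# $rs->delete_all;
```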
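And a before/after sketch of the inflate_result calling convention the commit changes: anything invoking it directly (as CDBICompat::ImaDBI does above) now passes the ResultSource first. $result_class, $source and the hash references are placeholders for whatever the caller already has in hand:

```perl
# Old convention: raw column data only.
# my $obj = $result_class->inflate_result(\%columns);

# New convention: ResultSource first, then column data, then optional
# prefetched relation data keyed by relationship name.
my $obj = $result_class->inflate_result($source, \%columns, \%prefetched);
```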