From: Oliver Gorwits
Date: Tue, 27 Apr 2010 22:35:29 +0000 (+0000)
Subject: add prepopulate_journal method based on pseudocode from frew. can it be optimized...
X-Git-Tag: v0.900201~23
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=1d09727da140af581226bb64bcca01b8d25a060d;p=dbsrgits%2FDBIx-Class-Journal.git

add prepopulate_journal method based on pseudocode from frew. can it be
optimized any?
---

diff --git a/lib/DBIx/Class/Journal.pm b/lib/DBIx/Class/Journal.pm
index 11b18fc..e6bb3c7 100644
--- a/lib/DBIx/Class/Journal.pm
+++ b/lib/DBIx/Class/Journal.pm
@@ -256,6 +256,19 @@ C. Not recommended, but present for backwards compatibility.
 
+=item prepopulate_journal
+
+Will load the current state of your original source tables into the audit
+history as fake inserts in a single initial changeset. The advantage is that
+later deletions of a row will be consistent in the journal with an initial
+insert for that row.
+
+Note that this can be an intensive and time-consuming task, depending on how
+much data you have in your original sources; all of it will be copied to the
+journal history. However, this step is essential if you are retrofitting
+Journalling to a schema with existing data; otherwise, when you delete a row,
+the Journal will die because it cannot relate it to an initial row insert.
+
 =item changeset_user $user_id
 
 Set the C<user_id> for the following changeset(s). This must be an integer.
diff --git a/lib/DBIx/Class/Schema/Journal.pm b/lib/DBIx/Class/Schema/Journal.pm
index 6c1678a..e20f72c 100644
--- a/lib/DBIx/Class/Schema/Journal.pm
+++ b/lib/DBIx/Class/Schema/Journal.pm
@@ -138,6 +138,61 @@ sub create_journal_for {
     }
 }
 
+sub prepopulate_journal {
+    my $self = shift;
+
+    my %j_sources = map { $_ => 1 } $self->journal_sources
+        ? @{$self->journal_sources}
+        : $self->sources;
+
+    my $schema = $self;
+    my $j_schema = $self->_journal_schema;
+    my $changelog_rs = $j_schema->resultset('ChangeLog');
+
+    # using our own overridden txn_do (see below) will create a changeset
+    $schema->txn_do( sub {
+        my $chs_id = $j_schema->current_changeset;
+
+        foreach my $s_name ($self->sources) {
+            next unless $j_sources{$s_name};
+
+            my $from_rs = $schema->resultset($s_name);
+            my ($pk) = $from_rs->result_source->primary_columns;
+            $from_rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
+
+            my $to_rs = $j_schema->resultset("${s_name}AuditHistory");
+            my $log_rs = $j_schema->resultset("${s_name}AuditLog");
+
+            my $page = 1;
+            while (
+                my @x = $from_rs->search(undef, {
+                    rows => 1_000,
+                    page => $page++,
+                })
+            ) {
+                # generate one change log entry (and its id) per row in this page
+                my @log_ids = map { $_->id }
+                    $changelog_rs->populate([
+                        map {{ changeset_id => $chs_id }} (0 .. $#x)
+                    ]);
+
+                # create the audit log entries for the rows in this page
+                $log_rs->populate([
+                    map {{ create_id => $log_ids[$_], id => $x[$_]->{$pk} }} (0 .. $#x)
+                ]);
+
+                # now populate the audit history
+                $to_rs->populate([
+                    map +{
+                        %{$x[$_]},
+                        audit_change_id => $log_ids[$_],
+                    }, (0 .. $#x)
+                ]);
+            }
+        }
+    });
+}
+
 sub txn_do {
     my ($self, $user_code, @args) = @_;
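
Usage sketch (illustrative, not part of the patch): the example below assumes an
application schema class named My::Schema with an Artist result source, a SQLite
database that already contains data, and journal tables that have already been
deployed. Only load_components('Schema::Journal'), journal_sources,
changeset_user, txn_do and the new prepopulate_journal come from this
distribution; every other name and value is a placeholder.

    use strict;
    use warnings;

    package My::Schema;
    use base 'DBIx::Class::Schema';

    __PACKAGE__->load_components(qw/Schema::Journal/);

    # journal only these sources; leave unset to journal every source
    __PACKAGE__->journal_sources([qw/ Artist /]);

    # assumes result classes exist under My::Schema::Result::*
    __PACKAGE__->load_namespaces;

    package main;

    my $schema = My::Schema->connect('dbi:SQLite:dbname=existing_app.db');

    # changesets are attributed to an integer user id
    $schema->changeset_user(1);

    # copy every row currently in the journalled sources into the audit
    # history as fake inserts, all inside one initial changeset
    $schema->prepopulate_journal;

    # a later delete can now be related to that initial insert instead of
    # making the Journal die; 42 is a placeholder primary key value
    $schema->txn_do(sub {
        $schema->resultset('Artist')->find(42)->delete;
    });

This is the consistency the new POD paragraph describes: after prepopulation,
each pre-existing row has a fake initial insert in the journal for later
deletes to refer back to.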