use strict;
use warnings;
-use DBIx::Class 0.08099_07;
+use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
use Path::Class qw(dir file);
=head1 VERSION
-Version 1.001000
+Version 1.001013
=cut
-our $VERSION = '1.001002';
+our $VERSION = '1.001013';
=head1 NAME
-DBIx::Class::Fixtures
+DBIx::Class::Fixtures - Dump data and repopulate a database using rules
=head1 SYNOPSIS
For example:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [
{
- rel: 'artists',
- quantity: '2'
+ "rel": "artists",
+ "quantity": "2"
}
]
}
rule to specify this. For example:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [
{
- rel: 'artists',
- quantity: '2'
+ "rel": "artists",
+ "quantity": "2"
}
]
}
],
- rules: {
- Artist: {
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "rules": {
+ "Artist": {
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
}
}
to CD. This is eqivalent to:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3'],
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [ {
- rel: 'artists',
- quantity: '2',
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [ {
+ "rel": "artists",
+ "quantity": "2",
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
} ]
}
example:
{
- sets: [ {
- class: 'Producer',
- ids: ['5']
+ "sets": [ {
+ "class": "Producer",
+ "ids": ["5"]
} ],
- includes: [
- { file: 'base.json' }
+ "includes": [
+ { "file": "base.json" }
]
}
DateTime::Format::* can parse. For example:
{
- sets: [ {
- class: 'RecentItems',
- ids: ['9']
+ "sets": [ {
+ "class": "RecentItems",
+ "ids": ["9"]
} ],
- datetime_relative : "2007-10-30 00:00:00"
+ "datetime_relative": "2007-10-30 00:00:00"
}
This will work when dumping from a MySQL database and will cause any datetime
hash with one attribute - fetch. Set fetch to 1 or 0.
{
- might_have: { fetch: 1 },
- sets: [
+ "might_have": { "fetch": 1 },
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5']
+ "class": "Producer",
+ "ids": ["5"]
}
]
}
a JSON representation of a DBIx::Class search clause. For example:
{
- sets: [{
- class: 'Artist',
- quantiy: 'all',
- cond: { name: 'Dave' }
+ "sets": [{
+ "class": "Artist",
+ "quantiy": "all",
+ "cond": { "name": "Dave" }
}]
}
This could be specified in the cond hash like so:
{
- sets: [ {
- class: 'Artist',
- quantiy: 'all',
- cond: { no1_singles: '\> no1_albums' }
+ "sets": [ {
+ "class": "Artist",
+ "quantiy": "all",
+ "cond": { "no1_singles": "\> no1_albums" }
} ]
}
An array of relationships to be used in the cond clause.
{
- sets: [ {
- class: 'Artist',
- quantiy: 'all',
- cond: { 'cds.position': { '>': 4 } },
- join: ['cds']
+ "sets": [ {
+ "class": "Artist",
+ "quantiy": "all",
+ "cond": { "cds.position": { ">": 4 } },
+ "join": ["cds"]
} ]
}
Must be an array of hashes. Specifies which rels to also dump. For example:
{
- sets: [ {
- class: 'Artist',
- ids: ['1', '3'],
- fetch: [ {
- rel: 'cds',
- quantity: '3',
- cond: { position: '2' }
+ "sets": [ {
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "3",
+ "cond": { "position": "2" }
} ]
} ]
}
my $self = {
config_dir => $config_dir,
- _inherited_attributes => [qw/datetime_relative might_have rules/],
+ _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
debug => $params->{debug} || 0,
ignore_sql_errors => $params->{ignore_sql_errors},
- dumped_objects => {}
+ dumped_objects => {},
+ use_create => $params->{use_create} || 0
};
bless $self, $class;
return $self;
}
+=head2 available_config_sets
+
+Returns a list of all the config sets found in the L</config_dir>. These will
+be a list of the JSON-based files containing dump rules.
+
+=cut
+
+my @config_sets;
+sub available_config_sets {
+ @config_sets = scalar(@config_sets) ? @config_sets : map {
+ $_->basename;
+ } grep {
+ -f $_ && $_=~/json$/;
+ } dir((shift)->config_dir)->children;
+}
+
=head2 dump
=over 4
/home/me/app/fixtures/artist/3.fix
/home/me/app/fixtures/producer/5.fix
-schema and directory are required attributes. also, one of config or all must be specified.
+schema and directory are required attributes. Also, one of config or all must
+be specified.
+
+Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
+If this form is used your HashRef should conform to the structure rules defined
+for the JSON representations.
=cut
}
}
+ if($params->{excludes} && !$params->{all}) {
+ return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
+ }
+
my $schema = $params->{schema};
my $config;
if ($params->{config}) {
- #read config
- my $config_file = $self->config_dir->file($params->{config});
- $config = $self->load_config_file($config_file);
+ $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} :
+ do {
+ #read config
+ my $config_file = $self->config_dir->file($params->{config});
+ $self->load_config_file($config_file);
+ };
} elsif ($params->{all}) {
+ my %excludes = map {$_=>1} @{$params->{excludes}||[]};
$config = {
might_have => { fetch => 0 },
has_many => { fetch => 0 },
belongs_to => { fetch => 0 },
- sets => [map {{ class => $_, quantity => 'all' }} $schema->sources]
+ sets => [
+ map {
+ { class => $_, quantity => 'all' };
+ } grep {
+ !$excludes{$_}
+ } $schema->sources],
};
} else {
DBIx::Class::Exception->throw('must pass config or set all');
$config->{rules} ||= {};
my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
+ while ( my ($k,$v) = each %{ $config->{rules} } ) {
+ if ( my $source = eval { $schema->source($k) } ) {
+ $config->{rules}{$source->source_name} = $v;
+ }
+ }
+
foreach my $source (@sources) {
# apply rule to set if specified
my $rule = $config->{rules}->{$source->{class}};
# write dir and gen filename
- my $source_dir = $params->{set_dir}->subdir(lc $src->from);
+ my $source_dir = $params->{set_dir}->subdir($src->from);
$source_dir->mkpath(0, 0777);
# strip dir separators from file name
# don't bother looking at rels unless we are actually planning to dump at least one type
my ($might_have, $belongs_to, $has_many) = map {
- $set->{$_}{fetch};
+ $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
} qw/might_have belongs_to has_many/;
return unless $might_have
# clear existing db
$self->msg("- clearing DB of existing tables");
- $pre_schema->storage->with_deferred_fk_checks(sub {
- foreach my $table (@tables) {
- eval {
- $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
- };
- }
+ $pre_schema->storage->txn_do(sub {
+ $pre_schema->storage->with_deferred_fk_checks(sub {
+ foreach my $table (@tables) {
+ eval {
+ $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+ };
+ }
+ });
});
# import new ddl file to db
return \@data;
}
+=head2 dump_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set named in the C<configs> parameter.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+    $fixture->dump_config_sets({
+ schema => $schema,
+ configs => [qw/one.json other.json/],
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+sub dump_config_sets {
+ my ($self, $params) = @_;
+ my $available_config_sets = delete $params->{configs};
+ my $directory_template = delete $params->{directory_template} ||
+      DBIx::Class::Exception->throw("'directory_template' is a required parameter");
+
+ for my $set (@$available_config_sets) {
+    my $localparams = { %$params }; ## shallow copy so per-set keys don't leak between iterations
+ $localparams->{directory} = $directory_template->($self, $localparams, $set);
+ $localparams->{config} = $set;
+ $self->dump($localparams);
+ $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
+ }
+}
+
+=head2 dump_all_config_sets
+
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set in turn to the specified directory.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+ $fixture->dump_all_config_sets({
+ schema => $schema,
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+sub dump_all_config_sets {
+ my ($self, $params) = @_;
+ $self->dump_config_sets({
+ %$params,
+ configs=>[$self->available_config_sets],
+ });
+}
+
=head2 populate
=over 4
# optional, set to 1 to run ddl but not populate
no_populate => 0,
+ # optional, set to 1 to run each fixture through ->create rather than have
+ # each $rs populated using $rs->populate. Useful if you have overridden new() logic
+ # that affects the value of column(s).
+ use_create => 0,
+
# Dont try to clean the database, just populate over whats there. Requires
# schema option. Use this if you want to handle removing old data yourself
# no_deploy => 1
$self->msg("\nimporting fixtures");
my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
-
my $version_file = file($fixture_dir, '_dumper_version');
# DBIx::Class::Exception->throw('no version file found');
# unless -e $version_file;
$tmp_fixture_dir->rmtree;
}
$self->msg("- creating temp dir");
+ $tmp_fixture_dir->mkpath();
for ( map { $schema->source($_)->from } $schema->sources) {
my $from_dir = $fixture_dir->subdir($_);
next unless -e $from_dir;
$fixup_visitor = new Data::Visitor::Callback(%callbacks);
}
- $schema->storage->with_deferred_fk_checks(sub {
- foreach my $source (sort $schema->sources) {
- $self->msg("- adding " . $source);
- my $rs = $schema->resultset($source);
- my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
- next unless (-e $source_dir);
- my @rows;
- while (my $file = $source_dir->next) {
- next unless ($file =~ /\.fix$/);
- next if $file->is_dir;
- my $contents = $file->slurp;
- my $HASH1;
- eval($contents);
- $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
- push(@rows, $HASH1);
+ $schema->storage->txn_do(sub {
+ $schema->storage->with_deferred_fk_checks(sub {
+ foreach my $source (sort $schema->sources) {
+ $self->msg("- adding " . $source);
+ my $rs = $schema->resultset($source);
+ my $source_dir = $tmp_fixture_dir->subdir( $rs->result_source->from );
+ next unless (-e $source_dir);
+ my @rows;
+ while (my $file = $source_dir->next) {
+ next unless ($file =~ /\.fix$/);
+ next if $file->is_dir;
+ my $contents = $file->slurp;
+ my $HASH1;
+ eval($contents);
+ $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
+ if ( $params->{use_create} ) {
+ $rs->create( $HASH1 );
+ } else {
+ push(@rows, $HASH1);
+ }
+ }
+ $rs->populate(\@rows) if scalar(@rows);
}
- $rs->populate(\@rows) if scalar(@rows);
- }
+ });
});
-
$self->do_post_ddl( {
schema=>$schema,
post_ddl=>$params->{post_ddl}
Drew Taylor <taylor.andrew.j@gmail.com>
+ Frank Switalski <fswitalski@gmail.com>
+
=head1 LICENSE
This library is free software under the same license as perl itself