use strict;
use warnings;
-use DBIx::Class 0.08099_07;
+use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
use Path::Class qw(dir file);
+use File::Spec::Functions 'catfile', 'catdir';
use Config::Any::JSON;
use Data::Dump::Streamer;
use Data::Visitor::Callback;
use Hash::Merge qw( merge );
use Data::Dumper;
use Class::C3::Componentised;
+use MIME::Base64;
+use File::Temp qw/tempdir/;
use base qw(Class::Accessor::Grouped);
our $namespace_counter = 0;
__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
- _inherited_attributes debug schema_class dumped_objects/);
+ _inherited_attributes debug schema_class dumped_objects config_attrs/);
-=head1 VERSION
-
-Version 1.001000
-
-=cut
-
-our $VERSION = '1.001002';
+our $VERSION = '1.001018';
=head1 NAME
-DBIx::Class::Fixtures
+DBIx::Class::Fixtures - Dump data and repopulate a database using rules
=head1 SYNOPSIS
For example:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [
{
- rel: 'artists',
- quantity: '2'
+ "rel": "artists",
+ "quantity": "2"
}
]
}
rule to specify this. For example:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [
{
- rel: 'artists',
- quantity: '2'
+ "rel": "artists",
+ "quantity": "2"
}
]
}
],
- rules: {
- Artist: {
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "rules": {
+ "Artist": {
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
}
}
to CD. This is eqivalent to:
{
- sets: [
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3'],
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
},
{
- class: 'Producer',
- ids: ['5'],
- fetch: [ {
- rel: 'artists',
- quantity: '2',
- fetch: [ {
- rel: 'cds',
- quantity: 'all'
+ "class": "Producer",
+ "ids": ["5"],
+ "fetch": [ {
+ "rel": "artists",
+ "quantity": "2",
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "all"
} ]
} ]
}
example:
{
- sets: [ {
- class: 'Producer',
- ids: ['5']
+ "sets": [ {
+ "class": "Producer",
+ "ids": ["5"]
} ],
- includes: [
- { file: 'base.json' }
+ "includes": [
+ { "file": "base.json" }
]
}
DateTime::Format::* can parse. For example:
{
- sets: [ {
- class: 'RecentItems',
- ids: ['9']
+ "sets": [ {
+ "class": "RecentItems",
+ "ids": ["9"]
} ],
- datetime_relative : "2007-10-30 00:00:00"
+ "datetime_relative": "2007-10-30 00:00:00"
}
This will work when dumping from a MySQL database and will cause any datetime
hash with one attribute - fetch. Set fetch to 1 or 0.
{
- might_have: { fetch: 1 },
- sets: [
+ "might_have": { "fetch": 1 },
+ "sets": [
{
- class: 'Artist',
- ids: ['1', '3']
+ "class": "Artist",
+ "ids": ["1", "3"]
},
{
- class: 'Producer',
- ids: ['5']
+ "class": "Producer",
+ "ids": ["5"]
}
]
}
a JSON representation of a DBIx::Class search clause. For example:
{
- sets: [{
- class: 'Artist',
- quantiy: 'all',
- cond: { name: 'Dave' }
+ "sets": [{
+ "class": "Artist",
+    "quantity": "all",
+ "cond": { "name": "Dave" }
}]
}
This could be specified in the cond hash like so:
{
- sets: [ {
- class: 'Artist',
- quantiy: 'all',
- cond: { no1_singles: '\> no1_albums' }
+ "sets": [ {
+ "class": "Artist",
+    "quantity": "all",
+ "cond": { "no1_singles": "\> no1_albums" }
} ]
}
An array of relationships to be used in the cond clause.
{
- sets: [ {
- class: 'Artist',
- quantiy: 'all',
- cond: { 'cds.position': { '>': 4 } },
- join: ['cds']
+ "sets": [ {
+ "class": "Artist",
+    "quantity": "all",
+ "cond": { "cds.position": { ">": 4 } },
+ "join": ["cds"]
} ]
}
Must be an array of hashes. Specifies which rels to also dump. For example:
{
- sets: [ {
- class: 'Artist',
- ids: ['1', '3'],
- fetch: [ {
- rel: 'cds',
- quantity: '3',
- cond: { position: '2' }
+ "sets": [ {
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "3",
+ "cond": { "position": "2" }
} ]
} ]
}
This value will be inherited by all fetches in this set. This is not true for
the has_many attribute.
+=head2 external
+
+In some cases your database information might be keys to values in some sort of
+external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
+to store blob information on the filesystem. In this case you may wish the ability
+to backup your external storage in the same way your database data. The L</external>
+attribute lets you specify a handler for this type of issue. For example:
+
+ {
+ "sets": [{
+ "class": "Photo",
+ "quantity": "all",
+ "external": {
+ "file": {
+ "class": "File",
+ "args": {"path":"__ATTR(photo_dir)__"}
+ }
+ }
+ }]
+ }
+
+This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
+where the path to a file is specified by the C<file> field of the C<Photo> source.
+We use the uninflated value of the field so you need to completely handle backup
+and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
+and you can create your own custom handlers by placing a '+' in the namespace:
+
+ "class": "+MyApp::Schema::SomeExternalStorage",
+
+Although if possible I'd love to get patches to add some of the other common
+types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
+
+See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
+
=head1 RULE ATTRIBUTES
=head2 cond
Same as with L</SET ATTRIBUTES>
+=head1 RULE SUBSTITUTIONS
+
+You can provide the following substitution patterns for your rule values. An
+example of this might be:
+
+ {
+ "sets": [{
+ "class": "Photo",
+     "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
+   }]
+ }
+
+=head2 ENV
+
+Provide a value from %ENV
+
+=head2 ATTR
+
+Provide a value from L</config_attrs>
+
+=head2 catfile
+
+Create the path to a file from a list
+
+=head2 catdir
+
+Create the path to a directory from a list
+
=head1 METHODS
=head2 new
ignore errors on import of DDL etc
+=item config_attrs
+
+A hash of information you can use to do replacements inside your configuration
+sets. For example, if your set looks like:
+
+ {
+ "sets": [ {
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+       "quantity": "__ATTR(quantity)__"
+     } ]
+ } ]
+ }
+
+ my $fixtures = DBIx::Class::Fixtures->new( {
+   config_dir => '/home/me/app/fixture_configs',
+ config_attrs => {
+ quantity => 100,
+ },
+ });
+
+You may wish to do this if you want to let whoever runs the dumps have a bit
+more control
+
=back
my $fixtures = DBIx::Class::Fixtures->new( {
my $self = {
config_dir => $config_dir,
- _inherited_attributes => [qw/datetime_relative might_have rules/],
+ _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
debug => $params->{debug} || 0,
- ignore_sql_errors => $params->{ignore_sql_errors}
+ ignore_sql_errors => $params->{ignore_sql_errors},
+ dumped_objects => {},
+ use_create => $params->{use_create} || 0,
+ config_attrs => $params->{config_attrs} || {},
};
bless $self, $class;
- $self->dumped_objects({});
-
return $self;
}
+=head2 available_config_sets
+
+Returns a list of all the config sets found in the L</config_dir>. These will
+be a list of the json based files containing dump rules.
+
+=cut
+
+# Return the basenames (e.g. "one.json") of all JSON config-set files
+# found directly under $self->config_dir.
+#
+# NOTE(review): results are memoized in a file-scoped @config_sets, so the
+# scan happens once per process and is shared by every Fixtures instance —
+# a second object with a different config_dir will see the first object's
+# cached list, and files added later are never picked up. Confirm this
+# caching is intentional before relying on it.
+my @config_sets;
+sub available_config_sets {
+  @config_sets = scalar(@config_sets) ? @config_sets : map {
+    $_->basename;
+  } grep {
+    -f $_ && $_=~/json$/;
+  } dir((shift)->config_dir)->children;
+}
+
=head2 dump
=over 4
/home/me/app/fixtures/artist/3.fix
/home/me/app/fixtures/producer/5.fix
-schema and directory are required attributes. also, one of config or all must be specified.
+schema and directory are required attributes. also, one of config or all must
+be specified.
+
+Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
+If this form is used your HashRef should conform to the structure rules defined
+for the JSON representations.
=cut
}
}
+ if($params->{excludes} && !$params->{all}) {
+ return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
+ }
+
my $schema = $params->{schema};
my $config;
if ($params->{config}) {
- #read config
- my $config_file = $self->config_dir->file($params->{config});
- $config = $self->load_config_file($config_file);
+ $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} :
+ do {
+ #read config
+ my $config_file = $self->config_dir->file($params->{config});
+ $self->load_config_file($config_file);
+ };
} elsif ($params->{all}) {
+ my %excludes = map {$_=>1} @{$params->{excludes}||[]};
$config = {
might_have => { fetch => 0 },
has_many => { fetch => 0 },
belongs_to => { fetch => 0 },
- sets => [map {{ class => $_, quantity => 'all' }} $schema->sources]
+ sets => [
+ map {
+ { class => $_, quantity => 'all' };
+ } grep {
+ !$excludes{$_}
+ } $schema->sources],
};
} else {
DBIx::Class::Exception->throw('must pass config or set all');
}
$self->msg("generating fixtures");
- my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
+ my $tmp_output_dir = dir(tmpdir());
if (-e $tmp_output_dir) {
$self->msg("- clearing existing $tmp_output_dir");
->openw
->print($VERSION);
+ # write our current config set
+ $tmp_output_dir->file('_config_set')
+ ->openw
+ ->print( Dumper $config );
+
$config->{rules} ||= {};
my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
+ while ( my ($k,$v) = each %{ $config->{rules} } ) {
+ if ( my $source = eval { $schema->source($k) } ) {
+ $config->{rules}{$source->source_name} = $v;
+ }
+ }
+
foreach my $source (@sources) {
# apply rule to set if specified
my $rule = $config->{rules}->{$source->{class}};
my $rs = $schema->resultset($source->{class});
if ($source->{cond} and ref $source->{cond} eq 'HASH') {
- # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
- # ideally this would substitute deeply
- $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
+ # if value starts with \ assume it's meant to be passed as a scalar ref
+ # to dbic. ideally this would substitute deeply
+ $source->{cond} = {
+ map {
+ $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
+ : $source->{cond}->{$_}
+ } keys %{$source->{cond}}
+ };
}
- $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
+ $rs = $rs->search($source->{cond}, { join => $source->{join} })
+ if $source->{cond};
+
$self->msg("- dumping $source->{class}");
+
my %source_options = ( set => { %{$config}, %{$source} } );
if ($source->{quantity}) {
- $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
+ $rs = $rs->search({}, { order_by => $source->{order_by} })
+ if $source->{order_by};
+
if ($source->{quantity} =~ /^\d+$/) {
$rs = $rs->search({}, { rows => $source->{quantity} });
} elsif ($source->{quantity} ne 'all') {
- DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
+ DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
}
}
elsif ($source->{ids} && @{$source->{ids}}) {
sub dump_object {
my ($self, $object, $params) = @_;
my $set = $params->{set};
+
+ my $v = Data::Visitor::Callback->new(
+ plain_value => sub {
+ my ($visitor, $data) = @_;
+ my $subs = {
+ ENV => sub {
+ my ( $self, $v ) = @_;
+ if (! defined($ENV{$v})) {
+ return "";
+ } else {
+ return $ENV{ $v };
+ }
+ },
+ ATTR => sub {
+ my ($self, $v) = @_;
+ if(my $attr = $self->config_attrs->{$v}) {
+ return $attr;
+ } else {
+ return "";
+ }
+ },
+ catfile => sub {
+ my ($self, @args) = @_;
+ catfile(@args);
+ },
+ catdir => sub {
+ my ($self, @args) = @_;
+ catdir(@args);
+ },
+ };
+
+ my $subsre = join( '|', keys %$subs );
+ $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
+
+ return $_;
+ }
+ );
+
+ $v->visit( $set );
+
die 'no dir passed to dump_object' unless $params->{set_dir};
die 'no object passed to dump_object' unless $object;
$source_dir->mkpath(0, 0777);
# strip dir separators from file name
- my $file = $source_dir->file(join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix');
-
+ my $file = $source_dir->file(
+ join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
+ );
# write file
unless ($exists) {
$self->msg('-- dumping ' . $file->stringify, 2);
my %ds = $object->get_columns;
+ if($set->{external}) {
+ foreach my $field (keys %{$set->{external}}) {
+ my $key = $ds{$field};
+ my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+ my $args = $set->{external}->{$field}->{args};
+
+ $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+ eval "use $class";
+
+ $ds{external}->{$field} =
+ encode_base64( $class
+ ->backup($key => $args));
+ }
+ }
+
# mess with dates if specified
if ($set->{datetime_relative}) {
my $formatter= $object->result_source->schema->storage->datetime_parser;
next unless $value
&& $col_info->{_inflate_info}
- && uc($col_info->{data_type}) eq 'DATETIME';
+ && (
+ (uc($col_info->{data_type}) eq 'DATETIME')
+ or (uc($col_info->{data_type}) eq 'DATE')
+ or (uc($col_info->{data_type}) eq 'TIME')
+ or (uc($col_info->{data_type}) eq 'TIMESTAMP')
+ or (uc($col_info->{data_type}) eq 'INTERVAL')
+ );
$ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
}
}
# don't bother looking at rels unless we are actually planning to dump at least one type
- return unless $set->{might_have}->{fetch}
- || $set->{belongs_to}->{fetch}
- || $set->{has_many}->{fetch}
+ my ($might_have, $belongs_to, $has_many) = map {
+ $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
+ } qw/might_have belongs_to has_many/;
+
+ return unless $might_have
+ || $belongs_to
+ || $has_many
|| $set->{fetch};
# dump rels of object
foreach my $name (sort $src->relationships) {
my $info = $src->relationship_info($name);
my $r_source = $src->related_source($name);
- # if belongs_to or might_have with might_have param set or has_many with has_many param set then
+ # if belongs_to or might_have with might_have param set or has_many with
+ # has_many param set then
if (
- ( $info->{attrs}{accessor} eq 'single' &&
- (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))
- ) ||
- $info->{attrs}{accessor} eq 'filter' ||
- ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))
+ ( $info->{attrs}{accessor} eq 'single' &&
+ (!$info->{attrs}{join_type} || $might_have)
+ )
+ || $info->{attrs}{accessor} eq 'filter'
+ ||
+ ($info->{attrs}{accessor} eq 'multi' && $has_many)
) {
my $related_rs = $object->related_resultset($name);
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
# these parts of the rule only apply to has_many rels
if ($rule && $info->{attrs}{accessor} eq 'multi') {
- $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
- $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
- $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
+ $related_rs = $related_rs->search(
+ $rule->{cond},
+ { join => $rule->{join} }
+ ) if ($rule->{cond});
+
+ $related_rs = $related_rs->search(
+ {},
+ { rows => $rule->{quantity} }
+ ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
+
+ $related_rs = $related_rs->search(
+ {},
+ { order_by => $rule->{order_by} }
+ ) if ($rule->{order_by});
+
}
- if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
- $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
+ if ($set->{has_many}{quantity} &&
+ $set->{has_many}{quantity} =~ /^\d+$/) {
+ $related_rs = $related_rs->search(
+ {},
+ { rows => $set->{has_many}->{quantity} }
+ );
}
+
my %c_params = %{$params};
# inherit date param
- my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
+ my %mock_set = map {
+ $_ => $set->{$_}
+ } grep { $set->{$_} } @inherited_attrs;
+
$c_params{set} = \%mock_set;
- # use Data::Dumper; print ' -- ' . Dumper($c_params{set}, $rule->{fetch}) if ($rule && $rule->{fetch});
- $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
- # use Data::Dumper; print ' -- ' . Dumper(\%c_params) if ($rule && $rule->{fetch});
+ $c_params{set} = merge( $c_params{set}, $rule)
+ if $rule && $rule->{fetch};
+
$self->dump_rs($related_rs, \%c_params);
}
}
return unless $set && $set->{fetch};
foreach my $fetch (@{$set->{fetch}}) {
# inherit date param
- $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
+ $fetch->{$_} = $set->{$_} foreach
+ grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
my $related_rs = $object->related_resultset($fetch->{rel});
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
}
}
- die "relationship " . $fetch->{rel} . " does not exist for " . $src->source_name
+ die "relationship $fetch->{rel} does not exist for " . $src->source_name
unless ($related_rs);
if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
- # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
- # ideally this would substitute deeply
+ # if value starts with \ assume it's meant to be passed as a scalar ref
+ # to dbic. ideally this would substitute deeply
$fetch->{cond} = { map {
$_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
: $fetch->{cond}->{$_}
} keys %{$fetch->{cond}} };
}
- $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} })
- if ($fetch->{cond});
- $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} })
- if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
- $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} })
- if ($fetch->{order_by});
+ $related_rs = $related_rs->search(
+ $fetch->{cond},
+ { join => $fetch->{join} }
+ ) if $fetch->{cond};
+
+ $related_rs = $related_rs->search(
+ {},
+ { rows => $fetch->{quantity} }
+ ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
+ $related_rs = $related_rs->search(
+ {},
+ { order_by => $fetch->{order_by} }
+ ) if $fetch->{order_by};
$self->dump_rs($related_rs, { %{$params}, set => $fetch });
}
my $params = shift || {};
require DBI;
$self->msg("\ncreating schema");
- # die 'must pass version param to generate_schema_from_ddl' unless $params->{version};
my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
eval "require $schema_class";
# clear existing db
$self->msg("- clearing DB of existing tables");
- $pre_schema->storage->with_deferred_fk_checks(sub {
- foreach my $table (@tables) {
- eval {
- $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
- };
- }
+ $pre_schema->storage->txn_do(sub {
+ $pre_schema->storage->with_deferred_fk_checks(sub {
+ foreach my $table (@tables) {
+ eval {
+ $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+ };
+ }
+ });
});
# import new ddl file to db
# load schema object from our new DB
$namespace_counter++;
- my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
+ my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
my $schema = $namespace2->connect(@{$connection_details});
return $schema;
return \@data;
}
+=head2 dump_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set named in the C<configs> parameter.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+ $fixture->dump_all_config_sets({
+ schema => $schema,
+ configs => [qw/one.json other.json/],
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+# Dump each named config set in turn. C<configs> is an arrayref of config
+# file names (relative to config_dir); C<directory_template> is a coderef
+# ($self, $params, $set_name) -> output directory for that set. All other
+# params are passed through to dump() unchanged. Throws via
+# DBIx::Class::Exception when directory_template is missing.
+sub dump_config_sets {
+  my ($self, $params) = @_;
+  my $available_config_sets = delete $params->{configs};
+  my $directory_template = delete $params->{directory_template} ||
+    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
+
+  for my $set (@$available_config_sets) {
+    # Shallow-copy the params for this iteration: the original aliased
+    # $params directly, so the injected 'directory' and 'config' keys
+    # leaked back into the caller's hashref and persisted into the next
+    # set's dump.
+    my $localparams = { %$params };
+    $localparams->{directory} = $directory_template->($self, $localparams, $set);
+    $localparams->{config} = $set;
+    $self->dump($localparams);
+    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
+  }
+}
+
+=head2 dump_all_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set in turn to the specified directory.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+ $fixture->dump_all_config_sets({
+ schema => $schema,
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+# Convenience wrapper: dump every config set that available_config_sets()
+# finds in config_dir, delegating the real work to dump_config_sets().
+sub dump_all_config_sets {
+  my ($self, $params) = @_;
+
+  # Merge the caller's params with the full list of available sets; a
+  # caller-supplied 'configs' key is deliberately overridden, exactly as
+  # the hash-literal spread did before.
+  my %merged = %$params;
+  $merged{configs} = [ $self->available_config_sets ];
+  return $self->dump_config_sets( \%merged );
+}
+
=head2 populate
=over 4
# optional, set to 1 to run ddl but not populate
no_populate => 0,
+ # optional, set to 1 to run each fixture through ->create rather than have
+ # each $rs populated using $rs->populate. Useful if you have overridden new() logic
+ # that effects the value of column(s).
+ use_create => 0,
+
# Dont try to clean the database, just populate over whats there. Requires
# schema option. Use this if you want to handle removing old data yourself
# no_deploy => 1
sub populate {
my $self = shift;
my ($params) = @_;
- unless (ref $params eq 'HASH') {
- return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
- }
+ DBIx::Class::Exception->throw('first arg to populate must be hash ref')
+ unless ref $params eq 'HASH';
+
+ DBIx::Class::Exception->throw('directory param not specified')
+ unless $params->{directory};
- foreach my $param (qw/directory/) {
- unless ($params->{$param}) {
- return DBIx::Class::Exception->throw($param . ' param not specified');
- }
- }
my $fixture_dir = dir(delete $params->{directory});
- unless (-e $fixture_dir) {
- return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
- }
+ DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
+ unless -d $fixture_dir;
my $ddl_file;
my $dbh;
} elsif ($params->{schema} && $params->{no_deploy}) {
$schema = $params->{schema};
} else {
- return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
+ DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
}
return 1 if $params->{no_populate};
$self->msg("\nimporting fixtures");
- my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
-
+ my $tmp_fixture_dir = dir(tmpdir());
my $version_file = file($fixture_dir, '_dumper_version');
- unless (-e $version_file) {
-# return DBIx::Class::Exception->throw('no version file found');
+ my $config_set_path = file($fixture_dir, '_config_set');
+ my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
+
+ my $v = Data::Visitor::Callback->new(
+ plain_value => sub {
+ my ($visitor, $data) = @_;
+ my $subs = {
+ ENV => sub {
+ my ( $self, $v ) = @_;
+ if (! defined($ENV{$v})) {
+ return "";
+ } else {
+ return $ENV{ $v };
+ }
+ },
+ ATTR => sub {
+ my ($self, $v) = @_;
+ if(my $attr = $self->config_attrs->{$v}) {
+ return $attr;
+ } else {
+ return "";
+ }
+ },
+ catfile => sub {
+ my ($self, @args) = @_;
+ catfile(@args);
+ },
+ catdir => sub {
+ my ($self, @args) = @_;
+ catdir(@args);
+ },
+ };
+
+ my $subsre = join( '|', keys %$subs );
+ $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
+
+ return $_;
+ }
+ );
+
+ $v->visit( $config_set );
+
+
+ my %sets_by_src;
+ if($config_set) {
+ %sets_by_src = map { delete($_->{class}) => $_ }
+ @{$config_set->{sets}}
}
+# DBIx::Class::Exception->throw('no version file found');
+# unless -e $version_file;
+
if (-e $tmp_fixture_dir) {
$self->msg("- deleting existing temp directory $tmp_fixture_dir");
$tmp_fixture_dir->rmtree;
}
$self->msg("- creating temp dir");
- dircopy(
- dir($fixture_dir, $schema->source($_)->from),
- dir($tmp_fixture_dir, $schema->source($_)->from)
- ) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources;
+ $tmp_fixture_dir->mkpath();
+ for ( map { $schema->source($_)->from } $schema->sources) {
+ my $from_dir = $fixture_dir->subdir($_);
+ next unless -e $from_dir;
+ dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
+ }
unless (-d $tmp_fixture_dir) {
- return DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
+ DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
}
my $fixup_visitor;
- my $formatter= $schema->storage->datetime_parser;
+ my $formatter = $schema->storage->datetime_parser;
unless ($@ || !$formatter) {
my %callbacks;
if ($params->{datetime_relative_to}) {
$fixup_visitor = new Data::Visitor::Callback(%callbacks);
}
- $schema->storage->with_deferred_fk_checks(sub {
- foreach my $source (sort $schema->sources) {
- $self->msg("- adding " . $source);
- my $rs = $schema->resultset($source);
- my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
- next unless (-e $source_dir);
- my @rows;
- while (my $file = $source_dir->next) {
- next unless ($file =~ /\.fix$/);
- next if $file->is_dir;
- my $contents = $file->slurp;
- my $HASH1;
- eval($contents);
- $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
- push(@rows, $HASH1);
+ $schema->storage->txn_do(sub {
+ $schema->storage->with_deferred_fk_checks(sub {
+ foreach my $source (sort $schema->sources) {
+ $self->msg("- adding " . $source);
+ my $rs = $schema->resultset($source);
+ my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
+ next unless (-e $source_dir);
+ my @rows;
+ while (my $file = $source_dir->next) {
+ next unless ($file =~ /\.fix$/);
+ next if $file->is_dir;
+ my $contents = $file->slurp;
+ my $HASH1;
+ eval($contents);
+ $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
+ if(my $external = delete $HASH1->{external}) {
+ my @fields = keys %{$sets_by_src{$source}->{external}};
+ foreach my $field(@fields) {
+ my $key = $HASH1->{$field};
+ my $content = decode_base64 ($external->{$field});
+ my $args = $sets_by_src{$source}->{external}->{$field}->{args};
+ my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+ $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+ eval "use $class";
+ $class->restore($key, $content, $args);
+ }
+ }
+ if ( $params->{use_create} ) {
+ $rs->create( $HASH1 );
+ } else {
+ push(@rows, $HASH1);
+ }
+ }
+ $rs->populate(\@rows) if scalar(@rows);
+
+ ## Now we need to do some db specific cleanup
+ ## this probably belongs in a more isolated space. Right now this is
+ ## to just handle postgresql SERIAL types that use Sequences
+
+ my $table = $rs->result_source->name;
+ for my $column(my @columns = $rs->result_source->columns) {
+ my $info = $rs->result_source->column_info($column);
+ if(my $sequence = $info->{sequence}) {
+ $self->msg("- updating sequence $sequence");
+ $rs->result_source->storage->dbh_do(sub {
+ my ($storage, $dbh, @cols) = @_;
+ $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
+ my $sth = $dbh->prepare($sql);
+ my $rv = $sth->execute or die $sth->errstr;
+ $self->msg("- $sql");
+ });
+ }
+ }
+
}
- $rs->populate(\@rows) if (scalar(@rows));
- }
+ });
});
-
$self->do_post_ddl( {
- schema=>$schema,
+ schema=>$schema,
post_ddl=>$params->{post_ddl}
} ) if $params->{post_ddl};
Drew Taylor <taylor.andrew.j@gmail.com>
+ Frank Switalski <fswitalski@gmail.com>
+
=head1 LICENSE
This library is free software under the same license as perl itself