use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
-use Path::Class qw(dir file);
use Config::Any::JSON;
use Data::Dump::Streamer;
use Data::Visitor::Callback;
-use File::Path;
-use File::Copy::Recursive qw/dircopy/;
-use File::Copy qw/move/;
use Hash::Merge qw( merge );
use Data::Dumper;
use Class::C3::Componentised;
+use MIME::Base64;
+use IO::All;
+use File::Temp qw/tempdir/;
use base qw(Class::Accessor::Grouped);
our $namespace_counter = 0;
__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
- _inherited_attributes debug schema_class dumped_objects/);
+ _inherited_attributes debug schema_class dumped_objects config_attrs/);
-=head1 VERSION
+our $VERSION = '1.001_030';
-Version 1.001010
-
-=cut
-
-our $VERSION = '1.001010';
+$VERSION = eval $VERSION;
=head1 NAME
-DBIx::Class::Fixtures
+DBIx::Class::Fixtures - Dump data and repopulate a database using rules
=head1 SYNOPSIS
...
- my $fixtures = DBIx::Class::Fixtures->new({
- config_dir => '/home/me/app/fixture_configs'
+ my $fixtures = DBIx::Class::Fixtures->new({
+ config_dir => '/home/me/app/fixture_configs'
});
$fixtures->dump({
}
]
}
- ]
+ ]
}
This will fetch artists with primary keys 1 and 3, the producer with primary
{
"class": "Artist",
"ids": ["1", "3"]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
"fetch": [
- {
+ {
"rel": "artists",
"quantity": "2"
}
"rel": "cds",
"quantity": "all"
} ]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
- "fetch": [ {
+ "fetch": [ {
"rel": "artists",
"quantity": "2",
"fetch": [ {
=head2 has_many
Specifies whether to fetch has_many rels for this set. Must be a hash
-containing keys fetch and quantity.
+containing keys fetch and quantity.
Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
integer.
This value will be inherited by all fetches in this set. This is not true for
the has_many attribute.
+=head2 external
+
+In some cases values in your database might be keys to data held in some sort of
+external storage. The classic example is using L<DBIx::Class::InflateColumn::FS>
+to store blob information on the filesystem. In this case you may wish to back up
+your external storage in the same way as your database data. The L</external>
+attribute lets you specify a handler for this situation. For example:
+
+ {
+ "sets": [{
+ "class": "Photo",
+ "quantity": "all",
+ "external": {
+ "file": {
+ "class": "File",
+ "args": {"path":"__ATTR(photo_dir)__"}
+ }
+ }
+ }]
+ }
+
+This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
+where the path to a file is specified by the C<file> field of the C<Photo> source.
+We use the uninflated value of the field, so your handler needs to take care of
+backup and restore completely. For the common case we provide
+L<DBIx::Class::Fixtures::External::File>, and you can create your own custom
+handlers by prefixing the class name with a '+':
+
+ "class": "+MyApp::Schema::SomeExternalStorage",
+
+Although if possible I'd love to get patches to add some of the other common
+types (I imagine storage in MogileFS, Redis, etc. or even Amazon might be popular).
+
+See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
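+
+A minimal sketch of such a custom handler, assuming the C<backup> / C<restore>
+interface used by the code in this module (the class name and the use of a
+C<path> argument here are hypothetical):
+
+    package MyApp::Schema::SomeExternalStorage;
+
+    use strict;
+    use warnings;
+
+    # backup($key, $args) returns the raw content to be stored (base64
+    # encoded) in the fixture; $key is the uninflated value of the field.
+    sub backup {
+      my ($class, $key, $args) = @_;
+      open my $fh, '<', "$args->{path}/$key"
+        or die "cannot read $args->{path}/$key: $!";
+      my $content = do { local $/; <$fh> };
+      close $fh;
+      return $content;
+    }
+
+    # restore($key, $content, $args) is called during populate with the
+    # decoded content so it can be written back to external storage.
+    sub restore {
+      my ($class, $key, $content, $args) = @_;
+      open my $fh, '>', "$args->{path}/$key"
+        or die "cannot write $args->{path}/$key: $!";
+      print $fh $content;
+      close $fh;
+      return 1;
+    }
+
+    1;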
+
=head1 RULE ATTRIBUTES
=head2 cond
Same as with L</SET ATTRIBUTES>
+=head1 RULE SUBSTITUTIONS
+
+You can provide the following substitution patterns for your rule values. An
+example of this might be:
+
+ {
+ "sets": [{
+ "class": "Photo",
+ "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
+ }]
+ }
+
+=head2 ENV
+
+Provide a value from %ENV
+
+=head2 ATTR
+
+Provide a value from L</config_attrs>
+
+=head2 catfile
+
+Create the path to a file from a list
+
+=head2 catdir
+
+Create the path to a directory from a list
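+
+For example, a hypothetical set combining C<ENV> and C<catdir> (the
+environment variable and path components below are made up):
+
+    {
+      "sets": [{
+        "class": "Photo",
+        "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
+        "external": {
+          "file": {
+            "class": "File",
+            "args": {"path":"__catdir(t,var,photos)__"}
+          }
+        }
+      }]
+    }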
+
=head1 METHODS
=head2 new
=over
-=item config_dir:
+=item config_dir:
required. must contain a valid path to the directory in which your .json
configs reside.
-=item debug:
+=item debug:
determines whether to be verbose
-=item ignore_sql_errors:
+=item ignore_sql_errors:
ignore errors on import of DDL etc
+=item config_attrs
+
+A hash of information you can use to do replacements inside your configuration
+sets. For example, if your set looks like:
+
+ {
+ "sets": [ {
+ "class": "Artist",
+ "ids": ["1", "3"],
+ "fetch": [ {
+ "rel": "cds",
+ "quantity": "__ATTR(quantity)__",
+ } ]
+ } ]
+ }
+
+ my $fixtures = DBIx::Class::Fixtures->new( {
+      config_dir => '/home/me/app/fixture_configs',
+ config_attrs => {
+ quantity => 100,
+ },
+ });
+
+You may wish to do this if you want to let whoever runs the dumps have a bit
+more control.
+
=back
my $fixtures = DBIx::Class::Fixtures->new( {
return DBIx::Class::Exception->throw('config_dir param not specified');
}
- my $config_dir = dir($params->{config_dir});
+ my $config_dir = io->dir($params->{config_dir});
unless (-e $params->{config_dir}) {
return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
}
my $self = {
- config_dir => $config_dir,
+ config_dir => $config_dir,
_inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
- debug => $params->{debug} || 0,
- ignore_sql_errors => $params->{ignore_sql_errors},
- dumped_objects => {},
- use_create => $params->{use_create} || 0
+ debug => $params->{debug} || 0,
+ ignore_sql_errors => $params->{ignore_sql_errors},
+ dumped_objects => {},
+ use_create => $params->{use_create} || 0,
+ use_find_or_create => $params->{use_find_or_create} || 0,
+ config_attrs => $params->{config_attrs} || {},
};
bless $self, $class;
return $self;
}
+=head2 available_config_sets
+
+Returns a list of all the config sets found in the L</config_dir>. These are
+the names of the JSON files in that directory which contain the dump rules.
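+
+For example, a minimal usage sketch (assuming a C<$fixtures> object built as
+in the L</new> example):
+
+    foreach my $config_set ($fixtures->available_config_sets) {
+      print "found dump rules in $config_set\n";
+    }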
+
+=cut
+
+my @config_sets;
+sub available_config_sets {
+ @config_sets = scalar(@config_sets) ? @config_sets : map {
+ $_->filename;
+ } grep {
+ -f "$_" && $_=~/json$/;
+ } shift->config_dir->all;
+}
+
=head2 dump
=over 4
/home/me/app/fixtures/artist/3.fix
/home/me/app/fixtures/producer/5.fix
-schema and directory are required attributes. also, one of config or all must be specified.
+C<schema> and C<directory> are required attributes. Also, one of C<config> or
+C<all> must be specified.
+
+Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
+If this form is used, your HashRef should conform to the structure rules defined
+for the JSON representations.
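+
+A minimal sketch of the HashRef form, mirroring the C<Artist> set from the
+JSON examples above:
+
+    $fixtures->dump({
+      schema    => $schema,
+      directory => '/home/me/app/fixtures',
+      config    => {
+        sets => [{
+          class => 'Artist',
+          ids   => ['1', '3'],
+        }],
+      },
+    });
+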
=cut
my $schema = $params->{schema};
my $config;
if ($params->{config}) {
- #read config
- my $config_file = $self->config_dir->file($params->{config});
- $config = $self->load_config_file($config_file);
+ $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} :
+ do {
+ #read config
+ my $config_file = io->catfile($self->config_dir, $params->{config});
+ $self->load_config_file("$config_file");
+ };
} elsif ($params->{all}) {
my %excludes = map {$_=>1} @{$params->{excludes}||[]};
- $config = {
+ $config = {
might_have => { fetch => 0 },
has_many => { fetch => 0 },
belongs_to => { fetch => 0 },
DBIx::Class::Exception->throw('must pass config or set all');
}
- my $output_dir = dir($params->{directory});
- unless (-e $output_dir) {
+ my $output_dir = io->dir($params->{directory});
+ unless (-e "$output_dir") {
$output_dir->mkpath ||
DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
}
$self->msg("generating fixtures");
- my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
+ my $tmp_output_dir = io->dir(tempdir);
- if (-e $tmp_output_dir) {
+ if (-e "$tmp_output_dir") {
$self->msg("- clearing existing $tmp_output_dir");
$tmp_output_dir->rmtree;
}
$tmp_output_dir->mkpath;
# write version file (for the potential benefit of populate)
- $tmp_output_dir->file('_dumper_version')
- ->openw
- ->print($VERSION);
+ $tmp_output_dir->file('_dumper_version')->print($VERSION);
+
+ # write our current config set
+ $tmp_output_dir->file('_config_set')->print( Dumper $config );
$config->{rules} ||= {};
my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
if ($source->{cond} and ref $source->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $source->{cond} = {
- map {
- $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
- : $source->{cond}->{$_}
- } keys %{$source->{cond}}
+ $source->{cond} = {
+ map {
+ $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
+ : $source->{cond}->{$_}
+ } keys %{$source->{cond}}
};
}
- $rs = $rs->search($source->{cond}, { join => $source->{join} })
+ $rs = $rs->search($source->{cond}, { join => $source->{join} })
if $source->{cond};
$self->msg("- dumping $source->{class}");
my %source_options = ( set => { %{$config}, %{$source} } );
if ($source->{quantity}) {
- $rs = $rs->search({}, { order_by => $source->{order_by} })
+ $rs = $rs->search({}, { order_by => $source->{order_by} })
if $source->{order_by};
if ($source->{quantity} =~ /^\d+$/) {
}
# clear existing output dir
- foreach my $child ($output_dir->children) {
+ foreach my $child ($output_dir->all) {
if ($child->is_dir) {
- next if ($child eq $tmp_output_dir);
- if (grep { $_ =~ /\.fix/ } $child->children) {
+ next if ("$child" eq "$tmp_output_dir");
+ if (grep { $_ =~ /\.fix/ } $child->all) {
$child->rmtree;
}
} elsif ($child =~ /_dumper_version$/) {
- $child->remove;
+ $child->unlink;
}
}
$self->msg("- moving temp dir to $output_dir");
- move($_, dir($output_dir, $_->relative($_->parent)->stringify))
- for $tmp_output_dir->children;
+ $tmp_output_dir->copy("$output_dir");
- if (-e $output_dir) {
+ if (-e "$output_dir") {
$self->msg("- clearing tmp dir $tmp_output_dir");
# delete existing fixture set
- $tmp_output_dir->remove;
+ $tmp_output_dir->rmtree;
}
$self->msg("done");
sub load_config_file {
my ($self, $config_file) = @_;
DBIx::Class::Exception->throw("config does not exist at $config_file")
- unless -e $config_file;
+ unless -e "$config_file";
my $config = Config::Any::JSON->load($config_file);
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless ref $incs eq 'ARRAY';
-
+
foreach my $include_config (@$incs) {
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless (ref $include_config eq 'HASH') && $include_config->{file};
-
+
my $include_file = $self->config_dir->file($include_config->{file});
DBIx::Class::Exception->throw("config does not exist at $include_file")
- unless -e $include_file;
-
+ unless -e "$include_file";
+
my $include = Config::Any::JSON->load($include_file);
$self->msg($include);
$config = merge( $config, $include );
}
delete $config->{includes};
}
-
+
# validate config
return DBIx::Class::Exception->throw('config has no sets')
- unless $config && $config->{sets} &&
+ unless $config && $config->{sets} &&
ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
$config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
$self->dump_object($row, $params);
}
}
-
+
sub dump_object {
- my ($self, $object, $params) = @_;
+ my ($self, $object, $params) = @_;
my $set = $params->{set};
+
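+  # Expand rule substitutions (__ENV(..)__, __ATTR(..)__, __catfile(..)__ and
+  # __catdir(..)__) in the set's plain values before dumping.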
+ my $v = Data::Visitor::Callback->new(
+ plain_value => sub {
+ my ($visitor, $data) = @_;
+ my $subs = {
+ ENV => sub {
+ my ( $self, $v ) = @_;
+ if (! defined($ENV{$v})) {
+ return "";
+ } else {
+ return $ENV{ $v };
+ }
+ },
+ ATTR => sub {
+ my ($self, $v) = @_;
+ if(my $attr = $self->config_attrs->{$v}) {
+ return $attr;
+ } else {
+ return "";
+ }
+ },
+ catfile => sub {
+ my ($self, @args) = @_;
+ "".io->catfile(@args);
+ },
+ catdir => sub {
+ my ($self, @args) = @_;
+ "".io->catdir(@args);
+ },
+ };
+
+ my $subsre = join( '|', keys %$subs );
+ $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
+
+ return $_;
+ }
+ );
+
+ $v->visit( $set );
+
die 'no dir passed to dump_object' unless $params->{set_dir};
die 'no object passed to dump_object' unless $object;
my @inherited_attrs = @{$self->_inherited_attributes};
my @pk_vals = map {
- $object->get_column($_)
+ $object->get_column($_)
} $object->primary_columns;
my $key = join("\0", @pk_vals);
# write dir and gen filename
- my $source_dir = $params->{set_dir}->subdir(lc $src->from);
+ my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
$source_dir->mkpath(0, 0777);
- # strip dir separators from file name
- my $file = $source_dir->file(
- join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
+ # Convert characters not allowed on windows
+ my $file = io->catfile("$source_dir",
+ join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
);
-
# write file
unless ($exists) {
- $self->msg('-- dumping ' . $file->stringify, 2);
+ $self->msg('-- dumping ' . "$file", 2);
my %ds = $object->get_columns;
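+    # If any fields are backed by external storage, use the configured handler
+    # to back up their content and store it base64 encoded in the fixture
+    # alongside the column data.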
+ if($set->{external}) {
+ foreach my $field (keys %{$set->{external}}) {
+ my $key = $ds{$field};
+ my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+ my $args = $set->{external}->{$field}->{args};
+
+ $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+ eval "use $class";
+
+ $ds{external}->{$field} =
+ encode_base64( $class
+ ->backup($key => $args),'');
+ }
+ }
+
# mess with dates if specified
if ($set->{datetime_relative}) {
my $formatter= $object->result_source->schema->storage->datetime_parser;
next unless $value
&& $col_info->{_inflate_info}
- && uc($col_info->{data_type}) eq 'DATETIME';
+ && (
+ (uc($col_info->{data_type}) eq 'DATETIME')
+ or (uc($col_info->{data_type}) eq 'DATE')
+ or (uc($col_info->{data_type}) eq 'TIME')
+ or (uc($col_info->{data_type}) eq 'TIMESTAMP')
+ or (uc($col_info->{data_type}) eq 'INTERVAL')
+ );
$ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
}
# do the actual dumping
my $serialized = Dump(\%ds)->Out();
- $file->openw->print($serialized);
+
+ $file->print($serialized);
}
# don't bother looking at rels unless we are actually planning to dump at least one type
# if belongs_to or might_have with might_have param set or has_many with
# has_many param set then
if (
- ( $info->{attrs}{accessor} eq 'single' &&
- (!$info->{attrs}{join_type} || $might_have)
+ ( $info->{attrs}{accessor} eq 'single' &&
+ (!$info->{attrs}{join_type} || $might_have)
)
- || $info->{attrs}{accessor} eq 'filter'
- ||
+ || $info->{attrs}{accessor} eq 'filter'
+ ||
($info->{attrs}{accessor} eq 'multi' && $has_many)
) {
- my $related_rs = $object->related_resultset($name);
+ my $related_rs = $object->related_resultset($name);
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
# these parts of the rule only apply to has_many rels
- if ($rule && $info->{attrs}{accessor} eq 'multi') {
+ if ($rule && $info->{attrs}{accessor} eq 'multi') {
$related_rs = $related_rs->search(
- $rule->{cond},
+ $rule->{cond},
{ join => $rule->{join} }
) if ($rule->{cond});
) if ($rule->{quantity} && $rule->{quantity} ne 'all');
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $rule->{order_by} }
- ) if ($rule->{order_by});
+ ) if ($rule->{order_by});
}
- if ($set->{has_many}{quantity} &&
+ if ($set->{has_many}{quantity} &&
$set->{has_many}{quantity} =~ /^\d+$/) {
$related_rs = $related_rs->search(
- {},
+ {},
{ rows => $set->{has_many}->{quantity} }
);
}
my %c_params = %{$params};
# inherit date param
- my %mock_set = map {
- $_ => $set->{$_}
+ my %mock_set = map {
+ $_ => $set->{$_}
} grep { $set->{$_} } @inherited_attrs;
$c_params{set} = \%mock_set;
if $rule && $rule->{fetch};
$self->dump_rs($related_rs, \%c_params);
- }
+ }
}
}
-
+
return unless $set && $set->{fetch};
foreach my $fetch (@{$set->{fetch}}) {
# inherit date param
- $fetch->{$_} = $set->{$_} foreach
+ $fetch->{$_} = $set->{$_} foreach
grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
my $related_rs = $object->related_resultset($fetch->{rel});
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
} elsif ($rule->{fetch}) {
$fetch = merge( $fetch, { fetch => $rule->{fetch} } );
}
- }
+ }
- die "relationship $fetch->{rel} does not exist for " . $src->source_name
+ die "relationship $fetch->{rel} does not exist for " . $src->source_name
unless ($related_rs);
if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $fetch->{cond} = { map {
- $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
- : $fetch->{cond}->{$_}
+ $fetch->{cond} = { map {
+ $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
+ : $fetch->{cond}->{$_}
} keys %{$fetch->{cond}} };
}
$related_rs = $related_rs->search(
- $fetch->{cond},
+ $fetch->{cond},
{ join => $fetch->{join} }
) if $fetch->{cond};
{ rows => $fetch->{quantity} }
) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $fetch->{order_by} }
) if $fetch->{order_by};
unless( $pre_schema ) {
return DBIx::Class::Exception->throw('connection details not valid');
}
- my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
+ my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
$self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
my $dbh = $pre_schema->storage->dbh;
$pre_schema->storage->txn_do(sub {
$pre_schema->storage->with_deferred_fk_checks(sub {
foreach my $table (@tables) {
- eval {
- $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+ eval {
+ $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
};
}
});
return \@data;
}
+=head2 dump_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set named in the C<configs> parameter.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+  $fixture->dump_config_sets({
+ schema => $schema,
+ configs => [qw/one.json other.json/],
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+sub dump_config_sets {
+ my ($self, $params) = @_;
+ my $available_config_sets = delete $params->{configs};
+ my $directory_template = delete $params->{directory_template} ||
+    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
+
+ for my $set (@$available_config_sets) {
+ my $localparams = $params;
+ $localparams->{directory} = $directory_template->($self, $localparams, $set);
+ $localparams->{config} = $set;
+ $self->dump($localparams);
+ $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
+ }
+}
+
+=head2 dump_all_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set in turn to the specified directory.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+ $fixture->dump_all_config_sets({
+ schema => $schema,
+ directory_template => sub {
+ my ($fixture, $params, $set) = @_;
+ return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ },
+ });
+
+=cut
+
+sub dump_all_config_sets {
+ my ($self, $params) = @_;
+ $self->dump_config_sets({
+ %$params,
+ configs=>[$self->available_config_sets],
+ });
+}
+
=head2 populate
=over 4
$fixtures->populate( {
# directory to look for fixtures in, as specified to dump
- directory => '/home/me/app/fixtures',
+ directory => '/home/me/app/fixtures',
# DDL to deploy
- ddl => '/home/me/app/sql/ddl.sql',
+ ddl => '/home/me/app/sql/ddl.sql',
# database to clear, deploy and then populate
- connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
+ connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
# DDL to deploy after populating records, ie. FK constraints
post_ddl => '/home/me/app/sql/post_ddl.sql',
# use CASCADE option when dropping tables
cascade => 1,
- # optional, set to 1 to run ddl but not populate
+ # optional, set to 1 to run ddl but not populate
no_populate => 0,
- # optional, set to 1 to run each fixture through ->create rather than have
+ # optional, set to 1 to run each fixture through ->create rather than have
# each $rs populated using $rs->populate. Useful if you have overridden new() logic
- # that effects the value of column(s).
- use_create => 0,
+  # that affects the value of column(s).
+ use_create => 0,
+
+ # optional, same as use_create except with find_or_create.
+ # Useful if you are populating a persistent data store.
+ use_find_or_create => 0,
   # Don't try to clean the database, just populate over what's there. Requires
   # schema option. Use this if you want to handle removing old data yourself
attribute which will make the drop table functionality cascade, ie 'DROP TABLE
$table CASCADE'.
-C<directory> is a required attribute.
+C<directory> is a required attribute.
 If you wish for DBIx::Class::Fixtures to clear the database for you, pass in
 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
DBIx::Class::Exception->throw('directory param not specified')
unless $params->{directory};
- my $fixture_dir = dir(delete $params->{directory});
+ my $fixture_dir = io->dir(delete $params->{directory});
DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
- unless -d $fixture_dir;
+ unless -d "$fixture_dir";
my $ddl_file;
my $dbh;
my $schema;
if ($params->{ddl} && $params->{connection_details}) {
- $ddl_file = file(delete $params->{ddl});
- unless (-e $ddl_file) {
+ $ddl_file = io->file(delete $params->{ddl});
+ unless (-e "$ddl_file") {
return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
}
unless (ref $params->{connection_details} eq 'ARRAY') {
return DBIx::Class::Exception->throw('connection details must be an arrayref');
}
- $schema = $self->_generate_schema({
- ddl => $ddl_file,
+ $schema = $self->_generate_schema({
+ ddl => "$ddl_file",
connection_details => delete $params->{connection_details},
%{$params}
});
}
- return 1 if $params->{no_populate};
-
+ return 1 if $params->{no_populate};
+
$self->msg("\nimporting fixtures");
- my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
- my $version_file = file($fixture_dir, '_dumper_version');
-# DBIx::Class::Exception->throw('no version file found');
-# unless -e $version_file;
+ my $tmp_fixture_dir = io->dir(tempdir());
+ my $config_set_path = io->file($fixture_dir, '_config_set');
+ my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
+
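+  # As in dump_object, expand the rule substitutions in the dumped config set
+  # before using it to drive the restore of external data.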
+ my $v = Data::Visitor::Callback->new(
+ plain_value => sub {
+ my ($visitor, $data) = @_;
+ my $subs = {
+ ENV => sub {
+ my ( $self, $v ) = @_;
+ if (! defined($ENV{$v})) {
+ return "";
+ } else {
+ return $ENV{ $v };
+ }
+ },
+ ATTR => sub {
+ my ($self, $v) = @_;
+ if(my $attr = $self->config_attrs->{$v}) {
+ return $attr;
+ } else {
+ return "";
+ }
+ },
+ catfile => sub {
+ my ($self, @args) = @_;
+ io->catfile(@args);
+ },
+ catdir => sub {
+ my ($self, @args) = @_;
+ io->catdir(@args);
+ },
+ };
+
+ my $subsre = join( '|', keys %$subs );
+ $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
+
+ return $_;
+ }
+ );
+
+ $v->visit( $config_set );
- if (-e $tmp_fixture_dir) {
+
+ my %sets_by_src;
+ if($config_set) {
+ %sets_by_src = map { delete($_->{class}) => $_ }
+ @{$config_set->{sets}}
+ }
+
+ if (-e "$tmp_fixture_dir") {
$self->msg("- deleting existing temp directory $tmp_fixture_dir");
$tmp_fixture_dir->rmtree;
}
$self->msg("- creating temp dir");
$tmp_fixture_dir->mkpath();
- for ( map { $schema->source($_)->from } $schema->sources) {
- my $from_dir = $fixture_dir->subdir($_);
- next unless -e $from_dir;
- dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
+ for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
+ my $from_dir = io->catdir($fixture_dir, $_);
+ next unless -e "$from_dir";
+ $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
}
- unless (-d $tmp_fixture_dir) {
+ unless (-d "$tmp_fixture_dir") {
DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
}
$formatter->format_datetime(DateTime->today->add_duration($_))
};
}
- $callbacks{object} ||= "visit_ref";
+ $callbacks{object} ||= "visit_ref";
$fixup_visitor = new Data::Visitor::Callback(%callbacks);
}
+ my @sorted_source_names = $self->_get_sorted_sources( $schema );
$schema->storage->txn_do(sub {
$schema->storage->with_deferred_fk_checks(sub {
- foreach my $source (sort $schema->sources) {
+ foreach my $source (@sorted_source_names) {
$self->msg("- adding " . $source);
my $rs = $schema->resultset($source);
- my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
- next unless (-e $source_dir);
+ my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
+ next unless (-e "$source_dir");
my @rows;
while (my $file = $source_dir->next) {
next unless ($file =~ /\.fix$/);
my $HASH1;
eval($contents);
$HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
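+          # Restore any externally stored content that was backed up (base64
+          # encoded) when the fixture set was dumped.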
+ if(my $external = delete $HASH1->{external}) {
+ my @fields = keys %{$sets_by_src{$source}->{external}};
+ foreach my $field(@fields) {
+ my $key = $HASH1->{$field};
+ my $content = decode_base64 ($external->{$field});
+ my $args = $sets_by_src{$source}->{external}->{$field}->{args};
+ my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+ $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+ eval "use $class";
+ $class->restore($key, $content, $args);
+ }
+ }
if ( $params->{use_create} ) {
$rs->create( $HASH1 );
+ } elsif( $params->{use_find_or_create} ) {
+ $rs->find_or_create( $HASH1 );
} else {
push(@rows, $HASH1);
}
}
$rs->populate(\@rows) if scalar(@rows);
+
+ ## Now we need to do some db specific cleanup
+ ## this probably belongs in a more isolated space. Right now this is
+ ## to just handle postgresql SERIAL types that use Sequences
+
+ my $table = $rs->result_source->name;
+ for my $column(my @columns = $rs->result_source->columns) {
+ my $info = $rs->result_source->column_info($column);
+ if(my $sequence = $info->{sequence}) {
+ $self->msg("- updating sequence $sequence");
+ $rs->result_source->storage->dbh_do(sub {
+ my ($storage, $dbh, @cols) = @_;
+ $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
+ my $sth = $dbh->prepare($sql);
+ my $rv = $sth->execute or die $sth->errstr;
+ $self->msg("- $sql");
+ });
+ }
+ }
+
}
});
});
$self->do_post_ddl( {
- schema=>$schema,
+ schema=>$schema,
post_ddl=>$params->{post_ddl}
} ) if $params->{post_ddl};
return 1;
}
+# the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
+sub _get_sorted_sources {
+ my ( $self, $dbicschema ) = @_;
+
+
+ my %table_monikers = map { $_ => 1 } $dbicschema->sources;
+
+ my %tables;
+ foreach my $moniker (sort keys %table_monikers) {
+ my $source = $dbicschema->source($moniker);
+
+ my $table_name = $source->name;
+ my @primary = $source->primary_columns;
+ my @rels = $source->relationships();
+
+ my %created_FK_rels;
+ foreach my $rel (sort @rels) {
+ my $rel_info = $source->relationship_info($rel);
+
+ # Ignore any rel cond that isn't a straight hash
+ next unless ref $rel_info->{cond} eq 'HASH';
+
+ my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
+
+ # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
+ my $fk_constraint;
+ if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
+ $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
+ } elsif ( $rel_info->{attrs}{accessor}
+ && $rel_info->{attrs}{accessor} eq 'multi' ) {
+ $fk_constraint = 0;
+ } else {
+ $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
+ }
+
+      # Don't add a relation if it's not constraining
+ next unless $fk_constraint;
+
+ my $rel_table = $source->related_source($rel)->source_name;
+ # Make sure we don't create the same relation twice
+ my $key_test = join("\x00", sort @keys);
+ next if $created_FK_rels{$rel_table}->{$key_test};
+
+ if (scalar(@keys)) {
+ $created_FK_rels{$rel_table}->{$key_test} = 1;
+
+ # calculate dependencies: do not consider deferrable constraints and
+ # self-references for dependency calculations
+ if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
+ $tables{$moniker}{$rel_table}++;
+ }
+ }
+ }
+ $tables{$moniker} = {} unless exists $tables{$moniker};
+ }
+
+ # resolve entire dep tree
+ my $dependencies = {
+ map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
+ };
+
+ # return the sorted result
+ return sort {
+ keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
+ ||
+ $a cmp $b
+ } (keys %tables);
+}
+
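+# Recursively resolve the full set of tables that $question depends on via
+# foreign key constraints, guarding against circular references. Used by
+# _get_sorted_sources so that sources with fewer dependencies are populated
+# first.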
+sub _resolve_deps {
+ my ( $question, $answers, $seen ) = @_;
+ my $ret = {};
+ $seen ||= {};
+
+ my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
+ $seen{$question} = 1;
+
+ for my $dep (keys %{ $answers->{$question} }) {
+ return {} if $seen->{$dep};
+ my $subdeps = _resolve_deps( $dep, $answers, \%seen );
+ $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
+ ++$ret->{$dep};
+ }
+ return $ret;
+}
+
sub do_post_ddl {
my ($self, $params) = @_;
}
}
+# Helper method for ensuring that the name used for a given source
+# is always the same (This is used to name the fixture directories
+# for example)
+
+sub _name_for_source {
+ my ($self, $source) = @_;
+
+ return ref $source->name ? $source->source_name : $source->name;
+}
+
=head1 AUTHOR
Luke Saunders <luke@shadowcatsystems.co.uk>
Frank Switalski <fswitalski@gmail.com>
+ Chris Akins <chris.hexx@gmail.com>
+
=head1 LICENSE
This library is free software under the same license as perl itself