use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
-use Path::Class qw(dir file tempdir);
-use File::Spec::Functions 'catfile', 'catdir';
use Config::Any::JSON;
use Data::Dump::Streamer;
use Data::Visitor::Callback;
-use File::Path;
-use File::Copy::Recursive qw/dircopy/;
-use File::Copy qw/move/;
use Hash::Merge qw( merge );
use Data::Dumper;
use Class::C3::Componentised;
use MIME::Base64;
+use IO::All;
+use File::Temp qw/tempdir/;
use base qw(Class::Accessor::Grouped);
__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
_inherited_attributes debug schema_class dumped_objects config_attrs/);
-our $VERSION = '1.001026';
+our $VERSION = '1.001_030';
$VERSION = eval $VERSION;
...
- my $fixtures = DBIx::Class::Fixtures->new({
- config_dir => '/home/me/app/fixture_configs'
+ my $fixtures = DBIx::Class::Fixtures->new({
+ config_dir => '/home/me/app/fixture_configs'
});
$fixtures->dump({
}
]
}
- ]
+ ]
}
This will fetch artists with primary keys 1 and 3, the producer with primary
{
"class": "Artist",
"ids": ["1", "3"]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
"fetch": [
- {
+ {
"rel": "artists",
"quantity": "2"
}
"rel": "cds",
"quantity": "all"
} ]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
- "fetch": [ {
+ "fetch": [ {
"rel": "artists",
"quantity": "2",
"fetch": [ {
=head2 has_many
Specifies whether to fetch has_many rels for this set. Must be a hash
-containing keys fetch and quantity.
+containing keys fetch and quantity.
Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
integer.
=over
-=item config_dir:
+=item config_dir:
required. must contain a valid path to the directory in which your .json
configs reside.
-=item debug:
+=item debug:
determines whether to be verbose
-=item ignore_sql_errors:
+=item ignore_sql_errors:
ignore errors on import of DDL etc
return DBIx::Class::Exception->throw('config_dir param not specified');
}
- my $config_dir = dir($params->{config_dir});
+ my $config_dir = io->dir($params->{config_dir});
unless (-e $params->{config_dir}) {
return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
}
my $self = {
- config_dir => $config_dir,
+ config_dir => $config_dir,
_inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
- debug => $params->{debug} || 0,
- ignore_sql_errors => $params->{ignore_sql_errors},
- dumped_objects => {},
- use_create => $params->{use_create} || 0,
- use_find_or_create => $params->{use_find_or_create} || 0,
- config_attrs => $params->{config_attrs} || {},
+ debug => $params->{debug} || 0,
+ ignore_sql_errors => $params->{ignore_sql_errors},
+ dumped_objects => {},
+ use_create => $params->{use_create} || 0,
+ use_find_or_create => $params->{use_find_or_create} || 0,
+ config_attrs => $params->{config_attrs} || {},
};
bless $self, $class;
+# Package-level cache: filled on first call and reused for the life of the
+# process (shared across all instances).
my @config_sets;
+# Return (and memoize) the filenames of all *.json config files sitting
+# directly in config_dir.
+# NOTE(review): this hunk migrates from Path::Class (children()/basename())
+# to IO::All (all()/filename()); the file test stringifies the IO::All
+# object ("-f "$_"") because -f on the object itself is not reliable here.
sub available_config_sets {
@config_sets = scalar(@config_sets) ? @config_sets : map {
- $_->basename;
- } grep {
- -f $_ && $_=~/json$/;
- } dir((shift)->config_dir)->children;
+ $_->filename;
+ } grep {
+ -f "$_" && $_=~/json$/;
+ } shift->config_dir->all;
}
=head2 dump
my $schema = $params->{schema};
my $config;
if ($params->{config}) {
- $config = ref $params->{config} eq 'HASH' ?
- $params->{config} :
+ $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} :
do {
#read config
- my $config_file = $self->config_dir->file($params->{config});
- $self->load_config_file($config_file);
+ my $config_file = io->catfile($self->config_dir, $params->{config});
+ $self->load_config_file("$config_file");
};
} elsif ($params->{all}) {
my %excludes = map {$_=>1} @{$params->{excludes}||[]};
- $config = {
+ $config = {
might_have => { fetch => 0 },
has_many => { fetch => 0 },
belongs_to => { fetch => 0 },
DBIx::Class::Exception->throw('must pass config or set all');
}
- my $output_dir = dir($params->{directory});
- unless (-e $output_dir) {
+ my $output_dir = io->dir($params->{directory});
+ unless (-e "$output_dir") {
$output_dir->mkpath ||
DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
}
$self->msg("generating fixtures");
- my $tmp_output_dir = tempdir();
+ my $tmp_output_dir = io->dir(tempdir);
- if (-e $tmp_output_dir) {
+ if (-e "$tmp_output_dir") {
$self->msg("- clearing existing $tmp_output_dir");
$tmp_output_dir->rmtree;
}
$tmp_output_dir->mkpath;
# write version file (for the potential benefit of populate)
- $tmp_output_dir->file('_dumper_version')
- ->openw
- ->print($VERSION);
+ $tmp_output_dir->file('_dumper_version')->print($VERSION);
# write our current config set
- $tmp_output_dir->file('_config_set')
- ->openw
- ->print( Dumper $config );
+ $tmp_output_dir->file('_config_set')->print( Dumper $config );
$config->{rules} ||= {};
my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
if ($source->{cond} and ref $source->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $source->{cond} = {
- map {
- $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
- : $source->{cond}->{$_}
- } keys %{$source->{cond}}
+ $source->{cond} = {
+ map {
+ $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
+ : $source->{cond}->{$_}
+ } keys %{$source->{cond}}
};
}
- $rs = $rs->search($source->{cond}, { join => $source->{join} })
+ $rs = $rs->search($source->{cond}, { join => $source->{join} })
if $source->{cond};
$self->msg("- dumping $source->{class}");
my %source_options = ( set => { %{$config}, %{$source} } );
if ($source->{quantity}) {
- $rs = $rs->search({}, { order_by => $source->{order_by} })
+ $rs = $rs->search({}, { order_by => $source->{order_by} })
if $source->{order_by};
if ($source->{quantity} =~ /^\d+$/) {
}
# clear existing output dir
- foreach my $child ($output_dir->children) {
+ foreach my $child ($output_dir->all) {
if ($child->is_dir) {
- next if ($child eq $tmp_output_dir);
- if (grep { $_ =~ /\.fix/ } $child->children) {
+ next if ("$child" eq "$tmp_output_dir");
+ if (grep { $_ =~ /\.fix/ } $child->all) {
$child->rmtree;
}
} elsif ($child =~ /_dumper_version$/) {
- $child->remove;
+ $child->unlink;
}
}
$self->msg("- moving temp dir to $output_dir");
- dircopy($tmp_output_dir, $output_dir);
+ $tmp_output_dir->copy("$output_dir");
- if (-e $output_dir) {
+ if (-e "$output_dir") {
$self->msg("- clearing tmp dir $tmp_output_dir");
# delete existing fixture set
- $tmp_output_dir->remove;
+ $tmp_output_dir->rmtree;
}
$self->msg("done");
sub load_config_file {
my ($self, $config_file) = @_;
DBIx::Class::Exception->throw("config does not exist at $config_file")
- unless -e $config_file;
+ unless -e "$config_file";
my $config = Config::Any::JSON->load($config_file);
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless ref $incs eq 'ARRAY';
-
+
foreach my $include_config (@$incs) {
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless (ref $include_config eq 'HASH') && $include_config->{file};
-
+
my $include_file = $self->config_dir->file($include_config->{file});
DBIx::Class::Exception->throw("config does not exist at $include_file")
- unless -e $include_file;
-
+ unless -e "$include_file";
+
my $include = Config::Any::JSON->load($include_file);
$self->msg($include);
$config = merge( $config, $include );
}
delete $config->{includes};
}
-
+
# validate config
return DBIx::Class::Exception->throw('config has no sets')
- unless $config && $config->{sets} &&
+ unless $config && $config->{sets} &&
ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
$config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
$self->dump_object($row, $params);
}
}
-
+
sub dump_object {
- my ($self, $object, $params) = @_;
+ my ($self, $object, $params) = @_;
my $set = $params->{set};
my $v = Data::Visitor::Callback->new(
},
catfile => sub {
my ($self, @args) = @_;
- catfile(@args);
+ "".io->catfile(@args);
},
catdir => sub {
my ($self, @args) = @_;
- catdir(@args);
+ "".io->catdir(@args);
},
};
- my $subsre = join( '|', keys %$subs );
+ my $subsre = join( '|', keys %$subs );
$_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
return $_;
}
);
-
+
$v->visit( $set );
die 'no dir passed to dump_object' unless $params->{set_dir};
my @inherited_attrs = @{$self->_inherited_attributes};
my @pk_vals = map {
- $object->get_column($_)
+ $object->get_column($_)
} $object->primary_columns;
my $key = join("\0", @pk_vals);
# write dir and gen filename
- my $source_dir = $params->{set_dir}->subdir($self->_name_for_source($src));
+ my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
$source_dir->mkpath(0, 0777);
- # strip dir separators from file name
- my $file = $source_dir->file(
- join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
+ # Convert characters not allowed on windows
+ my $file = io->catfile("$source_dir",
+ join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
);
# write file
unless ($exists) {
- $self->msg('-- dumping ' . $file->stringify, 2);
+ $self->msg('-- dumping ' . "$file", 2);
my %ds = $object->get_columns;
if($set->{external}) {
# do the actual dumping
my $serialized = Dump(\%ds)->Out();
- $file->openw->print($serialized);
+
+ $file->print($serialized);
}
# don't bother looking at rels unless we are actually planning to dump at least one type
# if belongs_to or might_have with might_have param set or has_many with
# has_many param set then
if (
- ( $info->{attrs}{accessor} eq 'single' &&
- (!$info->{attrs}{join_type} || $might_have)
+ ( $info->{attrs}{accessor} eq 'single' &&
+ (!$info->{attrs}{join_type} || $might_have)
)
- || $info->{attrs}{accessor} eq 'filter'
- ||
+ || $info->{attrs}{accessor} eq 'filter'
+ ||
($info->{attrs}{accessor} eq 'multi' && $has_many)
) {
- my $related_rs = $object->related_resultset($name);
+ my $related_rs = $object->related_resultset($name);
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
# these parts of the rule only apply to has_many rels
- if ($rule && $info->{attrs}{accessor} eq 'multi') {
+ if ($rule && $info->{attrs}{accessor} eq 'multi') {
$related_rs = $related_rs->search(
- $rule->{cond},
+ $rule->{cond},
{ join => $rule->{join} }
) if ($rule->{cond});
) if ($rule->{quantity} && $rule->{quantity} ne 'all');
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $rule->{order_by} }
- ) if ($rule->{order_by});
+ ) if ($rule->{order_by});
}
- if ($set->{has_many}{quantity} &&
+ if ($set->{has_many}{quantity} &&
$set->{has_many}{quantity} =~ /^\d+$/) {
$related_rs = $related_rs->search(
- {},
+ {},
{ rows => $set->{has_many}->{quantity} }
);
}
my %c_params = %{$params};
# inherit date param
- my %mock_set = map {
- $_ => $set->{$_}
+ my %mock_set = map {
+ $_ => $set->{$_}
} grep { $set->{$_} } @inherited_attrs;
$c_params{set} = \%mock_set;
if $rule && $rule->{fetch};
$self->dump_rs($related_rs, \%c_params);
- }
+ }
}
}
-
+
return unless $set && $set->{fetch};
foreach my $fetch (@{$set->{fetch}}) {
# inherit date param
- $fetch->{$_} = $set->{$_} foreach
+ $fetch->{$_} = $set->{$_} foreach
grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
my $related_rs = $object->related_resultset($fetch->{rel});
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
} elsif ($rule->{fetch}) {
$fetch = merge( $fetch, { fetch => $rule->{fetch} } );
}
- }
+ }
- die "relationship $fetch->{rel} does not exist for " . $src->source_name
+ die "relationship $fetch->{rel} does not exist for " . $src->source_name
unless ($related_rs);
if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $fetch->{cond} = { map {
- $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
- : $fetch->{cond}->{$_}
+ $fetch->{cond} = { map {
+ $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
+ : $fetch->{cond}->{$_}
} keys %{$fetch->{cond}} };
}
$related_rs = $related_rs->search(
- $fetch->{cond},
+ $fetch->{cond},
{ join => $fetch->{join} }
) if $fetch->{cond};
{ rows => $fetch->{quantity} }
) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $fetch->{order_by} }
) if $fetch->{order_by};
$pre_schema->storage->txn_do(sub {
$pre_schema->storage->with_deferred_fk_checks(sub {
foreach my $table (@tables) {
- eval {
- $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+ eval {
+ $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
};
}
});
configs => [qw/one.json other.json/],
directory_template => sub {
my ($fixture, $params, $set) = @_;
- return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
},
});
schema => $schema,
directory_template => sub {
my ($fixture, $params, $set) = @_;
- return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+ return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
},
});
$fixtures->populate( {
# directory to look for fixtures in, as specified to dump
- directory => '/home/me/app/fixtures',
+ directory => '/home/me/app/fixtures',
# DDL to deploy
- ddl => '/home/me/app/sql/ddl.sql',
+ ddl => '/home/me/app/sql/ddl.sql',
# database to clear, deploy and then populate
- connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
+ connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
# DDL to deploy after populating records, ie. FK constraints
post_ddl => '/home/me/app/sql/post_ddl.sql',
# use CASCADE option when dropping tables
cascade => 1,
- # optional, set to 1 to run ddl but not populate
+ # optional, set to 1 to run ddl but not populate
no_populate => 0,
# optional, set to 1 to run each fixture through ->create rather than have
attribute which will make the drop table functionality cascade, ie 'DROP TABLE
$table CASCADE'.
-C<directory> is a required attribute.
+C<directory> is a required attribute.
If you wish for DBIx::Class::Fixtures to clear the database for you pass in
C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
DBIx::Class::Exception->throw('directory param not specified')
unless $params->{directory};
- my $fixture_dir = dir(delete $params->{directory});
+ my $fixture_dir = io->dir(delete $params->{directory});
DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
- unless -d $fixture_dir;
+ unless -d "$fixture_dir";
my $ddl_file;
my $dbh;
my $schema;
if ($params->{ddl} && $params->{connection_details}) {
- $ddl_file = file(delete $params->{ddl});
- unless (-e $ddl_file) {
+ $ddl_file = io->file(delete $params->{ddl});
+ unless (-e "$ddl_file") {
return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
}
unless (ref $params->{connection_details} eq 'ARRAY') {
return DBIx::Class::Exception->throw('connection details must be an arrayref');
}
- $schema = $self->_generate_schema({
- ddl => $ddl_file,
+ $schema = $self->_generate_schema({
+ ddl => "$ddl_file",
connection_details => delete $params->{connection_details},
%{$params}
});
}
- return 1 if $params->{no_populate};
-
+ return 1 if $params->{no_populate};
+
$self->msg("\nimporting fixtures");
- my $tmp_fixture_dir = tempdir();
- my $version_file = file($fixture_dir, '_dumper_version');
- my $config_set_path = file($fixture_dir, '_config_set');
- my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
+ my $tmp_fixture_dir = io->dir(tempdir());
+ my $config_set_path = io->file($fixture_dir, '_config_set');
+ my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
my $v = Data::Visitor::Callback->new(
plain_value => sub {
},
catfile => sub {
my ($self, @args) = @_;
- catfile(@args);
+ io->catfile(@args);
},
catdir => sub {
my ($self, @args) = @_;
- catdir(@args);
+ io->catdir(@args);
},
};
- my $subsre = join( '|', keys %$subs );
+ my $subsre = join( '|', keys %$subs );
$_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
return $_;
}
);
-
+
$v->visit( $config_set );
@{$config_set->{sets}}
}
-# DBIx::Class::Exception->throw('no version file found');
-# unless -e $version_file;
-
- if (-e $tmp_fixture_dir) {
+ if (-e "$tmp_fixture_dir") {
$self->msg("- deleting existing temp directory $tmp_fixture_dir");
$tmp_fixture_dir->rmtree;
}
$self->msg("- creating temp dir");
$tmp_fixture_dir->mkpath();
for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
- my $from_dir = $fixture_dir->subdir($_);
- next unless -e $from_dir;
- dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
+ my $from_dir = io->catdir($fixture_dir, $_);
+ next unless -e "$from_dir";
+ $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
}
- unless (-d $tmp_fixture_dir) {
+ unless (-d "$tmp_fixture_dir") {
DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
}
$formatter->format_datetime(DateTime->today->add_duration($_))
};
}
- $callbacks{object} ||= "visit_ref";
+ $callbacks{object} ||= "visit_ref";
$fixup_visitor = new Data::Visitor::Callback(%callbacks);
}
+ my @sorted_source_names = $self->_get_sorted_sources( $schema );
$schema->storage->txn_do(sub {
$schema->storage->with_deferred_fk_checks(sub {
- foreach my $source (sort $schema->sources) {
+ foreach my $source (@sorted_source_names) {
$self->msg("- adding " . $source);
my $rs = $schema->resultset($source);
- my $source_dir = $tmp_fixture_dir->subdir( $self->_name_for_source($rs->result_source) );
- next unless (-e $source_dir);
+ my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
+ next unless (-e "$source_dir");
my @rows;
while (my $file = $source_dir->next) {
next unless ($file =~ /\.fix$/);
return 1;
}
+# the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
+# Return the schema's source monikers ordered so that FK-constraint targets
+# sort before the sources that depend on them (fewest transitive
+# dependencies first, ties broken alphabetically), allowing rows to be
+# inserted without tripping foreign-key constraints.
+sub _get_sorted_sources {
+ my ( $self, $dbicschema ) = @_;
+
+
+ my %table_monikers = map { $_ => 1 } $dbicschema->sources;
+
+ # moniker => { dependent-moniker => count } adjacency map built below
+ my %tables;
+ foreach my $moniker (sort keys %table_monikers) {
+ my $source = $dbicschema->source($moniker);
+
+ my $table_name = $source->name;
+ my @primary = $source->primary_columns;
+ my @rels = $source->relationships();
+
+ my %created_FK_rels;
+ foreach my $rel (sort @rels) {
+ my $rel_info = $source->relationship_info($rel);
+
+ # Ignore any rel cond that isn't a straight hash
+ next unless ref $rel_info->{cond} eq 'HASH';
+
+ # Column names on the near side of the cond (captures the part after
+ # the dot of values shaped like "self.colname") -- presumably the
+ # local FK columns; verify against DBIC cond conventions.
+ my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
+
+ # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
+ my $fk_constraint;
+ if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
+ # explicit attr wins when the rel declares it
+ $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
+ } elsif ( $rel_info->{attrs}{accessor}
+ && $rel_info->{attrs}{accessor} eq 'multi' ) {
+ # has_many side never owns the constraint
+ $fk_constraint = 0;
+ } else {
+ # heuristic: if the rel keys are not this source's PK, treat as FK
+ $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
+ }
+
+ # Dont add a relation if its not constraining
+ next unless $fk_constraint;
+
+ my $rel_table = $source->related_source($rel)->source_name;
+ # Make sure we don't create the same relation twice
+ my $key_test = join("\x00", sort @keys);
+ next if $created_FK_rels{$rel_table}->{$key_test};
+
+ if (scalar(@keys)) {
+ $created_FK_rels{$rel_table}->{$key_test} = 1;
+
+ # calculate dependencies: do not consider deferrable constraints and
+ # self-references for dependency calculations
+ if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
+ $tables{$moniker}{$rel_table}++;
+ }
+ }
+ }
+ # ensure every moniker appears even with zero dependencies
+ $tables{$moniker} = {} unless exists $tables{$moniker};
+ }
+
+ # resolve entire dep tree
+ my $dependencies = {
+ map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
+ };
+
+ # return the sorted result
+ # (keys %{...} in scalar context via <=> yields the transitive dep count)
+ return sort {
+ keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
+ ||
+ $a cmp $b
+ } (keys %tables);
+}
+
+# Recursively expand the transitive dependencies of $question using the
+# adjacency map $answers (moniker => { dep => count }).  Returns a hashref
+# of dep => occurrence-count.  $seen carries the monikers already on the
+# current recursion path; a dep found there indicates a cycle, in which
+# case an empty hashref is returned for that branch (cycle is ignored
+# rather than fatal).  Plain function, not a method.
+sub _resolve_deps {
+ my ( $question, $answers, $seen ) = @_;
+ my $ret = {};
+ $seen ||= {};
+
+ # copy the incoming path, bumping counts, then mark the current node
+ my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
+ $seen{$question} = 1;
+
+ for my $dep (keys %{ $answers->{$question} }) {
+ # cycle guard: checks the caller's path (not including $question at
+ # the top level, but including it in all recursive calls)
+ return {} if $seen->{$dep};
+ my $subdeps = _resolve_deps( $dep, $answers, \%seen );
+ $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
+ ++$ret->{$dep};
+ }
+ return $ret;
+}
+
sub do_post_ddl {
my ($self, $params) = @_;