__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
_inherited_attributes debug schema_class dumped_objects config_attrs/);
-our $VERSION = '1.001026';
+our $VERSION = '1.001_030';
$VERSION = eval $VERSION;
...
- my $fixtures = DBIx::Class::Fixtures->new({
- config_dir => '/home/me/app/fixture_configs'
+ my $fixtures = DBIx::Class::Fixtures->new({
+ config_dir => '/home/me/app/fixture_configs'
});
$fixtures->dump({
}
]
}
- ]
+ ]
}
This will fetch artists with primary keys 1 and 3, the producer with primary
{
"class": "Artist",
"ids": ["1", "3"]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
"fetch": [
- {
+ {
"rel": "artists",
"quantity": "2"
}
"rel": "cds",
"quantity": "all"
} ]
- },
+ },
{
"class": "Producer",
"ids": ["5"],
- "fetch": [ {
+ "fetch": [ {
"rel": "artists",
"quantity": "2",
"fetch": [ {
=head2 has_many
Specifies whether to fetch has_many rels for this set. Must be a hash
-containing keys fetch and quantity.
+containing keys fetch and quantity.
Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
integer.
=over
-=item config_dir:
+=item config_dir:
required. must contain a valid path to the directory in which your .json
configs reside.
-=item debug:
+=item debug:
determines whether to be verbose
-=item ignore_sql_errors:
+=item ignore_sql_errors:
ignore errors on import of DDL etc
}
my $self = {
- config_dir => $config_dir,
+ config_dir => $config_dir,
_inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
- debug => $params->{debug} || 0,
- ignore_sql_errors => $params->{ignore_sql_errors},
- dumped_objects => {},
- use_create => $params->{use_create} || 0,
- use_find_or_create => $params->{use_find_or_create} || 0,
- config_attrs => $params->{config_attrs} || {},
+ debug => $params->{debug} || 0,
+ ignore_sql_errors => $params->{ignore_sql_errors},
+ dumped_objects => {},
+ use_create => $params->{use_create} || 0,
+ use_find_or_create => $params->{use_find_or_create} || 0,
+ config_attrs => $params->{config_attrs} || {},
};
bless $self, $class;
sub available_config_sets {
@config_sets = scalar(@config_sets) ? @config_sets : map {
$_->filename;
- } grep {
+ } grep {
-f "$_" && $_=~/json$/;
- } (shift)->config_dir->all;
+ } shift->config_dir->all;
}
=head2 dump
my $schema = $params->{schema};
my $config;
if ($params->{config}) {
- $config = ref $params->{config} eq 'HASH' ?
- $params->{config} :
+ $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} :
do {
#read config
my $config_file = io->catfile($self->config_dir, $params->{config});
- $self->load_config_file($config_file);
+ $self->load_config_file("$config_file");
};
} elsif ($params->{all}) {
my %excludes = map {$_=>1} @{$params->{excludes}||[]};
- $config = {
+ $config = {
might_have => { fetch => 0 },
has_many => { fetch => 0 },
belongs_to => { fetch => 0 },
}
$self->msg("generating fixtures");
- my $tmp_output_dir = io->dir(tempdir);;
+ my $tmp_output_dir = io->dir(tempdir);
if (-e "$tmp_output_dir") {
$self->msg("- clearing existing $tmp_output_dir");
if ($source->{cond} and ref $source->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $source->{cond} = {
- map {
- $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
- : $source->{cond}->{$_}
- } keys %{$source->{cond}}
+ $source->{cond} = {
+ map {
+ $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
+ : $source->{cond}->{$_}
+ } keys %{$source->{cond}}
};
}
- $rs = $rs->search($source->{cond}, { join => $source->{join} })
+ $rs = $rs->search($source->{cond}, { join => $source->{join} })
if $source->{cond};
$self->msg("- dumping $source->{class}");
my %source_options = ( set => { %{$config}, %{$source} } );
if ($source->{quantity}) {
- $rs = $rs->search({}, { order_by => $source->{order_by} })
+ $rs = $rs->search({}, { order_by => $source->{order_by} })
if $source->{order_by};
if ($source->{quantity} =~ /^\d+$/) {
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless ref $incs eq 'ARRAY';
-
+
foreach my $include_config (@$incs) {
DBIx::Class::Exception->throw(
'includes params of config must be an array ref of hashrefs'
) unless (ref $include_config eq 'HASH') && $include_config->{file};
-
+
my $include_file = $self->config_dir->file($include_config->{file});
DBIx::Class::Exception->throw("config does not exist at $include_file")
unless -e "$include_file";
-
+
my $include = Config::Any::JSON->load($include_file);
$self->msg($include);
$config = merge( $config, $include );
}
delete $config->{includes};
}
-
+
# validate config
return DBIx::Class::Exception->throw('config has no sets')
- unless $config && $config->{sets} &&
+ unless $config && $config->{sets} &&
ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
$config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
$self->dump_object($row, $params);
}
}
-
+
sub dump_object {
- my ($self, $object, $params) = @_;
+ my ($self, $object, $params) = @_;
my $set = $params->{set};
my $v = Data::Visitor::Callback->new(
},
catfile => sub {
my ($self, @args) = @_;
- io->catfile(@args);
+ "".io->catfile(@args);
},
catdir => sub {
my ($self, @args) = @_;
- io->catdir(@args);
+ "".io->catdir(@args);
},
};
- my $subsre = join( '|', keys %$subs );
+ my $subsre = join( '|', keys %$subs );
$_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
return $_;
}
);
-
+
$v->visit( $set );
die 'no dir passed to dump_object' unless $params->{set_dir};
my @inherited_attrs = @{$self->_inherited_attributes};
my @pk_vals = map {
- $object->get_column($_)
+ $object->get_column($_)
} $object->primary_columns;
my $key = join("\0", @pk_vals);
# do the actual dumping
my $serialized = Dump(\%ds)->Out();
+
$file->print($serialized);
}
# if belongs_to or might_have with might_have param set or has_many with
# has_many param set then
if (
- ( $info->{attrs}{accessor} eq 'single' &&
- (!$info->{attrs}{join_type} || $might_have)
+ ( $info->{attrs}{accessor} eq 'single' &&
+ (!$info->{attrs}{join_type} || $might_have)
)
- || $info->{attrs}{accessor} eq 'filter'
- ||
+ || $info->{attrs}{accessor} eq 'filter'
+ ||
($info->{attrs}{accessor} eq 'multi' && $has_many)
) {
- my $related_rs = $object->related_resultset($name);
+ my $related_rs = $object->related_resultset($name);
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
# these parts of the rule only apply to has_many rels
- if ($rule && $info->{attrs}{accessor} eq 'multi') {
+ if ($rule && $info->{attrs}{accessor} eq 'multi') {
$related_rs = $related_rs->search(
- $rule->{cond},
+ $rule->{cond},
{ join => $rule->{join} }
) if ($rule->{cond});
) if ($rule->{quantity} && $rule->{quantity} ne 'all');
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $rule->{order_by} }
- ) if ($rule->{order_by});
+ ) if ($rule->{order_by});
}
- if ($set->{has_many}{quantity} &&
+ if ($set->{has_many}{quantity} &&
$set->{has_many}{quantity} =~ /^\d+$/) {
$related_rs = $related_rs->search(
- {},
+ {},
{ rows => $set->{has_many}->{quantity} }
);
}
my %c_params = %{$params};
# inherit date param
- my %mock_set = map {
- $_ => $set->{$_}
+ my %mock_set = map {
+ $_ => $set->{$_}
} grep { $set->{$_} } @inherited_attrs;
$c_params{set} = \%mock_set;
if $rule && $rule->{fetch};
$self->dump_rs($related_rs, \%c_params);
- }
+ }
}
}
-
+
return unless $set && $set->{fetch};
foreach my $fetch (@{$set->{fetch}}) {
# inherit date param
- $fetch->{$_} = $set->{$_} foreach
+ $fetch->{$_} = $set->{$_} foreach
grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
my $related_rs = $object->related_resultset($fetch->{rel});
my $rule = $set->{rules}->{$related_rs->result_source->source_name};
} elsif ($rule->{fetch}) {
$fetch = merge( $fetch, { fetch => $rule->{fetch} } );
}
- }
+ }
- die "relationship $fetch->{rel} does not exist for " . $src->source_name
+ die "relationship $fetch->{rel} does not exist for " . $src->source_name
unless ($related_rs);
if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
# if value starts with \ assume it's meant to be passed as a scalar ref
# to dbic. ideally this would substitute deeply
- $fetch->{cond} = { map {
- $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
- : $fetch->{cond}->{$_}
+ $fetch->{cond} = { map {
+ $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
+ : $fetch->{cond}->{$_}
} keys %{$fetch->{cond}} };
}
$related_rs = $related_rs->search(
- $fetch->{cond},
+ $fetch->{cond},
{ join => $fetch->{join} }
) if $fetch->{cond};
{ rows => $fetch->{quantity} }
) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
$related_rs = $related_rs->search(
- {},
+ {},
{ order_by => $fetch->{order_by} }
) if $fetch->{order_by};
$pre_schema->storage->txn_do(sub {
$pre_schema->storage->with_deferred_fk_checks(sub {
foreach my $table (@tables) {
- eval {
- $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+ eval {
+ $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
};
}
});
$fixtures->populate( {
# directory to look for fixtures in, as specified to dump
- directory => '/home/me/app/fixtures',
+ directory => '/home/me/app/fixtures',
# DDL to deploy
- ddl => '/home/me/app/sql/ddl.sql',
+ ddl => '/home/me/app/sql/ddl.sql',
# database to clear, deploy and then populate
- connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
+ connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
# DDL to deploy after populating records, ie. FK constraints
post_ddl => '/home/me/app/sql/post_ddl.sql',
# use CASCADE option when dropping tables
cascade => 1,
- # optional, set to 1 to run ddl but not populate
+ # optional, set to 1 to run ddl but not populate
no_populate => 0,
# optional, set to 1 to run each fixture through ->create rather than have
attribute which will make the drop table functionality cascade, ie 'DROP TABLE
$table CASCADE'.
-C<directory> is a required attribute.
+C<directory> is a required attribute.
If you wish for DBIx::Class::Fixtures to clear the database for you pass in
C<dll> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
unless (ref $params->{connection_details} eq 'ARRAY') {
return DBIx::Class::Exception->throw('connection details must be an arrayref');
}
- $schema = $self->_generate_schema({
- ddl => $ddl_file,
+ $schema = $self->_generate_schema({
+ ddl => "$ddl_file",
connection_details => delete $params->{connection_details},
%{$params}
});
}
- return 1 if $params->{no_populate};
-
+ return 1 if $params->{no_populate};
+
$self->msg("\nimporting fixtures");
my $tmp_fixture_dir = io->dir(tempdir());
- my $version_file = io->file($fixture_dir, '_dumper_version');
my $config_set_path = io->file($fixture_dir, '_config_set');
my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
},
};
- my $subsre = join( '|', keys %$subs );
+ my $subsre = join( '|', keys %$subs );
$_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
return $_;
}
);
-
+
$v->visit( $config_set );
@{$config_set->{sets}}
}
-# DBIx::Class::Exception->throw('no version file found');
-# unless -e $version_file;
-
if (-e "$tmp_fixture_dir") {
$self->msg("- deleting existing temp directory $tmp_fixture_dir");
$tmp_fixture_dir->rmtree;
$formatter->format_datetime(DateTime->today->add_duration($_))
};
}
- $callbacks{object} ||= "visit_ref";
+ $callbacks{object} ||= "visit_ref";
$fixup_visitor = new Data::Visitor::Callback(%callbacks);
}
+ my @sorted_source_names = $self->_get_sorted_sources( $schema );
$schema->storage->txn_do(sub {
$schema->storage->with_deferred_fk_checks(sub {
- foreach my $source (sort $schema->sources) {
+ foreach my $source (@sorted_source_names) {
$self->msg("- adding " . $source);
my $rs = $schema->resultset($source);
my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
return 1;
}
+# Return this schema's source monikers topologically ordered so that any
+# source another source depends on (via a foreign-key constraint) sorts
+# first — rows can then be inserted parent-before-child during populate.
+# The overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse.
+sub _get_sorted_sources {
+  my ( $self, $dbicschema ) = @_;
+
+
+  my %table_monikers = map { $_ => 1 } $dbicschema->sources;
+
+  # %tables: moniker => { moniker_it_depends_on => count, ... }
+  my %tables;
+  foreach my $moniker (sort keys %table_monikers) {
+    my $source = $dbicschema->source($moniker);
+
+    my $table_name = $source->name;
+    my @primary = $source->primary_columns;
+    my @rels = $source->relationships();
+
+    my %created_FK_rels;
+    foreach my $rel (sort @rels) {
+      my $rel_info = $source->relationship_info($rel);
+
+      # Ignore any rel cond that isn't a straight hash
+      next unless ref $rel_info->{cond} eq 'HASH';
+
+      # Foreign-side column names captured from 'foreign.col'-style cond values
+      my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
+
+      # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
+      my $fk_constraint;
+      if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
+        # explicit flag on the rel wins
+        $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
+      } elsif ( $rel_info->{attrs}{accessor}
+          && $rel_info->{attrs}{accessor} eq 'multi' ) {
+        # has_many-style rels never constrain this source
+        $fk_constraint = 0;
+      } else {
+        # otherwise infer: constraining unless the rel keys are our own PK
+        $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
+      }
+
+      # Don't add a relation if it's not constraining
+      next unless $fk_constraint;
+
+      my $rel_table = $source->related_source($rel)->source_name;
+      # Make sure we don't create the same relation twice
+      my $key_test = join("\x00", sort @keys);
+      next if $created_FK_rels{$rel_table}->{$key_test};
+
+      if (scalar(@keys)) {
+        $created_FK_rels{$rel_table}->{$key_test} = 1;
+
+        # calculate dependencies: do not consider deferrable constraints and
+        # self-references for dependency calculations
+        if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
+          $tables{$moniker}{$rel_table}++;
+        }
+      }
+    }
+    # sources with no FK dependencies still need an (empty) entry so they sort
+    $tables{$moniker} = {} unless exists $tables{$moniker};
+  }
+
+  # resolve entire dep tree
+  my $dependencies = {
+    map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
+  };
+
+  # return the sorted result: fewest transitive dependencies first
+  # ('keys' in scalar context yields the count), ties broken by name
+  return sort {
+    keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
+    ||
+    $a cmp $b
+  } (keys %tables);
+}
+
+# Recursively expand the transitive dependency set for $question using the
+# adjacency map $answers (moniker => { dep_moniker => count }).  Returns a
+# hashref of dep_moniker => count.  $seen carries the monikers already on
+# the current recursion path so circular relationships terminate instead of
+# recursing forever.
+sub _resolve_deps {
+  my ( $question, $answers, $seen ) = @_;
+  my $ret = {};
+  $seen ||= {};
+
+  # copy the path-so-far for the recursive calls, so sibling branches
+  # do not see each other's visits
+  my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
+  $seen{$question} = 1;
+
+  for my $dep (keys %{ $answers->{$question} }) {
+    # cycle guard: $dep was already on the path handed to us — give up
+    # on this branch rather than loop (checks the caller-supplied $seen)
+    return {} if $seen->{$dep};
+    my $subdeps = _resolve_deps( $dep, $answers, \%seen );
+    # fold the child's transitive deps into ours, then count $dep itself
+    $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
+    ++$ret->{$dep};
+  }
+  return $ret;
+}
+
sub do_post_ddl {
my ($self, $params) = @_;