diff --git a/lib/DBIx/Class/Fixtures.pm b/lib/DBIx/Class/Fixtures.pm
index aef6647..0cf6786 100644
--- a/lib/DBIx/Class/Fixtures.pm
+++ b/lib/DBIx/Class/Fixtures.pm
@@ -6,18 +6,15 @@ use warnings;
 use DBIx::Class 0.08100;
 use DBIx::Class::Exception;
 use Class::Accessor::Grouped;
-use Path::Class qw(dir file);
-use File::Spec::Functions 'catfile', 'catdir';
 use Config::Any::JSON;
 use Data::Dump::Streamer;
 use Data::Visitor::Callback;
-use File::Path;
-use File::Copy::Recursive qw/dircopy/;
-use File::Copy qw/move/;
 use Hash::Merge qw( merge );
 use Data::Dumper;
 use Class::C3::Componentised;
 use MIME::Base64;
+use IO::All;
+use File::Temp qw/tempdir/;
 
 use base qw(Class::Accessor::Grouped);
 
@@ -26,13 +23,9 @@ our $namespace_counter = 0;
 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
     _inherited_attributes debug schema_class dumped_objects config_attrs/);
 
-=head1 VERSION
+our $VERSION = '1.001_030';
 
-Version 1.001013
-
-=cut
-
-our $VERSION = '1.001014';
+$VERSION = eval $VERSION;
 
 =head1 NAME
 
@@ -44,8 +37,8 @@ DBIx::Class::Fixtures - Dump data and repopulate a database using rules
 
   ...
 
-  my $fixtures = DBIx::Class::Fixtures->new({ 
-    config_dir => '/home/me/app/fixture_configs' 
+  my $fixtures = DBIx::Class::Fixtures->new({
+    config_dir => '/home/me/app/fixture_configs'
   });
 
   $fixtures->dump({
@@ -93,7 +86,7 @@ For example:
         }
       ]
     }
-  ] 
+  ]
 }
 
 This will fetch artists with primary keys 1 and 3, the producer with primary
@@ -119,12 +112,12 @@ rule to specify this. For example:
   {
     "class": "Artist",
     "ids": ["1", "3"]
-  }, 
+  },
   {
     "class": "Producer",
     "ids": ["5"],
     "fetch": [
-      { 
+      {
        "rel": "artists",
        "quantity": "2"
      }
@@ -154,11 +147,11 @@ to CD. This is equivalent to:
       "rel": "cds",
       "quantity": "all"
     } ]
-  }, 
+  },
   {
     "class": "Producer",
     "ids": ["5"],
-    "fetch": [ { 
+    "fetch": [ {
       "rel": "artists",
       "quantity": "2",
       "fetch": [ {
@@ -330,7 +323,7 @@ not if using for belongs_to or might_have relationships.
 =head2 has_many
 
 Specifies whether to fetch has_many rels for this set. Must be a hash
-containing keys fetch and quantity. 
+containing keys fetch and quantity.
 
 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
 integer.
@@ -425,7 +418,7 @@ Provide a value from L</config_attrs>
 
 Create the path to a file from a list
 
-=heade catdir
+=head2 catdir
 
 Create the path to a directory from a list
 
@@ -446,16 +439,16 @@ parameters:
 
 =over
 
-=item config_dir: 
+=item config_dir:
 
 required. must contain a valid path to the directory in which your .json
 configs reside.
 
-=item debug: 
+=item debug:
 
 determines whether to be verbose
 
-=item ignore_sql_errors: 
+=item ignore_sql_errors:
 
 ignore errors on import of DDL etc
 
@@ -505,19 +498,20 @@ sub new {
     return DBIx::Class::Exception->throw('config_dir param not specified');
   }
 
-  my $config_dir = dir($params->{config_dir});
+  my $config_dir = io->dir($params->{config_dir});
   unless (-e $params->{config_dir}) {
     return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
   }
 
   my $self = {
-              config_dir => $config_dir, 
+              config_dir => $config_dir,
               _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
-              debug => $params->{debug} || 0,
-              ignore_sql_errors => $params->{ignore_sql_errors},
-              dumped_objects => {},
-              use_create => $params->{use_create} || 0,
-              config_attrs => $params->{config_attrs} || {},
+              debug => $params->{debug} || 0,
+              ignore_sql_errors => $params->{ignore_sql_errors},
+              dumped_objects => {},
+              use_create => $params->{use_create} || 0,
+              use_find_or_create => $params->{use_find_or_create} || 0,
+              config_attrs => $params->{config_attrs} || {},
   };
 
   bless $self, $class;
@@ -535,10 +529,10 @@ be a list of the json based files containing dump rules.
 
 my @config_sets;
 sub available_config_sets {
   @config_sets = scalar(@config_sets) ? @config_sets : map {
-    $_->basename;
-  } grep { 
-    -f $_ && $_=~/json$/;
-  } dir((shift)->config_dir)->children;
+    $_->filename;
+  } grep {
+    -f "$_" && $_=~/json$/;
+  } shift->config_dir->all;
 }
 
 =head2 dump
@@ -603,16 +597,16 @@ sub dump {
   my $schema = $params->{schema};
   my $config;
   if ($params->{config}) {
-    $config = ref $params->{config} eq 'HASH' ? 
-      $params->{config} : 
+    $config = ref $params->{config} eq 'HASH' ?
+      $params->{config} :
       do {
         #read config
-        my $config_file = $self->config_dir->file($params->{config});
-        $self->load_config_file($config_file);
+        my $config_file = io->catfile($self->config_dir, $params->{config});
+        $self->load_config_file("$config_file");
       };
   } elsif ($params->{all}) {
     my %excludes = map {$_=>1} @{$params->{excludes}||[]};
-    $config = { 
+    $config = {
       might_have => { fetch => 0 },
       has_many => { fetch => 0 },
       belongs_to => { fetch => 0 },
@@ -627,16 +621,16 @@ sub dump {
     DBIx::Class::Exception->throw('must pass config or set all');
   }
 
-  my $output_dir = dir($params->{directory});
-  unless (-e $output_dir) {
+  my $output_dir = io->dir($params->{directory});
+  unless (-e "$output_dir") {
     $output_dir->mkpath ||
     DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
   }
 
   $self->msg("generating fixtures");
-  my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
+  my $tmp_output_dir = io->dir(tempdir);
 
-  if (-e $tmp_output_dir) {
+  if (-e "$tmp_output_dir") {
     $self->msg("- clearing existing $tmp_output_dir");
     $tmp_output_dir->rmtree;
   }
@@ -644,14 +638,10 @@ sub dump {
   $tmp_output_dir->mkpath;
 
   # write version file (for the potential benefit of populate)
-  $tmp_output_dir->file('_dumper_version')
-                 ->openw
-                 ->print($VERSION);
+  $tmp_output_dir->file('_dumper_version')->print($VERSION);
 
   # write our current config set
-  $tmp_output_dir->file('_config_set')
-                 ->openw
-                 ->print( Dumper $config );
+  $tmp_output_dir->file('_config_set')->print( Dumper $config );
 
   $config->{rules} ||= {};
   my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
@@ -673,22 +663,22 @@ sub dump {
     if ($source->{cond} and ref $source->{cond} eq 'HASH') {
       # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
-      $source->{cond} = { 
-        map { 
-          $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} 
-                                                   : $source->{cond}->{$_} 
-        } keys %{$source->{cond}} 
+      $source->{cond} = {
+        map {
+          $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
+                                                   : $source->{cond}->{$_}
+        } keys %{$source->{cond}}
       };
     }
 
-    $rs = $rs->search($source->{cond}, { join => $source->{join} }) 
+    $rs = $rs->search($source->{cond}, { join => $source->{join} })
       if $source->{cond};
 
     $self->msg("- dumping $source->{class}");
 
     my %source_options = ( set => { %{$config}, %{$source} } );
     if ($source->{quantity}) {
-      $rs = $rs->search({}, { order_by => $source->{order_by} }) 
+      $rs = $rs->search({}, { order_by => $source->{order_by} })
        if $source->{order_by};
 
      if ($source->{quantity} =~ /^\d+$/) {
@@ -712,25 +702,24 @@ sub dump {
   }
 
   # clear existing output dir
-  foreach my $child ($output_dir->children) {
+  foreach my $child ($output_dir->all) {
     if ($child->is_dir) {
-      next if ($child eq $tmp_output_dir);
-      if (grep { $_ =~ /\.fix/ } $child->children) {
+      next if ("$child" eq "$tmp_output_dir");
+      if (grep { $_ =~ /\.fix/ } $child->all) {
         $child->rmtree;
       }
     } elsif ($child =~ /_dumper_version$/) {
-      $child->remove;
+      $child->unlink;
     }
   }
 
   $self->msg("- moving temp dir to $output_dir");
-  move($_, dir($output_dir, $_->relative($_->parent)->stringify)) 
-    for $tmp_output_dir->children;
+  $tmp_output_dir->copy("$output_dir");
 
-  if (-e $output_dir) {
+  if (-e "$output_dir") {
     $self->msg("- clearing tmp dir $tmp_output_dir");
     # delete existing fixture set
-    $tmp_output_dir->remove;
+    $tmp_output_dir->rmtree;
   }
 
   $self->msg("done");
@@ -741,7 +730,7 @@ sub dump {
 sub load_config_file {
   my ($self, $config_file) = @_;
   DBIx::Class::Exception->throw("config does not exist at $config_file")
-    unless -e $config_file;
+    unless -e "$config_file";
 
   my $config = Config::Any::JSON->load($config_file);
 
@@ -751,27 +740,27 @@ sub load_config_file {
     DBIx::Class::Exception->throw(
       'includes params of config must be an array ref of hashrefs'
     ) unless ref $incs eq 'ARRAY';
-    
+
     foreach my $include_config (@$incs) {
       DBIx::Class::Exception->throw(
         'includes params of config must be an array ref of hashrefs'
       ) unless (ref $include_config eq 'HASH') && $include_config->{file};
-      
+
       my $include_file = $self->config_dir->file($include_config->{file});
 
       DBIx::Class::Exception->throw("config does not exist at $include_file")
-        unless -e $include_file;
-      
+        unless -e "$include_file";
+
       my $include = Config::Any::JSON->load($include_file);
       $self->msg($include);
       $config = merge( $config, $include );
     }
     delete $config->{includes};
   }
-  
+
   # validate config
   return DBIx::Class::Exception->throw('config has no sets')
-    unless $config && $config->{sets} && 
+    unless $config && $config->{sets} &&
            ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
 
   $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
@@ -788,9 +777,9 @@ sub dump_rs {
     $self->dump_object($row, $params);
   }
 }
- 
+
 sub dump_object {
-  my ($self, $object, $params) = @_;  
+  my ($self, $object, $params) = @_;
   my $set = $params->{set};
 
   my $v = Data::Visitor::Callback->new(
@@ -815,21 +804,21 @@ sub dump_object {
       },
       catfile => sub {
         my ($self, @args) = @_;
-        catfile(@args);
+        "".io->catfile(@args);
       },
       catdir => sub {
         my ($self, @args) = @_;
-        catdir(@args);
+        "".io->catdir(@args);
      },
    };
 
-    my $subsre = join( '|', keys %$subs ); 
+    my $subsre = join( '|', keys %$subs );
    $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
    return $_;
    }
  );
-  
+
  $v->visit( $set );
 
 die 'no dir passed to dump_object' unless $params->{set_dir};
@@ -838,7 +827,7 @@ sub dump_object {
 
   my @inherited_attrs = @{$self->_inherited_attributes};
 
   my @pk_vals = map {
-    $object->get_column($_) 
+    $object->get_column($_)
   } $object->primary_columns;
 
   my $key = join("\0", @pk_vals);
@@ -848,17 +837,17 @@ sub dump_object {
 
   # write dir and gen filename
-  my $source_dir = $params->{set_dir}->subdir(lc $src->from);
+  my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
   $source_dir->mkpath(0, 0777);
 
-  # strip dir separators from file name
-  my $file = $source_dir->file(
+  # Convert characters not allowed on Windows
+  my $file = io->catfile("$source_dir",
       join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
   );
 
   # write file
   unless ($exists) {
-    $self->msg('-- dumping ' . $file->stringify, 2);
+    $self->msg('-- dumping ' . "$file", 2);
     my %ds = $object->get_columns;
 
@@ -872,7 +861,7 @@ sub dump_object {
           $ds{external}->{$field} =
             encode_base64( $class
-              ->backup($key => $args));
+              ->backup($key => $args),'');
         }
       }
 
@@ -892,7 +881,13 @@ sub dump_object {
     next unless $value
       && $col_info->{_inflate_info}
-      && uc($col_info->{data_type}) eq 'DATETIME';
+      && (
+          (uc($col_info->{data_type}) eq 'DATETIME')
+            or (uc($col_info->{data_type}) eq 'DATE')
+            or (uc($col_info->{data_type}) eq 'TIME')
+            or (uc($col_info->{data_type}) eq 'TIMESTAMP')
+            or (uc($col_info->{data_type}) eq 'INTERVAL')
+         );
 
     $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
   }
@@ -903,7 +898,8 @@ sub dump_object {
 
   # do the actual dumping
   my $serialized = Dump(\%ds)->Out();
-  $file->openw->print($serialized);
+
+  $file->print($serialized);
 }
 
 # don't bother looking at rels unless we are actually planning to dump at least one type
@@ -924,19 +920,19 @@ sub dump_object {
   # if belongs_to or might_have with might_have param set or has_many with
   # has_many param set then
   if (
-    ( $info->{attrs}{accessor} eq 'single' && 
-      (!$info->{attrs}{join_type} || $might_have) 
+    ( $info->{attrs}{accessor} eq 'single' &&
+      (!$info->{attrs}{join_type} || $might_have)
     )
-    || $info->{attrs}{accessor} eq 'filter' 
-    || 
+    || $info->{attrs}{accessor} eq 'filter'
+    ||
     ($info->{attrs}{accessor} eq 'multi' && $has_many)
   ) {
-    my $related_rs = $object->related_resultset($name); 
+    my $related_rs = $object->related_resultset($name);
     my $rule = $set->{rules}->{$related_rs->result_source->source_name};
 
     # these parts of the rule only apply to has_many rels
-    if ($rule && $info->{attrs}{accessor} eq 'multi') { 
+    if ($rule && $info->{attrs}{accessor} eq 'multi') {
       $related_rs = $related_rs->search(
-        $rule->{cond}, 
+        $rule->{cond},
        { join => $rule->{join} }
      ) if ($rule->{cond});
@@ -946,23 +942,23 @@ sub dump_object {
      ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
 
      $related_rs = $related_rs->search(
-        {}, 
+        {},
        { order_by => $rule->{order_by} }
-      ) if ($rule->{order_by}); 
+      ) if ($rule->{order_by});
    }
 
-    if ($set->{has_many}{quantity} && 
+    if ($set->{has_many}{quantity} &&
        $set->{has_many}{quantity} =~ /^\d+$/) {
      $related_rs = $related_rs->search(
-        {}, 
+        {},
        { rows => $set->{has_many}->{quantity} }
      );
    }
 
    my %c_params = %{$params};
 
    # inherit date param
-    my %mock_set = map { 
-      $_ => $set->{$_} 
+    my %mock_set = map {
+      $_ => $set->{$_}
    } grep { $set->{$_} } @inherited_attrs;
 
    $c_params{set} = \%mock_set;
@@ -970,14 +966,14 @@ sub dump_object {
      if $rule && $rule->{fetch};
    $self->dump_rs($related_rs, \%c_params);
-    } 
+    }
   }
 }
- 
+
   return unless $set && $set->{fetch};
   foreach my $fetch (@{$set->{fetch}}) {
     # inherit date param
-    $fetch->{$_} = $set->{$_} foreach 
+    $fetch->{$_} = $set->{$_} foreach
       grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
     my $related_rs = $object->related_resultset($fetch->{rel});
     my $rule = $set->{rules}->{$related_rs->result_source->source_name};
@@ -989,22 +985,22 @@ sub dump_object {
      } elsif ($rule->{fetch}) {
        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
      }
-    } 
+    }
 
-    die "relationship $fetch->{rel} does not exist for " . $src->source_name 
+    die "relationship $fetch->{rel} does not exist for " . $src->source_name
      unless ($related_rs);
 
    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
-      $fetch->{cond} = { map {
-        $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
-                                                : $fetch->{cond}->{$_} 
+      $fetch->{cond} = { map {
+        $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
+                                                : $fetch->{cond}->{$_}
      } keys %{$fetch->{cond}} };
    }
 
    $related_rs = $related_rs->search(
-      $fetch->{cond}, 
+      $fetch->{cond},
      { join => $fetch->{join} }
    ) if $fetch->{cond};
@@ -1013,7 +1009,7 @@ sub dump_object {
      { rows => $fetch->{quantity} }
    ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
 
    $related_rs = $related_rs->search(
-      {}, 
+      {},
      { order_by => $fetch->{order_by} }
    ) if $fetch->{order_by};
@@ -1043,7 +1039,7 @@ sub _generate_schema {
  unless( $pre_schema ) {
    return DBIx::Class::Exception->throw('connection details not valid');
  }
-  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
+  my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
  $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
  my $dbh = $pre_schema->storage->dbh;
@@ -1052,8 +1048,8 @@ sub _generate_schema {
  $pre_schema->storage->txn_do(sub {
    $pre_schema->storage->with_deferred_fk_checks(sub {
      foreach my $table (@tables) {
-        eval { 
-          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') ) 
+        eval {
+          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
        };
      }
    });
@@ -1105,7 +1101,7 @@ example:
    configs => [qw/one.json other.json/],
    directory_template => sub {
      my ($fixture, $params, $set) = @_;
-      return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+      return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
    },
  });
@@ -1148,7 +1144,7 @@ example:
    schema => $schema,
    directory_template => sub {
      my ($fixture, $params, $set) = @_;
-      return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+      return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
    },
  });
@@ -1174,13 +1170,13 @@ sub dump_all_config_sets {
 
  $fixtures->populate( {
    # directory to look for fixtures in, as specified to dump
-    directory => '/home/me/app/fixtures', 
+    directory => '/home/me/app/fixtures',
 
    # DDL to deploy
-    ddl => '/home/me/app/sql/ddl.sql', 
+    ddl => '/home/me/app/sql/ddl.sql',
 
    # database to clear, deploy and then populate
-    connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], 
+    connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
 
    # DDL to deploy after populating records, i.e. FK constraints
    post_ddl => '/home/me/app/sql/post_ddl.sql',
 
@@ -1188,13 +1184,17 @@ sub dump_all_config_sets {
    # use CASCADE option when dropping tables
    cascade => 1,
 
-    # optional, set to 1 to run ddl but not populate 
+    # optional, set to 1 to run ddl but not populate
    no_populate => 0,
 
-    # optional, set to 1 to run each fixture through ->create rather than have 
+    # optional, set to 1 to run each fixture through ->create rather than have
    # each $rs populated using $rs->populate. Useful if you have overridden new() logic
-    # that effects the value of column(s). 
-    use_create => 0,
+    # that affects the value of column(s).
+    use_create => 0,
+
+    # optional, same as use_create except with find_or_create.
+    # Useful if you are populating a persistent data store.
+    use_find_or_create => 0,
 
    # Don't try to clean the database, just populate over what's there. Requires
    # schema option. Use this if you want to handle removing old data yourself
@@ -1219,7 +1219,7 @@ If your tables have foreign key constraints you may want to use the
 cascade attribute which will make the drop table functionality cascade, i.e.
 'DROP TABLE $table CASCADE'.
 
-C<directory> is a required attribute. 
+C<directory> is a required attribute.
 
 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
@@ -1240,23 +1240,23 @@ sub populate {
  DBIx::Class::Exception->throw('directory param not specified')
    unless $params->{directory};
 
-  my $fixture_dir = dir(delete $params->{directory});
+  my $fixture_dir = io->dir(delete $params->{directory});
  DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
-    unless -d $fixture_dir;
+    unless -d "$fixture_dir";
 
  my $ddl_file;
  my $dbh;
  my $schema;
 
  if ($params->{ddl} && $params->{connection_details}) {
-    $ddl_file = file(delete $params->{ddl});
-    unless (-e $ddl_file) {
+    $ddl_file = io->file(delete $params->{ddl});
+    unless (-e "$ddl_file") {
      return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
    }
    unless (ref $params->{connection_details} eq 'ARRAY') {
      return DBIx::Class::Exception->throw('connection details must be an arrayref');
    }
-    $schema = $self->_generate_schema({ 
-      ddl => $ddl_file, 
+    $schema = $self->_generate_schema({
+      ddl => "$ddl_file",
      connection_details => delete $params->{connection_details},
      %{$params}
    });
@@ -1267,13 +1267,12 @@ sub populate {
  }
 
-  return 1 if $params->{no_populate}; 
-  
+  return 1 if $params->{no_populate};
+
  $self->msg("\nimporting fixtures");
-  my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
-  my $version_file = file($fixture_dir, '_dumper_version');
-  my $config_set_path = file($fixture_dir, '_config_set');
-  my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
+  my $tmp_fixture_dir = io->dir(tempdir());
+  my $config_set_path = io->file($fixture_dir, '_config_set');
+  my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
 
  my $v = Data::Visitor::Callback->new(
    plain_value => sub {
@@ -1297,21 +1296,21 @@ sub populate {
      },
      catfile => sub {
        my ($self, @args) = @_;
-        catfile(@args);
+        io->catfile(@args);
      },
      catdir => sub {
        my ($self, @args) = @_;
-        catdir(@args);
+        io->catdir(@args);
      },
    };
 
-    my $subsre = join( '|', keys %$subs ); 
+    my $subsre = join( '|', keys %$subs );
    $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
    return $_;
    }
  );
-  
+
  $v->visit( $config_set );
 
@@ -1321,22 +1320,19 @@ sub populate {
    @{$config_set->{sets}}
  }
 
-#  DBIx::Class::Exception->throw('no version file found');
-#    unless -e $version_file;
-
-  if (-e $tmp_fixture_dir) {
+  if (-e "$tmp_fixture_dir") {
    $self->msg("- deleting existing temp directory $tmp_fixture_dir");
    $tmp_fixture_dir->rmtree;
  }
  $self->msg("- creating temp dir");
  $tmp_fixture_dir->mkpath();
 
-  for ( map { $schema->source($_)->from } $schema->sources) {
-    my $from_dir = $fixture_dir->subdir($_);
-    next unless -e $from_dir;
-    dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
+  for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
+    my $from_dir = io->catdir($fixture_dir, $_);
+    next unless -e "$from_dir";
+    $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
  }
 
-  unless (-d $tmp_fixture_dir) {
+  unless (-d "$tmp_fixture_dir") {
    DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
  }
@@ -1353,17 +1349,18 @@ sub populate {
        $formatter->format_datetime(DateTime->today->add_duration($_))
      };
    }
-    $callbacks{object} ||= "visit_ref"; 
+    $callbacks{object} ||= "visit_ref";
    $fixup_visitor = new Data::Visitor::Callback(%callbacks);
  }
 
+  my @sorted_source_names = $self->_get_sorted_sources( $schema );
  $schema->storage->txn_do(sub {
    $schema->storage->with_deferred_fk_checks(sub {
-      foreach my $source (sort $schema->sources) {
+      foreach my $source (@sorted_source_names) {
        $self->msg("- adding " . $source);
        my $rs = $schema->resultset($source);
-        my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
-        next unless (-e $source_dir);
+        my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
+        next unless (-e "$source_dir");
        my @rows;
        while (my $file = $source_dir->next) {
          next unless ($file =~ /\.fix$/);
@@ -1386,16 +1383,38 @@ sub populate {
          }
          if ( $params->{use_create} ) {
            $rs->create( $HASH1 );
+          } elsif( $params->{use_find_or_create} ) {
+            $rs->find_or_create( $HASH1 );
          } else {
            push(@rows, $HASH1);
          }
        }
        $rs->populate(\@rows) if scalar(@rows);
+
+        ## Now we need to do some db specific cleanup
+        ## this probably belongs in a more isolated space. Right now this is
+        ## to just handle PostgreSQL SERIAL types that use sequences
+
+        my $table = $rs->result_source->name;
+        for my $column(my @columns = $rs->result_source->columns) {
+          my $info = $rs->result_source->column_info($column);
+          if(my $sequence = $info->{sequence}) {
+            $self->msg("- updating sequence $sequence");
+            $rs->result_source->storage->dbh_do(sub {
+              my ($storage, $dbh, @cols) = @_;
+              $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
+              my $sth = $dbh->prepare($sql);
+              my $rv = $sth->execute or die $sth->errstr;
+              $self->msg("- $sql");
+            });
+          }
+        }
+
      }
    });
  });
  $self->do_post_ddl( {
-    schema=>$schema, 
+    schema=>$schema,
    post_ddl=>$params->{post_ddl}
  } ) if $params->{post_ddl};
@@ -1405,6 +1424,92 @@ sub populate {
  return 1;
 }
 
+# the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
+sub _get_sorted_sources {
+  my ( $self, $dbicschema ) = @_;
+
+
+  my %table_monikers = map { $_ => 1 } $dbicschema->sources;
+
+  my %tables;
+  foreach my $moniker (sort keys %table_monikers) {
+    my $source = $dbicschema->source($moniker);
+
+    my $table_name = $source->name;
+    my @primary = $source->primary_columns;
+    my @rels = $source->relationships();
+
+    my %created_FK_rels;
+    foreach my $rel (sort @rels) {
+      my $rel_info = $source->relationship_info($rel);
+
+      # Ignore any rel cond that isn't a straight hash
+      next unless ref $rel_info->{cond} eq 'HASH';
+
+      my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
+
+      # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
+      my $fk_constraint;
+      if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
+        $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
+      } elsif ( $rel_info->{attrs}{accessor}
+                && $rel_info->{attrs}{accessor} eq 'multi' ) {
+        $fk_constraint = 0;
+      } else {
+        $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
+      }
+
+      # Don't add a relation if it's not constraining
+      next unless $fk_constraint;
+
+      my $rel_table = $source->related_source($rel)->source_name;
+      # Make sure we don't create the same relation twice
+      my $key_test = join("\x00", sort @keys);
+      next if $created_FK_rels{$rel_table}->{$key_test};
+
+      if (scalar(@keys)) {
+        $created_FK_rels{$rel_table}->{$key_test} = 1;
+
+        # calculate dependencies: do not consider deferrable constraints and
+        # self-references for dependency calculations
+        if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
+          $tables{$moniker}{$rel_table}++;
+        }
+      }
+    }
+    $tables{$moniker} = {} unless exists $tables{$moniker};
+  }
+
+  # resolve entire dep tree
+  my $dependencies = {
+    map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
+  };
+
+  # return the sorted result
+  return sort {
+    keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
+      ||
+    $a cmp $b
+  } (keys %tables);
+}
+
+sub _resolve_deps {
+  my ( $question, $answers, $seen ) = @_;
+  my $ret = {};
+  $seen ||= {};
+
+  my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
+  $seen{$question} = 1;
+
+  for my $dep (keys %{ $answers->{$question} }) {
+    return {} if $seen->{$dep};
+    my $subdeps = _resolve_deps( $dep, $answers, \%seen );
+    $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
+    ++$ret->{$dep};
+  }
+  return $ret;
+}
+
 sub do_post_ddl {
   my ($self, $params) = @_;
 
@@ -1429,6 +1534,16 @@ sub msg {
   }
 }
 
+# Helper method for ensuring that the name used for a given source
+# is always the same (This is used to name the fixture directories
+# for example)
+
+sub _name_for_source {
+  my ($self, $source) = @_;
+
+  return ref $source->name ? $source->source_name : $source->name;
+}
+
 =head1 AUTHOR
 
   Luke Saunders
@@ -1445,6 +1560,8 @@ sub msg {
 
   Frank Switalski
 
+  Chris Akins
+
 =head1 LICENSE
 
 This library is free software under the same license as perl itself
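
Usage note (editorial addendum, not part of the patch): the hunks above add a use_find_or_create option to populate, dependency-ordered inserts via _get_sorted_sources, and PostgreSQL sequence fixups after loading. A minimal round-trip sketch under stated assumptions — My::Schema, the SQLite DSN, and the artists.json config set are hypothetical stand-ins for your own schema class, connection, and config file; the option names come straight from the POD above:

  use DBIx::Class::Fixtures;
  use My::Schema;    # hypothetical schema class

  my $schema = My::Schema->connect('dbi:SQLite:dbname=app.db');

  my $fixtures = DBIx::Class::Fixtures->new({
    config_dir => '/home/me/app/fixture_configs',
  });

  # dump the rows selected by the named config set
  $fixtures->dump({
    config    => 'artists.json',   # hypothetical config set
    schema    => $schema,
    directory => '/home/me/app/fixtures',
  });

  # repopulate a persistent store without clearing it first:
  # no_deploy skips the drop/deploy step (requires the schema option), and
  # use_find_or_create leaves rows that already exist untouched instead of
  # failing on duplicate keys the way a plain $rs->populate would
  $fixtures->populate({
    directory          => '/home/me/app/fixtures',
    schema             => $schema,
    no_deploy          => 1,
    use_find_or_create => 1,
  });

Because find_or_create issues one SELECT (and possibly one INSERT) per fixture row, it trades the speed of the bulk populate path for idempotence; it is the right choice when topping up a long-lived development or staging database rather than rebuilding one from scratch.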