1 package DBIx::Class::Fixtures;
6 use DBIx::Class::Exception;
7 use Class::Accessor::Grouped;
8 use Path::Class qw(dir file);
10 use Config::Any::JSON;
11 use Data::Dump::Streamer;
12 use Data::Visitor::Callback;
14 use File::Copy::Recursive qw/dircopy/;
15 use File::Copy qw/move/;
16 use Hash::Merge qw( merge );
18 use Class::C3::Componentised;
20 use base qw(Class::Accessor::Grouped);
22 our $namespace_counter = 0;
24 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir _inherited_attributes debug schema_class/);
32 our $VERSION = '1.001002';
40 use DBIx::Class::Fixtures;
44 my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' });
47 config => 'set_config.json',
48 schema => $source_dbic_schema,
49 directory => '/home/me/app/fixtures'
53 directory => '/home/me/app/fixtures',
54 ddl => '/home/me/app/sql/ddl.sql',
55 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
56 post_ddl => '/home/me/app/sql/post_ddl.sql',
61 Dump fixtures from a source database to the filesystem, then import them into another database (with the same schema)
62 at any time. Use them as a constant dataset for running tests against, or for populating development databases
63 when it is impractical to use production clones. Describe the fixture set using relations and conditions based
64 on your DBIx::Class schema.
66 =head1 DEFINE YOUR FIXTURE SET
68 Fixture sets are currently defined in .json files which must reside in your config_dir
69 (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They describe which data to pull and dump
70 from the source database.
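For example, a set file might look something like this (a sketch; Artist and Producer are the example classes used throughout this document):

 {
   sets: [
     {
       class: 'Artist',
       ids: [1, 3]
     },
     {
       class: 'Producer',
       ids: [5],
       fetch: [
         { rel: 'artists', quantity: 2 }
       ]
     }
   ]
 }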
88 This will fetch artists with primary keys 1 and 3, the producer with primary key 5, and two of producer 5's
89 artists, where 'artists' is a has_many DBIx::Class rel from Producer to Artist.
91 The top level attributes are as follows:
95 Sets must be an array of hashes, as in the example given above. Each set defines a set of objects to be
96 included in the fixtures. For details on valid set attributes see L</SET ATTRIBUTES> below.
100 Rules place general conditions on classes. For example, if whenever an artist was dumped you also wanted all
101 of their cds dumped too, you could use a rule to specify this.
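For example (a sketch extending the config above with a rules section):

 {
   sets: [
     {
       class: 'Artist',
       ids: [1, 3]
     },
     {
       class: 'Producer',
       ids: [5],
       fetch: [
         { rel: 'artists', quantity: 2 }
       ]
     }
   ],
   rules: {
     Artist: {
       fetch: [
         { rel: 'cds', quantity: 'all' }
       ]
     }
   }
 }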
125 In this case all the cds of artists 1 and 3, and of producer 5's artists, will be dumped as well. Note that 'cds' is a
126 has_many DBIx::Class relation from Artist to CD. This is equivalent to:
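(A sketch of the rule-free equivalent, with the cds fetch repeated wherever artists are dumped:)

 {
   sets: [
     {
       class: 'Artist',
       ids: [1, 3],
       fetch: [
         { rel: 'cds', quantity: 'all' }
       ]
     },
     {
       class: 'Producer',
       ids: [5],
       fetch: [
         {
           rel: 'artists',
           quantity: 2,
           fetch: [
             { rel: 'cds', quantity: 'all' }
           ]
         }
       ]
     }
   ]
 }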
150 rules must be a hash keyed by class name.
156 To prevent repetition between configs you can include other configs. For example:
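(A sketch; 'base.json' below is just a stand-in for any other config file in the same config_dir:)

 {
   sets: [
     { class: 'Producer', ids: [5] }
   ],
   includes: [
     { file: 'base.json' }
   ]
 }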
168 Includes must be an arrayref of hashrefs, where each hashref has a 'file' key naming another config
169 file in the same directory. The original config is merged with its includes using Hash::Merge.
171 =head2 datetime_relative
173 Only available for MySQL and PostgreSQL at the moment. The value must be one that DateTime::Format::*
174 can parse. For example:
178 class: 'RecentItems',
181 datetime_relative : "2007-10-30 00:00:00"
184 This will work when dumping from a MySQL database and will cause any datetime fields (where data_type => 'datetime'
185 in the column definition of the schema class) to be dumped as a DateTime::Duration object relative to the date specified in
186 the datetime_relative value. For example, if the RecentItem object had a date field set to 2007-10-25, then when the
187 fixture is imported the field will be set to 5 days in the past relative to the current time.
191 Specifies whether to automatically dump might_have relationships. Should be a hash with one attribute - fetch. Set fetch to 1 or 0.
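For example (a sketch reusing the classes from the earlier examples):

 {
   might_have: { fetch: 1 },
   sets: [
     { class: 'Artist', ids: [1, 3] },
     { class: 'Producer', ids: [5] }
   ]
 }

Any might_have relationships of the dumped artists and producers will then be dumped as well.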
206 Note: belongs_to rels are automatically dumped whether you like it or not; this is to avoid FKs pointing nowhere when importing.
207 General rules on has_many rels are not accepted at this top level, but you can turn them on for individual
208 sets - see L</SET ATTRIBUTES>.
210 =head1 SET ATTRIBUTES
214 Required attribute. Specifies the DBIx::Class object class you wish to dump.
218 Array of primary key ids to fetch, basically causing an $rs->find($_) for each. If an id is not in the source db then it
219 just won't get dumped - no warnings or death.
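Roughly speaking, for a set with class 'Artist' and ids [1, 3], the dumper does the equivalent of this (a sketch of the internal behaviour; $schema is the schema passed to dump):

 my $rs = $schema->resultset('Artist');
 my @objects = grep { $_ } map { $rs->find($_) } (1, 3);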
223 Must be either an integer or the string 'all'. Specifying an integer will effectively set the 'rows' attribute on the resultset clause;
224 specifying 'all' will cause the rows attribute to be left off so that all matching rows are dumped. There's no randomising
225 here; it's just the first x rows.
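In resultset terms this is roughly (a sketch of the internal behaviour):

 # quantity: 3
 push @objects, $rs->search({}, { rows => 3 });

 # quantity: 'all'
 push @objects, $rs->all;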
229 A hash specifying the conditions dumped objects must match. Essentially this is a JSON representation of a DBIx::Class search clause. For example:
235 cond: { name: 'Dave' }
239 This will dump all artists whose name is 'Dave'. Essentially $artist_rs->search({ name => 'Dave' })->all.
241 Sometimes in a search clause it's useful to use scalar refs to do things like:
243 $artist_rs->search({ no1_singles => \'> no1_albums' })
245 This could be specified in the cond hash like so:
251 cond: { no1_singles: '\> no1_albums' }
255 So if the value starts with a backslash, the rest of the value is turned into a scalar ref before being passed to search.
259 An array of relationships to be used in the cond clause.
265 cond: { 'cds.position': { '>': 4 } },
270 Fetch all artists who have cds with position greater than 4.
274 Must be an array of hashes. Specifies which rels to also dump. For example:
283 cond: { position: '2' }
288 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
290 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and 'join'. rel is the name of the DBIx::Class
291 rel to follow; the rest are the same as in the set attributes. quantity is necessary for has_many relationships,
292 but is not needed for belongs_to or might_have relationships.
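Putting several of these together (a sketch; the class and rel names are the ones used elsewhere in this document):

 {
   class: 'Producer',
   ids: [5],
   fetch: [
     {
       rel: 'artists',
       quantity: 2,
       cond: { name: 'Dave' }
     }
   ]
 }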
296 Specifies whether to fetch has_many rels for this set. Must be a hash containing keys fetch and quantity.
298 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an integer.
300 Be careful here; dumping has_many rels can lead to a lot of data being dumped.
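For example (a sketch):

 {
   sets: [
     {
       class: 'Artist',
       ids: [1, 3],
       has_many: { fetch: 1, quantity: 'all' }
     }
   ]
 }

This dumps artists 1 and 3 together with every row in each of their has_many rels.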
304 As with has_many but for might_have relationships. Quantity doesn't do anything in this case.
306 This value will be inherited by all fetches in this set. This is not true for the has_many attribute.
308 =head1 RULE ATTRIBUTES
312 Same as with L</SET ATTRIBUTES>
316 Same as with L</SET ATTRIBUTES>
320 Same as with L</SET ATTRIBUTES>
324 Same as with L</SET ATTRIBUTES>
328 Same as with L</SET ATTRIBUTES>
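Putting a few of these together, a rules entry might look like this (a sketch using the classes from the earlier examples):

 rules: {
   Artist: {
     cond: { name: 'Dave' },
     fetch: [
       { rel: 'cds', quantity: 'all' }
     ]
   }
 }

The rule is merged into any set or fetch that dumps Artist objects.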
336 =item Arguments: \%attrs
338 =item Return Value: $fixture_object
342 Returns a new DBIx::Class::Fixtures object. %attrs can have the following parameters:
344 - config_dir: required. Must contain a valid path to the directory in which your .json configs reside.
345 - debug: optional. Verbosity level; 0 (the default) is silent, 1 prints progress messages, 2 adds more detail.
346 - ignore_sql_errors: optional. Ignore errors on import of DDL etc.
349 my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' });
357 unless (ref $params eq 'HASH') {
358 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
361 unless ($params->{config_dir}) {
362 return DBIx::Class::Exception->throw('config_dir param not specified');
365 my $config_dir = dir($params->{config_dir});
366 unless (-e $params->{config_dir}) {
367 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
371 config_dir => $config_dir,
372 _inherited_attributes => [qw/datetime_relative might_have rules/],
373 debug => $params->{debug} || 0,
374 ignore_sql_errors => $params->{ignore_sql_errors}
386 =item Arguments: \%attrs
388 =item Return Value: 1
393 config => 'set_config.json', # config file to use. Must be in the config directory specified in the constructor
394 schema => $source_dbic_schema,
395 directory => '/home/me/app/fixtures' # output directory
401 all => 1, # just dump everything that's in the schema
402 schema => $source_dbic_schema,
403 directory => '/home/me/app/fixtures' # output directory
406 In this case objects will be dumped to subdirectories in the specified directory. For example:
408 /home/me/app/fixtures/artist/1.fix
409 /home/me/app/fixtures/artist/3.fix
410 /home/me/app/fixtures/producer/5.fix
412 schema and directory are required attributes. Also, one of config or all must be specified.
420 unless (ref $params eq 'HASH') {
421 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
424 foreach my $param (qw/schema directory/) {
425 unless ($params->{$param}) {
426 return DBIx::Class::Exception->throw($param . ' param not specified');
430 my $schema = $params->{schema};
433 if ($params->{config}) {
435 $config_file = file($self->config_dir, $params->{config});
436 unless (-e $config_file) {
437 return DBIx::Class::Exception->throw('config does not exist at ' . $config_file);
439 $config = Config::Any::JSON->load($config_file);
442 if ($config->{includes}) {
443 $self->msg($config->{includes});
444 unless (ref $config->{includes} eq 'ARRAY') {
445 return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
447 foreach my $include_config (@{$config->{includes}}) {
448 unless ((ref $include_config eq 'HASH') && $include_config->{file}) {
449 return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
452 my $include_file = file($self->config_dir, $include_config->{file});
453 unless (-e $include_file) {
454 return DBIx::Class::Exception->throw('config does not exist at ' . $include_file);
456 my $include = Config::Any::JSON->load($include_file);
457 $self->msg($include);
458 $config = merge( $config, $include );
460 delete $config->{includes};
464 unless ($config && $config->{sets} && ref $config->{sets} eq 'ARRAY' && scalar(@{$config->{sets}})) {
465 return DBIx::Class::Exception->throw('config has no sets');
468 $config->{might_have} = { fetch => 0 } unless (exists $config->{might_have});
469 $config->{has_many} = { fetch => 0 } unless (exists $config->{has_many});
470 $config->{belongs_to} = { fetch => 1 } unless (exists $config->{belongs_to});
471 } elsif ($params->{all}) {
472 $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] };
474 return DBIx::Class::Exception->throw('must pass config or set all');
477 my $output_dir = dir($params->{directory});
478 unless (-e $output_dir) {
479 $output_dir->mkpath ||
480 return DBIx::Class::Exception->throw('output directory does not exist at ' . $output_dir);
483 $self->msg("generating fixtures");
484 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
486 if (-e $tmp_output_dir) {
487 $self->msg("- clearing existing $tmp_output_dir");
488 $tmp_output_dir->rmtree;
490 $self->msg("- creating $tmp_output_dir");
491 $tmp_output_dir->mkpath;
493 # write version file (for the potential benefit of populate)
494 my $version_file = file($tmp_output_dir, '_dumper_version');
495 write_file($version_file->stringify, $VERSION);
497 $config->{rules} ||= {};
498 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
499 my %options = ( is_root => 1 );
501 foreach my $source (@sources) {
502 # apply rule to set if specified
503 my $rule = $config->{rules}->{$source->{class}};
504 $source = merge( $source, $rule ) if ($rule);
507 my $rs = $schema->resultset($source->{class});
509 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
510 # if value starts with / assume it's meant to be passed as a scalar ref to dbic
511 # ideally this would substitute deeply
512 $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
515 $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
516 $self->msg("- dumping $source->{class}");
518 my %source_options = ( set => { %{$config}, %{$source} } );
519 if ($source->{quantity}) {
520 $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
521 if ($source->{quantity} eq 'all') {
522 push (@objects, $rs->all);
523 } elsif ($source->{quantity} =~ /^\d+$/) {
524 push (@objects, $rs->search({}, { rows => $source->{quantity} }));
526 DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
529 if ($source->{ids}) {
530 my @ids = @{$source->{ids}};
531 my @id_objects = grep { $_ } map { $rs->find($_) } @ids;
532 push (@objects, @id_objects);
534 unless ($source->{quantity} || $source->{ids}) {
535 DBIx::Class::Exception->throw('must specify either quantity or ids');
539 foreach my $object (@objects) {
540 $source_options{set_dir} = $tmp_output_dir;
541 $self->dump_object($object, { %options, %source_options } );
546 while (my $entry = shift @{$self->{queue}}) {
547 $self->dump_object(@$entry);
550 # clear existing output dir
551 foreach my $child ($output_dir->children) {
552 if ($child->is_dir) {
553 next if ($child eq $tmp_output_dir);
554 if (grep { $_ =~ /\.fix/ } $child->children) {
557 } elsif ($child =~ /_dumper_version$/) {
562 $self->msg("- moving temp dir to $output_dir");
563 move($_, dir($output_dir, $_->relative($_->parent)->stringify)) for $tmp_output_dir->children;
564 if (-e $output_dir) {
565 $self->msg("- clearing tmp dir $tmp_output_dir");
566 # delete existing fixture set
567 $tmp_output_dir->remove;
576 my ($self, $object, $params, $rr_info) = @_;
577 my $set = $params->{set};
578 die 'no dir passed to dump_object' unless $params->{set_dir};
579 die 'no object passed to dump_object' unless $object;
581 my @inherited_attrs = @{$self->_inherited_attributes};
583 # write dir and gen filename
584 my $source_dir = dir($params->{set_dir}, lc($object->result_source->from));
585 mkdir($source_dir->stringify, 0777);
587 # strip dir separators from file name
588 my $file = file($source_dir, join('-', map {
589 ( my $a = $object->get_column($_) ) =~ s|[/\\]|_|g; $a;
590 } sort $object->primary_columns) . '.fix');
593 my $exists = (-e $file->stringify) ? 1 : 0;
595 $self->msg('-- dumping ' . $file->stringify, 2);
596 my %ds = $object->get_columns;
598 my $formatter= $object->result_source->schema->storage->datetime_parser;
599 # mess with dates if specified
600 if ($set->{datetime_relative}) {
601 unless ($@ || !$formatter) {
603 if ($set->{datetime_relative} eq 'today') {
604 $dt = DateTime->today;
606 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
609 while (my ($col, $value) = each %ds) {
610 my $col_info = $object->result_source->column_info($col);
613 && $col_info->{_inflate_info}
614 && uc($col_info->{data_type}) eq 'DATETIME';
616 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
619 warn "datetime_relative not supported for this db driver at the moment";
623 # do the actual dumping
624 my $serialized = Dump(\%ds)->Out();
625 write_file($file->stringify, $serialized);
626 my $mode = 0777; chmod $mode, $file->stringify;
629 # don't bother looking at rels unless we are actually planning to dump at least one type
630 return unless ($set->{might_have}->{fetch} || $set->{belongs_to}->{fetch} || $set->{has_many}->{fetch} || $set->{fetch});
632 # dump rels of object
633 my $s = $object->result_source;
635 foreach my $name (sort $s->relationships) {
636 my $info = $s->relationship_info($name);
637 my $r_source = $s->related_source($name);
638 # if belongs_to or might_have with might_have param set or has_many with has_many param set then
639 if (($info->{attrs}{accessor} eq 'single' && (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))) || $info->{attrs}{accessor} eq 'filter' || ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))) {
640 my $related_rs = $object->related_resultset($name);
641 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
642 # these parts of the rule only apply to has_many rels
643 if ($rule && $info->{attrs}{accessor} eq 'multi') {
644 $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
645 $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
646 $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
648 if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
649 $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
651 my %c_params = %{$params};
653 my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
654 $c_params{set} = \%mock_set;
655 # use Data::Dumper; print ' -- ' . Dumper($c_params{set}, $rule->{fetch}) if ($rule && $rule->{fetch});
656 $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
657 # use Data::Dumper; print ' -- ' . Dumper(\%c_params) if ($rule && $rule->{fetch});
658 $self->dump_object($_, \%c_params) foreach $related_rs->all;
663 return unless $set && $set->{fetch};
664 foreach my $fetch (@{$set->{fetch}}) {
666 $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
667 my $related_rs = $object->related_resultset($fetch->{rel});
668 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
670 my $info = $object->result_source->relationship_info($fetch->{rel});
671 if ($info->{attrs}{accessor} eq 'multi') {
672 $fetch = merge( $fetch, $rule );
673 } elsif ($rule->{fetch}) {
674 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
677 die "relationship " . $fetch->{rel} . " does not exist for " . $s->source_name unless ($related_rs);
678 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
679 # if value starts with / assume it's meant to be passed as a scalar ref to dbic
680 # ideally this would substitute deeply
681 $fetch->{cond} = { map { $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_} : $fetch->{cond}->{$_} } keys %{$fetch->{cond}} };
683 $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} }) if ($fetch->{cond});
684 $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} }) if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
685 $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} }) if ($fetch->{order_by});
686 $self->dump_object($_, { %{$params}, set => $fetch }) foreach $related_rs->all;
690 sub _generate_schema {
692 my $params = shift || {};
694 $self->msg("\ncreating schema");
695 # die 'must pass version param to generate_schema_from_ddl' unless $params->{version};
697 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
698 eval "require $schema_class";
702 my $connection_details = $params->{connection_details};
703 $namespace_counter++;
704 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
705 Class::C3::Componentised->inject_base( $namespace => $schema_class );
706 $pre_schema = $namespace->connect(@{$connection_details});
707 unless( $pre_schema ) {
708 return DBIx::Class::Exception->throw('connection details not valid');
710 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
711 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
712 my $dbh = $pre_schema->storage->dbh;
715 $self->msg("- clearing DB of existing tables");
716 eval { $dbh->do('SET foreign_key_checks=0') };
717 foreach my $table (@tables) {
718 eval { $dbh->do('drop table ' . $table . ($params->{cascade} ? ' cascade' : '') ) };
721 # import new ddl file to db
722 my $ddl_file = $params->{ddl};
723 $self->msg("- deploying schema using $ddl_file");
724 my $data = _read_sql($ddl_file);
726 eval { $dbh->do($_) or warn "SQL was:\n $_"};
727 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
729 $self->msg("- finished importing DDL into DB");
731 # load schema object from our new DB
732 $namespace_counter++;
733 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
734 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
735 my $schema = $namespace2->connect(@{$connection_details});
740 my $ddl_file = shift;
742 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
743 my @data = split(/\n/, join('', <$fh>));
744 @data = grep(!/^--/, @data);
745 @data = split(/;/, join('', @data));
747 @data = grep { $_ && $_ !~ /^-- / } @data;
755 =item Arguments: \%attrs
757 =item Return Value: 1
761 $fixtures->populate({
762 directory => '/home/me/app/fixtures', # directory to look for fixtures in, as specified to dump
763 ddl => '/home/me/app/sql/ddl.sql', # DDL to deploy
764 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], # database to clear, deploy and then populate
765 post_ddl => '/home/me/app/sql/post_ddl.sql', # DDL to deploy after populating records, ie. FK constraints
766 cascade => 1, # use CASCADE option when dropping tables
767 no_populate => 0, # optional, set to 1 to run ddl but not populate
770 In this case the database app_dev will be cleared of all tables, then the specified DDL deployed to it,
771 then finally all fixtures found in /home/me/app/fixtures will be added to it. populate will generate
772 its own DBIx::Class schema from the DDL rather than being passed one to use. This is better as
773 custom insert methods are avoided, which can get in the way. In some cases you might not
774 have a DDL, and so this method will eventually allow a $schema object to be passed instead.
776 If needed, you can specify a post_ddl attribute which is a DDL to be applied after all the fixtures
777 have been added to the database. A good use of this option would be to add foreign key constraints
778 since databases like PostgreSQL cannot disable foreign key checks.
780 If your tables have foreign key constraints you may want to use the cascade attribute which will
781 make the drop table functionality cascade, i.e. 'DROP TABLE $table CASCADE'.
783 directory, ddl and connection_details are all required attributes.
790 unless (ref $params eq 'HASH') {
791 return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
794 foreach my $param (qw/directory/) {
795 unless ($params->{$param}) {
796 return DBIx::Class::Exception->throw($param . ' param not specified');
799 my $fixture_dir = dir(delete $params->{directory});
800 unless (-e $fixture_dir) {
801 return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
806 if ($params->{ddl} && $params->{connection_details}) {
807 $ddl_file = file(delete $params->{ddl});
808 unless (-e $ddl_file) {
809 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
811 unless (ref $params->{connection_details} eq 'ARRAY') {
812 return DBIx::Class::Exception->throw('connection details must be an arrayref');
814 } elsif ($params->{schema}) {
815 return DBIx::Class::Exception->throw('passing a schema is not supported at the moment');
817 return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
820 my $schema = $self->_generate_schema({ ddl => $ddl_file, connection_details => delete $params->{connection_details}, %{$params} });
822 return 1 if $params->{no_populate};
824 $self->msg("\nimporting fixtures");
825 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
827 my $version_file = file($fixture_dir, '_dumper_version');
828 unless (-e $version_file) {
829 # return DBIx::Class::Exception->throw('no version file found');
832 if (-e $tmp_fixture_dir) {
833 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
834 $tmp_fixture_dir->rmtree;
836 $self->msg("- creating temp dir");
837 dircopy(dir($fixture_dir, $schema->source($_)->from), dir($tmp_fixture_dir, $schema->source($_)->from)) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources;
840 my $formatter= $schema->storage->datetime_parser;
841 unless ($@ || !$formatter) {
843 if ($params->{datetime_relative_to}) {
844 $callbacks{'DateTime::Duration'} = sub {
845 $params->{datetime_relative_to}->clone->add_duration($_);
848 $callbacks{'DateTime::Duration'} = sub {
849 $formatter->format_datetime(DateTime->today->add_duration($_))
852 $callbacks{object} ||= "visit_ref";
853 $fixup_visitor = Data::Visitor::Callback->new(%callbacks);
856 $schema->storage->with_deferred_fk_checks(sub {
857 foreach my $source (sort $schema->sources) {
858 $self->msg("- adding " . $source);
859 my $rs = $schema->resultset($source);
860 my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
861 next unless (-e $source_dir);
863 while (my $file = $source_dir->next) {
864 next unless ($file =~ /\.fix$/);
865 next if $file->is_dir;
866 my $contents = $file->slurp;
869 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
872 $rs->populate(\@rows) if (scalar(@rows));
876 $self->do_post_ddl({schema=>$schema, post_ddl=>$params->{post_ddl}}) if $params->{post_ddl};
878 $self->msg("- fixtures imported");
879 $self->msg("- cleaning up");
880 $tmp_fixture_dir->rmtree;
885 my ($self, $params) = @_;
887 my $schema = $params->{schema};
888 my $data = _read_sql($params->{post_ddl});
890 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
891 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
893 $self->msg("- finished importing post-populate DDL into DB");
898 my $subject = shift || return;
899 my $level = shift || 1;
900 return unless $self->debug >= $level;
902 print Dumper($subject);
904 print $subject . "\n";
910 Luke Saunders <luke@shadowcatsystems.co.uk>
912 Initial development sponsored by and (c) Takkle, Inc. 2007
916 Ash Berlin <ash@shadowcatsystems.co.uk>
917 Matt S. Trout <mst@shadowcatsystems.co.uk>
918 Drew Taylor <taylor.andrew.j@gmail.com>
922 This library is free software under the same license as Perl itself.