1 package DBIx::Class::Fixtures;
6 use DBIx::Class::Exception;
7 use Class::Accessor::Grouped;
8 use Path::Class qw(dir file);
10 use Config::Any::JSON;
11 use Data::Dump::Streamer;
12 use Data::Visitor::Callback;
14 use File::Copy::Recursive qw/dircopy/;
15 use File::Copy qw/move/;
16 use Hash::Merge qw( merge );
18 use Class::C3::Componentised;
20 use base qw(Class::Accessor::Grouped);
22 our $namespace_counter = 0;
24 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir _inherited_attributes debug schema_class/);
32 our $VERSION = '1.001001';
40 use DBIx::Class::Fixtures;
44 my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' });
47 config => 'set_config.json',
48 schema => $source_dbic_schema,
49 directory => '/home/me/app/fixtures'
53 directory => '/home/me/app/fixtures',
54 ddl => '/home/me/app/sql/ddl.sql',
55 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
56 post_ddl => '/home/me/app/sql/post_ddl.sql',
61 Dump fixtures from a source database to the filesystem, then import them into another database (with the same schema)
62 at any time. Use them as a constant dataset for running tests against, or for populating development databases
63 when it is impractical to use production clones. Fixture sets are described using relations and conditions based
64 on your DBIx::Class schema.
66 =head1 DEFINE YOUR FIXTURE SET
68 Fixture sets are currently defined in .json files which must reside in your config_dir
69 (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They describe which data to pull and dump
70 from the source database.
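For example, a set that produces the result described in the next paragraph might look roughly like this (a reconstructed sketch consistent with that description; class names and ids are only illustrations):

    {
        sets: [{
            class: 'Artist',
            ids: ['1', '3']
        }, {
            class: 'Producer',
            ids: ['5'],
            fetch: [{
                rel: 'artists',
                quantity: '2'
            }]
        }]
    }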
88 This will fetch artists with primary keys 1 and 3, the producer with primary key 5 and two of producer 5's
89 artists, where 'artists' is a has_many DBIx::Class rel from Producer to Artist.
91 The top level attributes are as follows:
95 Sets must be an array of hashes, as in the example given above. Each set defines a set of objects to be
96 included in the fixtures. For details on valid set attributes see L</SET ATTRIBUTES> below.
100 Rules place general conditions on classes. If, say, whenever an artist is dumped you also want all
101 of their cds dumped too, you can use a rule to specify this. For example:
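A sketch of such a config (the Artist/CD classes and ids follow the example above; treat the exact values as illustrative):

    {
        sets: [{
            class: 'Artist',
            ids: ['1', '3']
        }, {
            class: 'Producer',
            ids: ['5'],
            fetch: [{
                rel: 'artists',
                quantity: '2'
            }]
        }],
        rules: {
            Artist: {
                fetch: [{
                    rel: 'cds',
                    quantity: 'all'
                }]
            }
        }
    }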
125 In this case the cds of artists 1 and 3, and the cds of all of producer 5's artists, will be dumped as well. Note that 'cds' is a
126 has_many DBIx::Class relation from Artist to CD. This is equivalent to:
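Roughly, that is (an illustrative sketch with the rule unrolled into each set):

    {
        sets: [{
            class: 'Artist',
            ids: ['1', '3'],
            fetch: [{
                rel: 'cds',
                quantity: 'all'
            }]
        }, {
            class: 'Producer',
            ids: ['5'],
            fetch: [{
                rel: 'artists',
                quantity: '2',
                fetch: [{
                    rel: 'cds',
                    quantity: 'all'
                }]
            }]
        }]
    }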
150 rules must be a hash keyed by class name.
156 To prevent repetition between configs you can include other configs. For example:
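Something along these lines (a sketch; 'base.json' is just a hypothetical file name):

    {
        sets: [{
            class: 'Producer',
            ids: ['5']
        }],
        includes: [{
            file: 'base.json'
        }]
    }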
168 Includes must be an arrayref of hashrefs, where each hashref has a 'file' key naming another config
169 file in the same directory. The original config is merged with its includes using Hash::Merge.
171 =head2 datetime_relative
173 Only available for MySQL and PostgreSQL at the moment. The value must be one that DateTime::Format::*
174 can parse. For example:
178 class: 'RecentItems',
181 datetime_relative : "2007-10-30 00:00:00"
184 This will work when dumping from a MySQL database and will cause any datetime fields (where data_type => 'datetime'
185 in the column definition of the schema class) to be dumped as a DateTime::Duration object relative to the date specified in
186 the datetime_relative value. For example, if the RecentItems object had a date field set to 2007-10-25, then when the
187 fixture is imported the field will be set to 5 days in the past relative to the current time.
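To make the round trip concrete, here is a rough sketch of what happens internally (based on the dump_object and populate code further down; $row, $relative_dt and $formatter are stand-ins for the module's internals, not code you need to write):

    # on dump: store the datetime column as a duration relative to datetime_relative
    my $duration = $row->get_inflated_column('date')       # e.g. 2007-10-25
                       ->subtract_datetime($relative_dt);  # minus 2007-10-30 = -5 days
    # on populate: re-apply that duration to "today", so the restored value stays
    # the same distance in the past relative to the current date
    my $restored = $formatter->format_datetime(
        DateTime->today->add_duration($duration)
    );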
191 Specifies whether to automatically dump might_have relationships. Should be a hash with one attribute - fetch. Set fetch to 1 or 0.
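For example, to turn might_have dumping on for a whole config (sketch):

    {
        might_have: { fetch: 1 },
        sets: [{
            class: 'Artist',
            ids: ['1', '3']
        }]
    }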
206 Note: belongs_to rels are automatically dumped whether you like it or not; this is to avoid FKs pointing to nowhere when importing.
207 General rules on has_many rels are not accepted at this top level, but you can turn them on for individual
208 sets - see L</SET ATTRIBUTES>.
210 =head1 SET ATTRIBUTES
214 Required attribute. Specifies the DBIx::Class object class you wish to dump.
218 Array of primary key ids to fetch, basically causing a $rs->find($_) for each. If an id is not in the source db then it
219 simply won't get dumped - no warnings, no death.
223 Must be either an integer or the string 'all'. Specifying an integer will effectively set the 'rows' attribute on the resultset clause;
224 specifying 'all' will leave the rows attribute off so that all matching rows are dumped. There's no randomising
225 here; it's just the first x rows.
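For example, to dump only the first three artists (illustrative only):

    {
        sets: [{
            class: 'Artist',
            quantity: 3
        }]
    }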
229 A hash specifying the conditions dumped objects must match. Essentially this is a JSON representation of a DBIx::Class search clause. For example:
235 cond: { name: 'Dave' }
239 This will dump all artists whose name is 'Dave'. Essentially $artist_rs->search({ name => 'Dave' })->all.
241 Sometimes in a search clause it's useful to use scalar refs to do things like:
243 $artist_rs->search({ no1_singles => \'> no1_albums' })
245 This could be specified in the cond hash like so:
251 cond: { no1_singles: '\> no1_albums' }
255 So if the value starts with a backslash, it is turned into a scalar ref before being passed to search.
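In other words, the cond entry above ends up being passed to DBIx::Class as (sketch):

    $artist_rs->search({ no1_singles => \'> no1_albums' });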
259 An array of relationships to be used in the cond clause.
265 cond: { 'cds.position': { '>': 4 } },
270 This will fetch all artists who have cds with a position greater than 4.
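That is, roughly the same as the following DBIx::Class call (a sketch; the resultset and rel names are taken from the example above):

    $schema->resultset('Artist')->search(
        { 'cds.position' => { '>' => 4 } },
        { join => ['cds'] }
    );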
274 Must be an array of hashes. Specifies which rels to also dump. For example:
283 cond: { position: '2' }
288 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
290 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and 'join'. rel is the name of the DBIx::Class
291 rel to follow; the rest are the same as in the set attributes. quantity is necessary for has_many relationships,
292 but not for belongs_to or might_have relationships.
296 Specifies whether to fetch has_many rels for this set. Must be a hash containing keys fetch and quantity.
298 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an integer.
300 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
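For example (illustrative), to fetch up to three rows of each has_many rel in this set:

    has_many: { fetch: 1, quantity: 3 }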
304 As with has_many but for might_have relationships. Quantity doesn't do anything in this case.
306 This value will be inherited by all fetches in this set. This is not true for the has_many attribute.
308 =head1 RULE ATTRIBUTES
312 Same as with L</SET ATTRIBUTES>
316 Same as with L</SET ATTRIBUTES>
320 Same as with L</SET ATTRIBUTES>
324 Same as with L</SET ATTRIBUTES>
328 Same as with L</SET ATTRIBUTES>
336 =item Arguments: \%$attrs
338 =item Return Value: $fixture_object
342 Returns a new DBIx::Class::Fixtures object. %attrs can have the following parameters:
344 - config_dir: required. Must contain a valid path to the directory in which your .json configs reside.
345 - debug: determines whether to be verbose
346 - ignore_sql_errors: ignore errors on import of DDL etc.
349 my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' });
357 unless (ref $params eq 'HASH') {
358 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
361 unless ($params->{config_dir}) {
362 return DBIx::Class::Exception->throw('config_dir param not specified');
365 my $config_dir = dir($params->{config_dir});
366 unless (-e $params->{config_dir}) {
367 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
371 config_dir => $config_dir,
372 _inherited_attributes => [qw/datetime_relative might_have rules/],
373 debug => $params->{debug} || 0,
374 ignore_sql_errors => $params->{ignore_sql_errors}
386 =item Arguments: \%$attrs
388 =item Return Value: 1
393 config => 'set_config.json', # config file to use. must be in the config directory specified in the constructor
394 schema => $source_dbic_schema,
395 directory => '/home/me/app/fixtures' # output directory
401 all => 1, # just dump everything that's in the schema
402 schema => $source_dbic_schema,
403 directory => '/home/me/app/fixtures' # output directory
406 In this case objects will be dumped to subdirectories in the specified directory. For example:
408 /home/me/app/fixtures/artist/1.fix
409 /home/me/app/fixtures/artist/3.fix
410 /home/me/app/fixtures/producer/5.fix
412 schema and directory are required attributes. Also, one of config or all must be specified.
420 unless (ref $params eq 'HASH') {
421 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
424 foreach my $param (qw/schema directory/) {
425 unless ($params->{$param}) {
426 return DBIx::Class::Exception->throw($param . ' param not specified');
430 my $schema = $params->{schema};
433 if ($params->{config}) {
435 $config_file = file($self->config_dir, $params->{config});
436 unless (-e $config_file) {
437 return DBIx::Class::Exception->throw('config does not exist at ' . $config_file);
439 $config = Config::Any::JSON->load($config_file);
442 if ($config->{includes}) {
443 $self->msg($config->{includes});
444 unless (ref $config->{includes} eq 'ARRAY') {
445 return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
447 foreach my $include_config (@{$config->{includes}}) {
448 unless ((ref $include_config eq 'HASH') && $include_config->{file}) {
449 return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
452 my $include_file = file($self->config_dir, $include_config->{file});
453 unless (-e $include_file) {
454 return DBIx::Class::Exception->throw('config does not exist at ' . $include_file);
456 my $include = Config::Any::JSON->load($include_file);
457 $self->msg($include);
458 $config = merge( $config, $include );
460 delete $config->{includes};
464 unless ($config && $config->{sets} && ref $config->{sets} eq 'ARRAY' && scalar(@{$config->{sets}})) {
465 return DBIx::Class::Exception->throw('config has no sets');
468 $config->{might_have} = { fetch => 0 } unless (exists $config->{might_have});
469 $config->{has_many} = { fetch => 0 } unless (exists $config->{has_many});
470 $config->{belongs_to} = { fetch => 1 } unless (exists $config->{belongs_to});
471 } elsif ($params->{all}) {
472 $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] };
473 $self->msg($config);
475 return DBIx::Class::Exception->throw('must pass config or set all');
478 my $output_dir = dir($params->{directory});
479 unless (-e $output_dir) {
480 $output_dir->mkpath ||
481 return DBIx::Class::Exception->throw('unable to create output directory at ' . $output_dir);
484 $self->msg("generating fixtures");
485 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
487 if (-e $tmp_output_dir) {
488 $self->msg("- clearing existing $tmp_output_dir");
489 $tmp_output_dir->rmtree;
491 $self->msg("- creating $tmp_output_dir");
492 $tmp_output_dir->mkpath;
494 # write version file (for the potential benefit of populate)
495 my $version_file = file($tmp_output_dir, '_dumper_version');
496 write_file($version_file->stringify, $VERSION);
498 $config->{rules} ||= {};
499 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
500 my %options = ( is_root => 1 );
502 foreach my $source (@sources) {
503 # apply rule to set if specified
504 my $rule = $config->{rules}->{$source->{class}};
505 $source = merge( $source, $rule ) if ($rule);
508 my $rs = $schema->resultset($source->{class});
510 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
511 # if value starts with a backslash assume it's meant to be passed as a scalar ref to dbic
512 # ideally this would substitute deeply
513 $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
516 $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
517 $self->msg("- dumping $source->{class}");
519 my %source_options = ( set => { %{$config}, %{$source} } );
520 if ($source->{quantity}) {
521 $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
522 if ($source->{quantity} eq 'all') {
523 push (@objects, $rs->all);
524 } elsif ($source->{quantity} =~ /^\d+$/) {
525 push (@objects, $rs->search({}, { rows => $source->{quantity} }));
527 DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
530 if ($source->{ids}) {
531 my @ids = @{$source->{ids}};
532 my @id_objects = grep { $_ } map { $rs->find($_) } @ids;
533 push (@objects, @id_objects);
535 unless ($source->{quantity} || $source->{ids}) {
536 DBIx::Class::Exception->throw('must specify either quantity or ids');
540 foreach my $object (@objects) {
541 $source_options{set_dir} = $tmp_output_dir;
542 $self->dump_object($object, { %options, %source_options } );
547 while (my $entry = shift @{$self->{queue}}) {
548 $self->dump_object(@$entry);
551 # clear existing output dir
552 foreach my $child ($output_dir->children) {
553 if ($child->is_dir) {
554 next if ($child eq $tmp_output_dir);
555 if (grep { $_ =~ /\.fix/ } $child->children) {
558 } elsif ($child =~ /_dumper_version$/) {
563 $self->msg("- moving temp dir to $output_dir");
564 move($_, dir($output_dir, $_->relative($_->parent)->stringify)) for $tmp_output_dir->children;
565 if (-e $output_dir) {
566 $self->msg("- clearing tmp dir $tmp_output_dir");
567 # remove the now-empty temp dir
568 $tmp_output_dir->remove;
577 my ($self, $object, $params, $rr_info) = @_;
578 my $set = $params->{set};
579 die 'no dir passed to dump_object' unless $params->{set_dir};
580 die 'no object passed to dump_object' unless $object;
582 my @inherited_attrs = @{$self->_inherited_attributes};
584 # write dir and gen filename
585 my $source_dir = dir($params->{set_dir}, lc($object->result_source->from));
586 mkdir($source_dir->stringify, 0777);
588 # strip dir separators from file name
589 my $file = file($source_dir, join('-', map {
590 ( my $a = $object->get_column($_) ) =~ s|[/\\]|_|g; $a;
591 } sort $object->primary_columns) . '.fix');
594 my $exists = (-e $file->stringify) ? 1 : 0;
596 $self->msg('-- dumping ' . $file->stringify, 2);
597 my %ds = $object->get_columns;
599 my $formatter= $object->result_source->schema->storage->datetime_parser;
600 # mess with dates if specified
601 if ($set->{datetime_relative}) {
602 unless ($@ || !$formatter) {
604 if ($set->{datetime_relative} eq 'today') {
605 $dt = DateTime->today;
607 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
610 while (my ($col, $value) = each %ds) {
611 my $col_info = $object->result_source->column_info($col);
614 && $col_info->{_inflate_info}
615 && uc($col_info->{data_type}) eq 'DATETIME';
617 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
620 warn "datetime_relative not supported for this db driver at the moment";
624 # do the actual dumping
625 my $serialized = Dump(\%ds)->Out();
626 write_file($file->stringify, $serialized);
627 my $mode = 0777; chmod $mode, $file->stringify;
630 # don't bother looking at rels unless we are actually planning to dump at least one type
631 return unless ($set->{might_have}->{fetch} || $set->{belongs_to}->{fetch} || $set->{has_many}->{fetch} || $set->{fetch});
633 # dump rels of object
634 my $s = $object->result_source;
636 foreach my $name (sort $s->relationships) {
637 my $info = $s->relationship_info($name);
638 my $r_source = $s->related_source($name);
639 # dump the rel if it's a belongs_to, a might_have with the might_have param set, or a has_many with the has_many param set
640 if (($info->{attrs}{accessor} eq 'single' && (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))) || $info->{attrs}{accessor} eq 'filter' || ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))) {
641 my $related_rs = $object->related_resultset($name);
642 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
643 # these parts of the rule only apply to has_many rels
644 if ($rule && $info->{attrs}{accessor} eq 'multi') {
645 $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
646 $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
647 $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
649 if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
650 $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
652 my %c_params = %{$params};
654 my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
655 $c_params{set} = \%mock_set;
656 # use Data::Dumper; print ' -- ' . Dumper($c_params{set}, $rule->{fetch}) if ($rule && $rule->{fetch});
657 $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
658 # use Data::Dumper; print ' -- ' . Dumper(\%c_params) if ($rule && $rule->{fetch});
659 $self->dump_object($_, \%c_params) foreach $related_rs->all;
664 return unless $set && $set->{fetch};
665 foreach my $fetch (@{$set->{fetch}}) {
667 $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
668 my $related_rs = $object->related_resultset($fetch->{rel});
669 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
671 my $info = $object->result_source->relationship_info($fetch->{rel});
672 if ($info->{attrs}{accessor} eq 'multi') {
673 $fetch = merge( $fetch, $rule );
674 } elsif ($rule->{fetch}) {
675 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
678 die "relationship " . $fetch->{rel} . " does not exist for " . $s->source_name unless ($related_rs);
679 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
680 # if value starts with a backslash assume it's meant to be passed as a scalar ref to dbic
681 # ideally this would substitute deeply
682 $fetch->{cond} = { map { $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_} : $fetch->{cond}->{$_} } keys %{$fetch->{cond}} };
684 $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} }) if ($fetch->{cond});
685 $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} }) if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
686 $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} }) if ($fetch->{order_by});
687 $self->dump_object($_, { %{$params}, set => $fetch }) foreach $related_rs->all;
691 sub _generate_schema {
693 my $params = shift || {};
695 $self->msg("\ncreating schema");
696 # die 'must pass version param to generate_schema_from_ddl' unless $params->{version};
698 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
699 eval "require $schema_class";
703 my $connection_details = $params->{connection_details};
704 $namespace_counter++;
705 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
706 Class::C3::Componentised->inject_base( $namespace => $schema_class );
707 $pre_schema = $namespace->connect(@{$connection_details});
708 unless( $pre_schema ) {
709 return DBIx::Class::Exception->throw('connection details not valid');
711 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
712 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
713 my $dbh = $pre_schema->storage->dbh;
716 $self->msg("- clearing DB of existing tables");
717 eval { $dbh->do('SET foreign_key_checks=0') };
718 foreach my $table (@tables) {
719 eval { $dbh->do('drop table ' . $table . ($params->{cascade} ? ' cascade' : '') ) };
722 # import new ddl file to db
723 my $ddl_file = $params->{ddl};
724 $self->msg("- deploying schema using $ddl_file");
725 my $data = _read_sql($ddl_file);
727 eval { $dbh->do($_) or warn "SQL was:\n $_"};
728 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
730 $self->msg("- finished importing DDL into DB");
732 # load schema object from our new DB
733 $namespace_counter++;
734 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
735 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
736 my $schema = $namespace2->connect(@{$connection_details});
741 my $ddl_file = shift;
743 open $fh, '<', $ddl_file or die ("Can't open DDL file, $ddl_file ($!)");
744 my @data = split(/\n/, join('', <$fh>));
745 @data = grep(!/^--/, @data);
746 @data = split(/;/, join('', @data));
748 @data = grep { $_ && $_ !~ /^-- / } @data;
756 =item Arguments: \%$attrs
758 =item Return Value: 1
762 $fixtures->populate({
763 directory => '/home/me/app/fixtures', # directory to look for fixtures in, as specified to dump
764 ddl => '/home/me/app/sql/ddl.sql', # DDL to deploy
765 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], # database to clear, deploy and then populate
766 post_ddl => '/home/me/app/sql/post_ddl.sql', # DDL to deploy after populating records, ie. FK constraints
767 cascade => 1, # use CASCADE option when dropping tables
770 In this case the database app_dev will be cleared of all tables, then the specified DDL deployed to it,
771 then finally all fixtures found in /home/me/app/fixtures will be added to it. populate will generate
772 its own DBIx::Class schema from the DDL rather than being passed one to use. This is better as
773 custom insert methods, which can get in the way, are avoided. In some cases you might not
774 have a DDL, and so this method will eventually allow a $schema object to be passed instead.
776 If needed, you can specify a post_ddl attribute which is a DDL to be applied after all the fixtures
777 have been added to the database. A good use of this option would be to add foreign key constraints
778 since databases like PostgreSQL cannot disable foreign key checks.
780 If your tables have foreign key constraints you may want to use the cascade attribute which will
781 make the drop table functionality cascade, i.e. 'DROP TABLE $table CASCADE'.
783 directory, ddl and connection_details are all required attributes.
790 unless (ref $params eq 'HASH') {
791 return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
794 foreach my $param (qw/directory/) {
795 unless ($params->{$param}) {
796 return DBIx::Class::Exception->throw($param . ' param not specified');
799 my $fixture_dir = dir(delete $params->{directory});
800 unless (-e $fixture_dir) {
801 return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
806 if ($params->{ddl} && $params->{connection_details}) {
807 $ddl_file = file(delete $params->{ddl});
808 unless (-e $ddl_file) {
809 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
811 unless (ref $params->{connection_details} eq 'ARRAY') {
812 return DBIx::Class::Exception->throw('connection details must be an arrayref');
814 } elsif ($params->{schema}) {
815 return DBIx::Class::Exception->throw('passing a schema is not supported at the moment');
817 return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
820 my $schema = $self->_generate_schema({ ddl => $ddl_file, connection_details => delete $params->{connection_details}, %{$params} });
821 $self->msg("\nimporting fixtures");
822 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
824 my $version_file = file($fixture_dir, '_dumper_version');
825 unless (-e $version_file) {
826 # return DBIx::Class::Exception->throw('no version file found');
829 if (-e $tmp_fixture_dir) {
830 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
831 $tmp_fixture_dir->rmtree;
833 $self->msg("- creating temp dir");
834 dircopy(dir($fixture_dir, $schema->source($_)->from), dir($tmp_fixture_dir, $schema->source($_)->from)) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources;
837 my $formatter= $schema->storage->datetime_parser;
838 unless ($@ || !$formatter) {
840 if ($params->{datetime_relative_to}) {
841 $callbacks{'DateTime::Duration'} = sub {
842 $params->{datetime_relative_to}->clone->add_duration($_);
845 $callbacks{'DateTime::Duration'} = sub {
846 $formatter->format_datetime(DateTime->today->add_duration($_))
849 $callbacks{object} ||= "visit_ref";
850 $fixup_visitor = Data::Visitor::Callback->new(%callbacks);
853 my $db = $schema->storage->dbh->{Driver}->{Name};
854 my $dbi_class = "DBIx::Class::Fixtures::DBI::$db";
856 eval "require $dbi_class";
858 $dbi_class = "DBIx::Class::Fixtures::DBI";
859 eval "require $dbi_class";
863 $dbi_class->do_insert($schema, sub {
864 foreach my $source (sort $schema->sources) {
865 $self->msg("- adding " . $source);
866 my $rs = $schema->resultset($source);
867 my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
868 next unless (-e $source_dir);
870 while (my $file = $source_dir->next) {
871 next unless ($file =~ /\.fix$/);
872 next if $file->is_dir;
873 my $contents = $file->slurp;
876 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
879 $rs->populate(\@rows) if (scalar(@rows));
883 $self->do_post_ddl({schema=>$schema, post_ddl=>$params->{post_ddl}}) if $params->{post_ddl};
885 $self->msg("- fixtures imported");
886 $self->msg("- cleaning up");
887 $tmp_fixture_dir->rmtree;
888 eval { $schema->storage->dbh->do('SET foreign_key_checks=1') };
894 my ($self, $params) = @_;
896 my $schema = $params->{schema};
897 my $data = _read_sql($params->{post_ddl});
899 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
900 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
902 $self->msg("- finished importing post-populate DDL into DB");
907 my $subject = shift || return;
908 my $level = shift || 1;
909 return unless $self->debug >= $level;
911 print Dumper($subject);
913 print $subject . "\n";
919 Luke Saunders <luke@shadowcatsystems.co.uk>
921 Initial development sponsored by and (c) Takkle, Inc. 2007
925 Ash Berlin <ash@shadowcatsystems.co.uk>
926 Matt S. Trout <mst@shadowcatsystems.co.uk>
927 Drew Taylor <taylor.andrew.j@gmail.com>
931 This library is free software under the same license as perl itself.