1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08099_07;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file);
10 use Config::Any::JSON;
11 use Data::Dump::Streamer;
12 use Data::Visitor::Callback;
14 use File::Copy::Recursive qw/dircopy/;
15 use File::Copy qw/move/;
16 use Hash::Merge qw( merge );
18 use Class::C3::Componentised;
20 use base qw(Class::Accessor::Grouped);
22 our $namespace_counter = 0;
24 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
25 _inherited_attributes debug schema_class dumped_objects/);
33 our $VERSION = '1.001002';
41 use DBIx::Class::Fixtures;
45 my $fixtures = DBIx::Class::Fixtures->new({
46 config_dir => '/home/me/app/fixture_configs'
50 config => 'set_config.json',
51 schema => $source_dbic_schema,
52 directory => '/home/me/app/fixtures'
56 directory => '/home/me/app/fixtures',
57 ddl => '/home/me/app/sql/ddl.sql',
58 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
59 post_ddl => '/home/me/app/sql/post_ddl.sql',
64 Dump fixtures from source database to filesystem then import to another
65 database (with same schema) at any time. Use as a constant dataset for running
66 tests against or for populating development databases when impractical to use
67 production clones. Describe fixture set using relations and conditions based on
68 your DBIx::Class schema.
70 =head1 DEFINE YOUR FIXTURE SET
72 Fixture sets are currently defined in .json files which must reside in your
73 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
74 describe which data to pull and dump from the source database.
97 This will fetch artists with primary keys 1 and 3, the producer with primary
98 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
99 rel from Producer to Artist.
101 The top level attributes are as follows:
105 Sets must be an array of hashes, as in the example given above. Each set
106 defines a set of objects to be included in the fixtures. For details on valid
107 set attributes see L</SET ATTRIBUTES> below.
111 Rules place general conditions on classes. For example if whenever an artist
112 was dumped you also wanted all of their cds dumped too, then you could use a
113 rule to specify this. For example:
142 In this case all the cds of artists 1, 3 and all producer 5's artists will be
143 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
144 to CD. This is equivalent to:
171 rules must be a hash keyed by class name.
177 To prevent repetition between configs you can include other configs. For
186 { file: 'base.json' }
190 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
191 which is the name of another config file in the same directory. The original
192 config is merged with its includes using L<Hash::Merge>.
194 =head2 datetime_relative
196 Only available for MySQL and PostgreSQL at the moment, must be a value that
197 DateTime::Format::* can parse. For example:
201 class: 'RecentItems',
204 datetime_relative : "2007-10-30 00:00:00"
207 This will work when dumping from a MySQL database and will cause any datetime
208 fields (where datatype => 'datetime' in the column def of the schema class) to
209 be dumped as a DateTime::Duration object relative to the date specified in the
210 datetime_relative value. For example if the RecentItem object had a date field
211 set to 2007-10-25, then when the fixture is imported the field will be set to 5
212 days in the past relative to the current time.
216 Specifies whether to automatically dump might_have relationships. Should be a
217 hash with one attribute - fetch. Set fetch to 1 or 0.
220 might_have: { fetch: 1 },
233 Note: belongs_to rels are automatically dumped whether you like it or not, this
234 is to avoid FKs to nowhere when importing. General rules on has_many rels are
235 not accepted at this top level, but you can turn them on for individual sets -
236 see L</SET ATTRIBUTES>.
238 =head1 SET ATTRIBUTES
242 Required attribute. Specifies the DBIx::Class object class you wish to dump.
246 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
247 If the id is not in the source db then it just won't get dumped, no warnings or
252 Must be either an integer or the string 'all'. Specifying an integer will
253 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
254 will cause the rows attribute to be left off and for all matching rows to be
255 dumped. There's no randomising here, it's just the first x rows.
259 A hash specifying the conditions dumped objects must match. Essentially this is
260 a JSON representation of a DBIx::Class search clause. For example:
266 cond: { name: 'Dave' }
270 This will dump all artists whose name is 'Dave'. Essentially
271 $artist_rs->search({ name => 'Dave' })->all.
273 Sometimes in a search clause it's useful to use scalar refs to do things like:
275 $artist_rs->search({ no1_singles => \'> no1_albums' })
277 This could be specified in the cond hash like so:
283 cond: { no1_singles: '\> no1_albums' }
287 So if the value starts with a backslash the value is made a scalar ref before
288 being passed to search.
292 An array of relationships to be used in the cond clause.
298 cond: { 'cds.position': { '>': 4 } },
303 Fetch all artists who have cds with position greater than 4.
307 Must be an array of hashes. Specifies which rels to also dump. For example:
316 cond: { position: '2' }
321 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
323 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
324 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
325 as in the set attributes. quantity is necessary for has_many relationships, but
326 not for belongs_to or might_have relationships.
330 Specifies whether to fetch has_many rels for this set. Must be a hash
331 containing keys fetch and quantity.
333 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
336 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
340 As with has_many but for might_have relationships. Quantity doesn't do anything
343 This value will be inherited by all fetches in this set. This is not true for
344 the has_many attribute.
346 =head1 RULE ATTRIBUTES
350 Same as with L</SET ATTRIBUTES>
354 Same as with L</SET ATTRIBUTES>
358 Same as with L</SET ATTRIBUTES>
362 Same as with L</SET ATTRIBUTES>
366 Same as with L</SET ATTRIBUTES>
374 =item Arguments: \%$attrs
376 =item Return Value: $fixture_object
380 Returns a new DBIx::Class::Fixture object. %attrs can have the following
387 required. must contain a valid path to the directory in which your .json
392 determines whether to be verbose
394 =item ignore_sql_errors:
396 ignore errors on import of DDL etc
400 my $fixtures = DBIx::Class::Fixtures->new( {
401 config_dir => '/home/me/app/fixture_configs'
410 unless (ref $params eq 'HASH') {
411 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
414 unless ($params->{config_dir}) {
415 return DBIx::Class::Exception->throw('config_dir param not specified');
418 my $config_dir = dir($params->{config_dir});
419 unless (-e $params->{config_dir}) {
420 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
424 config_dir => $config_dir,
425 _inherited_attributes => [qw/datetime_relative might_have rules/],
426 debug => $params->{debug} || 0,
427 ignore_sql_errors => $params->{ignore_sql_errors}
432 $self->dumped_objects({});
441 =item Arguments: \%$attrs
443 =item Return Value: 1
448 config => 'set_config.json', # config file to use. must be in the config
449 # directory specified in the constructor
450 schema => $source_dbic_schema,
451 directory => '/home/me/app/fixtures' # output directory
457 all => 1, # just dump everything that's in the schema
458 schema => $source_dbic_schema,
459 directory => '/home/me/app/fixtures' # output directory
462 In this case objects will be dumped to subdirectories in the specified
463 directory. For example:
465 /home/me/app/fixtures/artist/1.fix
466 /home/me/app/fixtures/artist/3.fix
467 /home/me/app/fixtures/producer/5.fix
469 schema and directory are required attributes. Also, one of config or all must be specified.
477 unless (ref $params eq 'HASH') {
478 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
481 foreach my $param (qw/schema directory/) {
482 unless ($params->{$param}) {
483 return DBIx::Class::Exception->throw($param . ' param not specified');
487 my $schema = $params->{schema};
489 if ($params->{config}) {
491 my $config_file = $self->config_dir->file($params->{config});
492 $config = $self->load_config_file($config_file);
493 } elsif ($params->{all}) {
495 might_have => { fetch => 0 },
496 has_many => { fetch => 0 },
497 belongs_to => { fetch => 0 },
498 sets => [map {{ class => $_, quantity => 'all' }} $schema->sources]
501 DBIx::Class::Exception->throw('must pass config or set all');
504 my $output_dir = dir($params->{directory});
505 unless (-e $output_dir) {
506 $output_dir->mkpath ||
507 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
510 $self->msg("generating fixtures");
511 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
513 if (-e $tmp_output_dir) {
514 $self->msg("- clearing existing $tmp_output_dir");
515 $tmp_output_dir->rmtree;
517 $self->msg("- creating $tmp_output_dir");
518 $tmp_output_dir->mkpath;
520 # write version file (for the potential benefit of populate)
521 $tmp_output_dir->file('_dumper_version')
525 $config->{rules} ||= {};
526 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
528 foreach my $source (@sources) {
529 # apply rule to set if specified
530 my $rule = $config->{rules}->{$source->{class}};
531 $source = merge( $source, $rule ) if ($rule);
534 my $rs = $schema->resultset($source->{class});
536 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
537 # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
538 # ideally this would substitute deeply
539 $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
542 $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
543 $self->msg("- dumping $source->{class}");
544 my %source_options = ( set => { %{$config}, %{$source} } );
545 if ($source->{quantity}) {
546 $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
547 if ($source->{quantity} =~ /^\d+$/) {
548 $rs = $rs->search({}, { rows => $source->{quantity} });
549 } elsif ($source->{quantity} ne 'all') {
550 DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
553 elsif ($source->{ids} && @{$source->{ids}}) {
554 my @ids = @{$source->{ids}};
555 my (@pks) = $rs->result_source->primary_columns;
556 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
557 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
560 DBIx::Class::Exception->throw('must specify either quantity or ids');
563 $source_options{set_dir} = $tmp_output_dir;
564 $self->dump_rs($rs, \%source_options );
567 # clear existing output dir
568 foreach my $child ($output_dir->children) {
569 if ($child->is_dir) {
570 next if ($child eq $tmp_output_dir);
571 if (grep { $_ =~ /\.fix/ } $child->children) {
574 } elsif ($child =~ /_dumper_version$/) {
579 $self->msg("- moving temp dir to $output_dir");
580 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
581 for $tmp_output_dir->children;
583 if (-e $output_dir) {
584 $self->msg("- clearing tmp dir $tmp_output_dir");
585 # delete existing fixture set
586 $tmp_output_dir->remove;
594 sub load_config_file {
595 my ($self, $config_file) = @_;
596 DBIx::Class::Exception->throw("config does not exist at $config_file")
597 unless -e $config_file;
599 my $config = Config::Any::JSON->load($config_file);
602 if (my $incs = $config->{includes}) {
604 DBIx::Class::Exception->throw(
605 'includes params of config must be an array ref of hashrefs'
606 ) unless ref $incs eq 'ARRAY';
608 foreach my $include_config (@$incs) {
609 DBIx::Class::Exception->throw(
610 'includes params of config must be an array ref of hashrefs'
611 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
613 my $include_file = $self->config_dir->file($include_config->{file});
615 DBIx::Class::Exception->throw("config does not exist at $include_file")
616 unless -e $include_file;
618 my $include = Config::Any::JSON->load($include_file);
619 $self->msg($include);
620 $config = merge( $config, $include );
622 delete $config->{includes};
626 return DBIx::Class::Exception->throw('config has no sets')
627 unless $config && $config->{sets} &&
628 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
630 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
631 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
632 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
638 my ($self, $rs, $params) = @_;
640 while (my $row = $rs->next) {
641 $self->dump_object($row, $params);
646 my ($self, $object, $params) = @_;
647 my $set = $params->{set};
648 die 'no dir passed to dump_object' unless $params->{set_dir};
649 die 'no object passed to dump_object' unless $object;
651 my @inherited_attrs = @{$self->_inherited_attributes};
654 $object->get_column($_)
655 } $object->primary_columns;
657 my $key = join("\0", @pk_vals);
659 my $src = $object->result_source;
660 my $exists = $self->dumped_objects->{$src->name}{$key}++;
663 # write dir and gen filename
664 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
665 $source_dir->mkpath(0, 0777);
667 # strip dir separators from file name
668 my $file = $source_dir->file(join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix');
673 $self->msg('-- dumping ' . $file->stringify, 2);
674 my %ds = $object->get_columns;
676 # mess with dates if specified
677 if ($set->{datetime_relative}) {
678 my $formatter= $object->result_source->schema->storage->datetime_parser;
679 unless ($@ || !$formatter) {
681 if ($set->{datetime_relative} eq 'today') {
682 $dt = DateTime->today;
684 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
687 while (my ($col, $value) = each %ds) {
688 my $col_info = $object->result_source->column_info($col);
691 && $col_info->{_inflate_info}
692 && uc($col_info->{data_type}) eq 'DATETIME';
694 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
697 warn "datetime_relative not supported for this db driver at the moment";
701 # do the actual dumping
702 my $serialized = Dump(\%ds)->Out();
703 $file->openw->print($serialized);
706 # don't bother looking at rels unless we are actually planning to dump at least one type
707 return unless $set->{might_have}->{fetch}
708 || $set->{belongs_to}->{fetch}
709 || $set->{has_many}->{fetch}
712 # dump rels of object
714 foreach my $name (sort $src->relationships) {
715 my $info = $src->relationship_info($name);
716 my $r_source = $src->related_source($name);
717 # if belongs_to or might_have with might_have param set or has_many with has_many param set then
719 ( $info->{attrs}{accessor} eq 'single' &&
720 (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))
722 $info->{attrs}{accessor} eq 'filter' ||
723 ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))
725 my $related_rs = $object->related_resultset($name);
726 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
727 # these parts of the rule only apply to has_many rels
728 if ($rule && $info->{attrs}{accessor} eq 'multi') {
729 $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
730 $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
731 $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
733 if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
734 $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
736 my %c_params = %{$params};
738 my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
739 $c_params{set} = \%mock_set;
740 # use Data::Dumper; print ' -- ' . Dumper($c_params{set}, $rule->{fetch}) if ($rule && $rule->{fetch});
741 $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
742 # use Data::Dumper; print ' -- ' . Dumper(\%c_params) if ($rule && $rule->{fetch});
743 $self->dump_rs($related_rs, \%c_params);
748 return unless $set && $set->{fetch};
749 foreach my $fetch (@{$set->{fetch}}) {
751 $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
752 my $related_rs = $object->related_resultset($fetch->{rel});
753 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
756 my $info = $object->result_source->relationship_info($fetch->{rel});
757 if ($info->{attrs}{accessor} eq 'multi') {
758 $fetch = merge( $fetch, $rule );
759 } elsif ($rule->{fetch}) {
760 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
764 die "relationship " . $fetch->{rel} . " does not exist for " . $src->source_name
765 unless ($related_rs);
767 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
768 # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
769 # ideally this would substitute deeply
770 $fetch->{cond} = { map {
771 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
772 : $fetch->{cond}->{$_}
773 } keys %{$fetch->{cond}} };
776 $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} })
778 $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} })
779 if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
780 $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} })
781 if ($fetch->{order_by});
783 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
787 sub _generate_schema {
789 my $params = shift || {};
791 $self->msg("\ncreating schema");
792 # die 'must pass version param to generate_schema_from_ddl' unless $params->{version};
794 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
795 eval "require $schema_class";
799 my $connection_details = $params->{connection_details};
801 $namespace_counter++;
803 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
804 Class::C3::Componentised->inject_base( $namespace => $schema_class );
806 $pre_schema = $namespace->connect(@{$connection_details});
807 unless( $pre_schema ) {
808 return DBIx::Class::Exception->throw('connection details not valid');
810 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
811 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
812 my $dbh = $pre_schema->storage->dbh;
815 $self->msg("- clearing DB of existing tables");
816 $pre_schema->storage->with_deferred_fk_checks(sub {
817 foreach my $table (@tables) {
819 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
824 # import new ddl file to db
825 my $ddl_file = $params->{ddl};
826 $self->msg("- deploying schema using $ddl_file");
827 my $data = _read_sql($ddl_file);
829 eval { $dbh->do($_) or warn "SQL was:\n $_"};
830 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
832 $self->msg("- finished importing DDL into DB");
834 # load schema object from our new DB
835 $namespace_counter++;
836 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
837 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
838 my $schema = $namespace2->connect(@{$connection_details});
843 my $ddl_file = shift;
845 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
846 my @data = split(/\n/, join('', <$fh>));
847 @data = grep(!/^--/, @data);
848 @data = split(/;/, join('', @data));
850 @data = grep { $_ && $_ !~ /^-- / } @data;
858 =item Arguments: \%$attrs
860 =item Return Value: 1
864 $fixtures->populate( {
865 # directory to look for fixtures in, as specified to dump
866 directory => '/home/me/app/fixtures',
869 ddl => '/home/me/app/sql/ddl.sql',
871 # database to clear, deploy and then populate
872 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
874 # DDL to deploy after populating records, ie. FK constraints
875 post_ddl => '/home/me/app/sql/post_ddl.sql',
877 # use CASCADE option when dropping tables
880 # optional, set to 1 to run ddl but not populate
883 # Dont try to clean the database, just populate over whats there. Requires
884 # schema option. Use this if you want to handle removing old data yourself
889 In this case the database app_dev will be cleared of all tables, then the
890 specified DDL deployed to it, then finally all fixtures found in
891 /home/me/app/fixtures will be added to it. populate will generate its own
892 DBIx::Class schema from the DDL rather than being passed one to use. This is
893 better as custom insert methods are avoided, which can get in the way. In
894 some cases you might not have a DDL, and so this method will eventually allow a
895 $schema object to be passed instead.
897 If needed, you can specify a post_ddl attribute which is a DDL to be applied
898 after all the fixtures have been added to the database. A good use of this
899 option would be to add foreign key constraints since databases like PostgreSQL
900 cannot disable foreign key checks.
902 If your tables have foreign key constraints you may want to use the cascade
903 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
906 C<directory> is a required attribute.
908 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
909 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
912 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
913 attribute containing the connected schema you wish to operate on and set the
914 C<no_deploy> attribute.
921 unless (ref $params eq 'HASH') {
922 return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
925 foreach my $param (qw/directory/) {
926 unless ($params->{$param}) {
927 return DBIx::Class::Exception->throw($param . ' param not specified');
930 my $fixture_dir = dir(delete $params->{directory});
931 unless (-e $fixture_dir) {
932 return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
938 if ($params->{ddl} && $params->{connection_details}) {
939 $ddl_file = file(delete $params->{ddl});
940 unless (-e $ddl_file) {
941 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
943 unless (ref $params->{connection_details} eq 'ARRAY') {
944 return DBIx::Class::Exception->throw('connection details must be an arrayref');
946 $schema = $self->_generate_schema({
948 connection_details => delete $params->{connection_details},
951 } elsif ($params->{schema} && $params->{no_deploy}) {
952 $schema = $params->{schema};
954 return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
958 return 1 if $params->{no_populate};
960 $self->msg("\nimporting fixtures");
961 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
963 my $version_file = file($fixture_dir, '_dumper_version');
964 unless (-e $version_file) {
965 # return DBIx::Class::Exception->throw('no version file found');
968 if (-e $tmp_fixture_dir) {
969 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
970 $tmp_fixture_dir->rmtree;
972 $self->msg("- creating temp dir");
974 dir($fixture_dir, $schema->source($_)->from),
975 dir($tmp_fixture_dir, $schema->source($_)->from)
976 ) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources;
978 unless (-d $tmp_fixture_dir) {
979 return DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
983 my $formatter= $schema->storage->datetime_parser;
984 unless ($@ || !$formatter) {
986 if ($params->{datetime_relative_to}) {
987 $callbacks{'DateTime::Duration'} = sub {
988 $params->{datetime_relative_to}->clone->add_duration($_);
991 $callbacks{'DateTime::Duration'} = sub {
992 $formatter->format_datetime(DateTime->today->add_duration($_))
995 $callbacks{object} ||= "visit_ref";
996 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
999 $schema->storage->with_deferred_fk_checks(sub {
1000 foreach my $source (sort $schema->sources) {
1001 $self->msg("- adding " . $source);
1002 my $rs = $schema->resultset($source);
1003 my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
1004 next unless (-e $source_dir);
1006 while (my $file = $source_dir->next) {
1007 next unless ($file =~ /\.fix$/);
1008 next if $file->is_dir;
1009 my $contents = $file->slurp;
1012 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1013 push(@rows, $HASH1);
1015 $rs->populate(\@rows) if (scalar(@rows));
1019 $self->do_post_ddl( {
1021 post_ddl=>$params->{post_ddl}
1022 } ) if $params->{post_ddl};
1024 $self->msg("- fixtures imported");
1025 $self->msg("- cleaning up");
1026 $tmp_fixture_dir->rmtree;
1031 my ($self, $params) = @_;
1033 my $schema = $params->{schema};
1034 my $data = _read_sql($params->{post_ddl});
1036 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1037 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1039 $self->msg("- finished importing post-populate DDL into DB");
1044 my $subject = shift || return;
1045 my $level = shift || 1;
1046 return unless $self->debug >= $level;
1048 print Dumper($subject);
1050 print $subject . "\n";
1056 Luke Saunders <luke@shadowcatsystems.co.uk>
1058 Initial development sponsored by and (c) Takkle, Inc. 2007
1062 Ash Berlin <ash@shadowcatsystems.co.uk>
1064 Matt S. Trout <mst@shadowcatsystems.co.uk>
1066 Drew Taylor <taylor.andrew.j@gmail.com>
1070 This library is free software under the same license as perl itself