1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file);
10 use Config::Any::JSON;
11 use Data::Dump::Streamer;
12 use Data::Visitor::Callback;
14 use File::Copy::Recursive qw/dircopy/;
15 use File::Copy qw/move/;
16 use Hash::Merge qw( merge );
18 use Class::C3::Componentised;
20 use base qw(Class::Accessor::Grouped);
22 our $namespace_counter = 0;
24 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
25 _inherited_attributes debug schema_class dumped_objects/);
33 our $VERSION = '1.001010';
41 use DBIx::Class::Fixtures;
45 my $fixtures = DBIx::Class::Fixtures->new({
46 config_dir => '/home/me/app/fixture_configs'
50 config => 'set_config.json',
51 schema => $source_dbic_schema,
52 directory => '/home/me/app/fixtures'
56 directory => '/home/me/app/fixtures',
57 ddl => '/home/me/app/sql/ddl.sql',
58 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
59 post_ddl => '/home/me/app/sql/post_ddl.sql',
64 Dump fixtures from source database to filesystem then import to another
65 database (with same schema) at any time. Use as a constant dataset for running
66 tests against or for populating development databases when impractical to use
67 production clones. Describe fixture set using relations and conditions based on
68 your DBIx::Class schema.
70 =head1 DEFINE YOUR FIXTURE SET
72 Fixture sets are currently defined in .json files which must reside in your
73 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
74 describe which data to pull and dump from the source database.
97 This will fetch artists with primary keys 1 and 3, the producer with primary
98 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
99 rel from Producer to Artist.
101 The top level attributes are as follows:
105 Sets must be an array of hashes, as in the example given above. Each set
106 defines a set of objects to be included in the fixtures. For details on valid
107 set attributes see L</SET ATTRIBUTES> below.
111 Rules place general conditions on classes. For example if whenever an artist
112 was dumped you also wanted all of their cds dumped too, then you could use a
113 rule to specify this. For example:
142 In this case all the cds of artists 1, 3 and all producer 5's artists will be
143 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
144 to CD. This is equivalent to:
171 rules must be a hash keyed by class name.
177 To prevent repetition between configs you can include other configs. For
186 { "file": "base.json" }
190 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
191 which is the name of another config file in the same directory. The original
192 config is merged with its includes using L<Hash::Merge>.
194 =head2 datetime_relative
196 Only available for MySQL and PostgreSQL at the moment, must be a value that
197 DateTime::Format::* can parse. For example:
201 "class": "RecentItems",
204 "datetime_relative": "2007-10-30 00:00:00"
207 This will work when dumping from a MySQL database and will cause any datetime
208 fields (where datatype => 'datetime' in the column def of the schema class) to
209 be dumped as a DateTime::Duration object relative to the date specified in the
210 datetime_relative value. For example if the RecentItem object had a date field
211 set to 2007-10-25, then when the fixture is imported the field will be set to 5
212 days in the past relative to the current time.
216 Specifies whether to automatically dump might_have relationships. Should be a
217 hash with one attribute - fetch. Set fetch to 1 or 0.
220 "might_have": { "fetch": 1 },
233 Note: belongs_to rels are automatically dumped whether you like it or not, this
234 is to avoid FKs to nowhere when importing. General rules on has_many rels are
235 not accepted at this top level, but you can turn them on for individual sets -
236 see L</SET ATTRIBUTES>.
238 =head1 SET ATTRIBUTES
242 Required attribute. Specifies the DBIx::Class object class you wish to dump.
246 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
247 If the id is not in the source db then it just won't get dumped, no warnings or
252 Must be either an integer or the string 'all'. Specifying an integer will
253 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
254 will cause the rows attribute to be left off and for all matching rows to be
255 dumped. There's no randomising here, it's just the first x rows.
259 A hash specifying the conditions dumped objects must match. Essentially this is
260 a JSON representation of a DBIx::Class search clause. For example:
266 "cond": { "name": "Dave" }
270 This will dump all artists whose name is 'Dave'. Essentially
271 $artist_rs->search({ name => 'Dave' })->all.
273 Sometimes in a search clause it's useful to use scalar refs to do things like:
275 $artist_rs->search({ no1_singles => \'> no1_albums' })
277 This could be specified in the cond hash like so:
283 "cond": { "no1_singles": "\> no1_albums" }
287 So if the value starts with a backslash the value is made a scalar ref before
288 being passed to search.
292 An array of relationships to be used in the cond clause.
298 "cond": { "cds.position": { ">": 4 } },
303 Fetch all artists who have cds with position greater than 4.
307 Must be an array of hashes. Specifies which rels to also dump. For example:
316 "cond": { "position": "2" }
321 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
323 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
324 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
325 as in the set attributes. quantity is necessary for has_many relationships, but
326 not needed for belongs_to or might_have relationships.
330 Specifies whether to fetch has_many rels for this set. Must be a hash
331 containing keys fetch and quantity.
333 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
336 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
340 As with has_many but for might_have relationships. Quantity doesn't do anything
343 This value will be inherited by all fetches in this set. This is not true for
344 the has_many attribute.
346 =head1 RULE ATTRIBUTES
350 Same as with L</SET ATTRIBUTES>
354 Same as with L</SET ATTRIBUTES>
358 Same as with L</SET ATTRIBUTES>
362 Same as with L</SET ATTRIBUTES>
366 Same as with L</SET ATTRIBUTES>
374 =item Arguments: \%$attrs
376 =item Return Value: $fixture_object
380 Returns a new DBIx::Class::Fixture object. %attrs can have the following
387 required. must contain a valid path to the directory in which your .json
392 determines whether to be verbose
394 =item ignore_sql_errors:
396 ignore errors on import of DDL etc
400 my $fixtures = DBIx::Class::Fixtures->new( {
401 config_dir => '/home/me/app/fixture_configs'
410 unless (ref $params eq 'HASH') {
411 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
414 unless ($params->{config_dir}) {
415 return DBIx::Class::Exception->throw('config_dir param not specified');
418 my $config_dir = dir($params->{config_dir});
419 unless (-e $params->{config_dir}) {
420 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
424 config_dir => $config_dir,
425 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
426 debug => $params->{debug} || 0,
427 ignore_sql_errors => $params->{ignore_sql_errors},
428 dumped_objects => {},
429 use_create => $params->{use_create} || 0
441 =item Arguments: \%$attrs
443 =item Return Value: 1
448 config => 'set_config.json', # config file to use. must be in the config
449 # directory specified in the constructor
450 schema => $source_dbic_schema,
451 directory => '/home/me/app/fixtures' # output directory
457 all => 1, # just dump everything that's in the schema
458 schema => $source_dbic_schema,
459 directory => '/home/me/app/fixtures' # output directory
462 In this case objects will be dumped to subdirectories in the specified
463 directory. For example:
465 /home/me/app/fixtures/artist/1.fix
466 /home/me/app/fixtures/artist/3.fix
467 /home/me/app/fixtures/producer/5.fix
469 schema and directory are required attributes. also, one of config or all must
472 Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
473 If this form is used your HashRef should conform to the structure rules defined
474 for the JSON representations.
482 unless (ref $params eq 'HASH') {
483 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
486 foreach my $param (qw/schema directory/) {
487 unless ($params->{$param}) {
488 return DBIx::Class::Exception->throw($param . ' param not specified');
492 if($params->{excludes} && !$params->{all}) {
493 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
496 my $schema = $params->{schema};
498 if ($params->{config}) {
499 my $config = ref $params->{config} eq 'HASH' ?
503 my $config_file = $self->config_dir->file($params->{config});
504 $self->load_config_file($config_file);
506 } elsif ($params->{all}) {
507 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
509 might_have => { fetch => 0 },
510 has_many => { fetch => 0 },
511 belongs_to => { fetch => 0 },
514 { class => $_, quantity => 'all' };
520 DBIx::Class::Exception->throw('must pass config or set all');
523 my $output_dir = dir($params->{directory});
524 unless (-e $output_dir) {
525 $output_dir->mkpath ||
526 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
529 $self->msg("generating fixtures");
530 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
532 if (-e $tmp_output_dir) {
533 $self->msg("- clearing existing $tmp_output_dir");
534 $tmp_output_dir->rmtree;
536 $self->msg("- creating $tmp_output_dir");
537 $tmp_output_dir->mkpath;
539 # write version file (for the potential benefit of populate)
540 $tmp_output_dir->file('_dumper_version')
544 $config->{rules} ||= {};
545 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
547 while ( my ($k,$v) = each %{ $config->{rules} } ) {
548 if ( my $source = eval { $schema->source($k) } ) {
549 $config->{rules}{$source->source_name} = $v;
553 foreach my $source (@sources) {
554 # apply rule to set if specified
555 my $rule = $config->{rules}->{$source->{class}};
556 $source = merge( $source, $rule ) if ($rule);
559 my $rs = $schema->resultset($source->{class});
561 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
562 # if value starts with \ assume it's meant to be passed as a scalar ref
563 # to dbic. ideally this would substitute deeply
566 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
567 : $source->{cond}->{$_}
568 } keys %{$source->{cond}}
572 $rs = $rs->search($source->{cond}, { join => $source->{join} })
575 $self->msg("- dumping $source->{class}");
577 my %source_options = ( set => { %{$config}, %{$source} } );
578 if ($source->{quantity}) {
579 $rs = $rs->search({}, { order_by => $source->{order_by} })
580 if $source->{order_by};
582 if ($source->{quantity} =~ /^\d+$/) {
583 $rs = $rs->search({}, { rows => $source->{quantity} });
584 } elsif ($source->{quantity} ne 'all') {
585 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
588 elsif ($source->{ids} && @{$source->{ids}}) {
589 my @ids = @{$source->{ids}};
590 my (@pks) = $rs->result_source->primary_columns;
591 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
592 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
595 DBIx::Class::Exception->throw('must specify either quantity or ids');
598 $source_options{set_dir} = $tmp_output_dir;
599 $self->dump_rs($rs, \%source_options );
602 # clear existing output dir
603 foreach my $child ($output_dir->children) {
604 if ($child->is_dir) {
605 next if ($child eq $tmp_output_dir);
606 if (grep { $_ =~ /\.fix/ } $child->children) {
609 } elsif ($child =~ /_dumper_version$/) {
614 $self->msg("- moving temp dir to $output_dir");
615 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
616 for $tmp_output_dir->children;
618 if (-e $output_dir) {
619 $self->msg("- clearing tmp dir $tmp_output_dir");
620 # delete existing fixture set
621 $tmp_output_dir->remove;
629 sub load_config_file {
630 my ($self, $config_file) = @_;
631 DBIx::Class::Exception->throw("config does not exist at $config_file")
632 unless -e $config_file;
634 my $config = Config::Any::JSON->load($config_file);
637 if (my $incs = $config->{includes}) {
639 DBIx::Class::Exception->throw(
640 'includes params of config must be an array ref of hashrefs'
641 ) unless ref $incs eq 'ARRAY';
643 foreach my $include_config (@$incs) {
644 DBIx::Class::Exception->throw(
645 'includes params of config must be an array ref of hashrefs'
646 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
648 my $include_file = $self->config_dir->file($include_config->{file});
650 DBIx::Class::Exception->throw("config does not exist at $include_file")
651 unless -e $include_file;
653 my $include = Config::Any::JSON->load($include_file);
654 $self->msg($include);
655 $config = merge( $config, $include );
657 delete $config->{includes};
661 return DBIx::Class::Exception->throw('config has no sets')
662 unless $config && $config->{sets} &&
663 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
665 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
666 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
667 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
673 my ($self, $rs, $params) = @_;
675 while (my $row = $rs->next) {
676 $self->dump_object($row, $params);
681 my ($self, $object, $params) = @_;
682 my $set = $params->{set};
683 die 'no dir passed to dump_object' unless $params->{set_dir};
684 die 'no object passed to dump_object' unless $object;
686 my @inherited_attrs = @{$self->_inherited_attributes};
689 $object->get_column($_)
690 } $object->primary_columns;
692 my $key = join("\0", @pk_vals);
694 my $src = $object->result_source;
695 my $exists = $self->dumped_objects->{$src->name}{$key}++;
698 # write dir and gen filename
699 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
700 $source_dir->mkpath(0, 0777);
702 # strip dir separators from file name
703 my $file = $source_dir->file(
704 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
710 $self->msg('-- dumping ' . $file->stringify, 2);
711 my %ds = $object->get_columns;
713 # mess with dates if specified
714 if ($set->{datetime_relative}) {
715 my $formatter= $object->result_source->schema->storage->datetime_parser;
716 unless ($@ || !$formatter) {
718 if ($set->{datetime_relative} eq 'today') {
719 $dt = DateTime->today;
721 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
724 while (my ($col, $value) = each %ds) {
725 my $col_info = $object->result_source->column_info($col);
728 && $col_info->{_inflate_info}
729 && uc($col_info->{data_type}) eq 'DATETIME';
731 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
734 warn "datetime_relative not supported for this db driver at the moment";
738 # do the actual dumping
739 my $serialized = Dump(\%ds)->Out();
740 $file->openw->print($serialized);
743 # don't bother looking at rels unless we are actually planning to dump at least one type
744 my ($might_have, $belongs_to, $has_many) = map {
745 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
746 } qw/might_have belongs_to has_many/;
748 return unless $might_have
753 # dump rels of object
755 foreach my $name (sort $src->relationships) {
756 my $info = $src->relationship_info($name);
757 my $r_source = $src->related_source($name);
758 # if belongs_to or might_have with might_have param set or has_many with
759 # has_many param set then
761 ( $info->{attrs}{accessor} eq 'single' &&
762 (!$info->{attrs}{join_type} || $might_have)
764 || $info->{attrs}{accessor} eq 'filter'
766 ($info->{attrs}{accessor} eq 'multi' && $has_many)
768 my $related_rs = $object->related_resultset($name);
769 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
770 # these parts of the rule only apply to has_many rels
771 if ($rule && $info->{attrs}{accessor} eq 'multi') {
772 $related_rs = $related_rs->search(
774 { join => $rule->{join} }
775 ) if ($rule->{cond});
777 $related_rs = $related_rs->search(
779 { rows => $rule->{quantity} }
780 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
782 $related_rs = $related_rs->search(
784 { order_by => $rule->{order_by} }
785 ) if ($rule->{order_by});
788 if ($set->{has_many}{quantity} &&
789 $set->{has_many}{quantity} =~ /^\d+$/) {
790 $related_rs = $related_rs->search(
792 { rows => $set->{has_many}->{quantity} }
796 my %c_params = %{$params};
800 } grep { $set->{$_} } @inherited_attrs;
802 $c_params{set} = \%mock_set;
803 $c_params{set} = merge( $c_params{set}, $rule)
804 if $rule && $rule->{fetch};
806 $self->dump_rs($related_rs, \%c_params);
811 return unless $set && $set->{fetch};
812 foreach my $fetch (@{$set->{fetch}}) {
814 $fetch->{$_} = $set->{$_} foreach
815 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
816 my $related_rs = $object->related_resultset($fetch->{rel});
817 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
820 my $info = $object->result_source->relationship_info($fetch->{rel});
821 if ($info->{attrs}{accessor} eq 'multi') {
822 $fetch = merge( $fetch, $rule );
823 } elsif ($rule->{fetch}) {
824 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
828 die "relationship $fetch->{rel} does not exist for " . $src->source_name
829 unless ($related_rs);
831 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
832 # if value starts with \ assume it's meant to be passed as a scalar ref
833 # to dbic. ideally this would substitute deeply
834 $fetch->{cond} = { map {
835 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
836 : $fetch->{cond}->{$_}
837 } keys %{$fetch->{cond}} };
840 $related_rs = $related_rs->search(
842 { join => $fetch->{join} }
845 $related_rs = $related_rs->search(
847 { rows => $fetch->{quantity} }
848 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
849 $related_rs = $related_rs->search(
851 { order_by => $fetch->{order_by} }
852 ) if $fetch->{order_by};
854 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
858 sub _generate_schema {
860 my $params = shift || {};
862 $self->msg("\ncreating schema");
864 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
865 eval "require $schema_class";
869 my $connection_details = $params->{connection_details};
871 $namespace_counter++;
873 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
874 Class::C3::Componentised->inject_base( $namespace => $schema_class );
876 $pre_schema = $namespace->connect(@{$connection_details});
877 unless( $pre_schema ) {
878 return DBIx::Class::Exception->throw('connection details not valid');
880 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
881 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
882 my $dbh = $pre_schema->storage->dbh;
885 $self->msg("- clearing DB of existing tables");
886 $pre_schema->storage->txn_do(sub {
887 $pre_schema->storage->with_deferred_fk_checks(sub {
888 foreach my $table (@tables) {
890 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
896 # import new ddl file to db
897 my $ddl_file = $params->{ddl};
898 $self->msg("- deploying schema using $ddl_file");
899 my $data = _read_sql($ddl_file);
901 eval { $dbh->do($_) or warn "SQL was:\n $_"};
902 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
904 $self->msg("- finished importing DDL into DB");
906 # load schema object from our new DB
907 $namespace_counter++;
908 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
909 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
910 my $schema = $namespace2->connect(@{$connection_details});
915 my $ddl_file = shift;
917 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
918 my @data = split(/\n/, join('', <$fh>));
919 @data = grep(!/^--/, @data);
920 @data = split(/;/, join('', @data));
922 @data = grep { $_ && $_ !~ /^-- / } @data;
930 =item Arguments: \%$attrs
932 =item Return Value: 1
936 $fixtures->populate( {
937 # directory to look for fixtures in, as specified to dump
938 directory => '/home/me/app/fixtures',
941 ddl => '/home/me/app/sql/ddl.sql',
943 # database to clear, deploy and then populate
944 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
946 # DDL to deploy after populating records, ie. FK constraints
947 post_ddl => '/home/me/app/sql/post_ddl.sql',
949 # use CASCADE option when dropping tables
952 # optional, set to 1 to run ddl but not populate
955 # optional, set to 1 to run each fixture through ->create rather than have
956 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
957 # that affects the value of column(s).
960 # Don't try to clean the database, just populate over what's there. Requires
961 # schema option. Use this if you want to handle removing old data yourself
966 In this case the database app_dev will be cleared of all tables, then the
967 specified DDL deployed to it, then finally all fixtures found in
968 /home/me/app/fixtures will be added to it. populate will generate its own
969 DBIx::Class schema from the DDL rather than being passed one to use. This is
970 better as custom insert methods are avoided which can get in the way. In
971 some cases you might not have a DDL, and so this method will eventually allow a
972 $schema object to be passed instead.
974 If needed, you can specify a post_ddl attribute which is a DDL to be applied
975 after all the fixtures have been added to the database. A good use of this
976 option would be to add foreign key constraints since databases like Postgresql
977 cannot disable foreign key checks.
979 If your tables have foreign key constraints you may want to use the cascade
980 attribute which will make the drop table functionality cascade, i.e. 'DROP TABLE
983 C<directory> is a required attribute.
985 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
986 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
989 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
990 attribute containing the connected schema you wish to operate on and set the
991 C<no_deploy> attribute.
998 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
999 unless ref $params eq 'HASH';
1001 DBIx::Class::Exception->throw('directory param not specified')
1002 unless $params->{directory};
1004 my $fixture_dir = dir(delete $params->{directory});
1005 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1006 unless -d $fixture_dir;
1011 if ($params->{ddl} && $params->{connection_details}) {
1012 $ddl_file = file(delete $params->{ddl});
1013 unless (-e $ddl_file) {
1014 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1016 unless (ref $params->{connection_details} eq 'ARRAY') {
1017 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1019 $schema = $self->_generate_schema({
1021 connection_details => delete $params->{connection_details},
1024 } elsif ($params->{schema} && $params->{no_deploy}) {
1025 $schema = $params->{schema};
1027 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1031 return 1 if $params->{no_populate};
1033 $self->msg("\nimporting fixtures");
1034 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
1035 my $version_file = file($fixture_dir, '_dumper_version');
1036 # DBIx::Class::Exception->throw('no version file found');
1037 # unless -e $version_file;
1039 if (-e $tmp_fixture_dir) {
1040 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1041 $tmp_fixture_dir->rmtree;
1043 $self->msg("- creating temp dir");
1044 $tmp_fixture_dir->mkpath();
1045 for ( map { $schema->source($_)->from } $schema->sources) {
1046 my $from_dir = $fixture_dir->subdir($_);
1047 next unless -e $from_dir;
1048 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1051 unless (-d $tmp_fixture_dir) {
1052 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1056 my $formatter = $schema->storage->datetime_parser;
1057 unless ($@ || !$formatter) {
1059 if ($params->{datetime_relative_to}) {
1060 $callbacks{'DateTime::Duration'} = sub {
1061 $params->{datetime_relative_to}->clone->add_duration($_);
1064 $callbacks{'DateTime::Duration'} = sub {
1065 $formatter->format_datetime(DateTime->today->add_duration($_))
1068 $callbacks{object} ||= "visit_ref";
1069 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
1072 $schema->storage->txn_do(sub {
1073 $schema->storage->with_deferred_fk_checks(sub {
1074 foreach my $source (sort $schema->sources) {
1075 $self->msg("- adding " . $source);
1076 my $rs = $schema->resultset($source);
1077 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1078 next unless (-e $source_dir);
1080 while (my $file = $source_dir->next) {
1081 next unless ($file =~ /\.fix$/);
1082 next if $file->is_dir;
1083 my $contents = $file->slurp;
1086 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1087 if ( $params->{use_create} ) {
1088 $rs->create( $HASH1 );
1090 push(@rows, $HASH1);
1093 $rs->populate(\@rows) if scalar(@rows);
1097 $self->do_post_ddl( {
1099 post_ddl=>$params->{post_ddl}
1100 } ) if $params->{post_ddl};
1102 $self->msg("- fixtures imported");
1103 $self->msg("- cleaning up");
1104 $tmp_fixture_dir->rmtree;
1109 my ($self, $params) = @_;
1111 my $schema = $params->{schema};
1112 my $data = _read_sql($params->{post_ddl});
1114 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1115 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1117 $self->msg("- finished importing post-populate DDL into DB");
1122 my $subject = shift || return;
1123 my $level = shift || 1;
1124 return unless $self->debug >= $level;
1126 print Dumper($subject);
1128 print $subject . "\n";
1134 Luke Saunders <luke@shadowcatsystems.co.uk>
1136 Initial development sponsored by and (c) Takkle, Inc. 2007
1140 Ash Berlin <ash@shadowcatsystems.co.uk>
1142 Matt S. Trout <mst@shadowcatsystems.co.uk>
1144 Drew Taylor <taylor.andrew.j@gmail.com>
1146 Frank Switalski <fswitalski@gmail.com>
1150 This library is free software under the same license as perl itself