1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file);
10 use Config::Any::JSON;
11 use Data::Dump::Streamer;
12 use Data::Visitor::Callback;
14 use File::Copy::Recursive qw/dircopy/;
15 use File::Copy qw/move/;
16 use Hash::Merge qw( merge );
18 use Class::C3::Componentised;
20 use base qw(Class::Accessor::Grouped);
# Counter used to mint unique package names for generated schemas
# (see _generate_schema, which increments it per connection).
22 our $namespace_counter = 0;
# Simple read/write accessors via Class::Accessor::Grouped (the base class).
24 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
25 _inherited_attributes debug schema_class dumped_objects/);
33 our $VERSION = '1.001010';
41 use DBIx::Class::Fixtures;
45 my $fixtures = DBIx::Class::Fixtures->new({
46 config_dir => '/home/me/app/fixture_configs'
50 config => 'set_config.json',
51 schema => $source_dbic_schema,
52 directory => '/home/me/app/fixtures'
56 directory => '/home/me/app/fixtures',
57 ddl => '/home/me/app/sql/ddl.sql',
58 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
59 post_ddl => '/home/me/app/sql/post_ddl.sql',
64 Dump fixtures from source database to filesystem then import to another
65 database (with same schema) at any time. Use as a constant dataset for running
66 tests against or for populating development databases when impractical to use
67 production clones. Describe fixture set using relations and conditions based on
68 your DBIx::Class schema.
70 =head1 DEFINE YOUR FIXTURE SET
72 Fixture sets are currently defined in .json files which must reside in your
73 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
74 describe which data to pull and dump from the source database.
97 This will fetch artists with primary keys 1 and 3, the producer with primary
98 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
99 rel from Producer to Artist.
101 The top level attributes are as follows:
105 Sets must be an array of hashes, as in the example given above. Each set
106 defines a set of objects to be included in the fixtures. For details on valid
107 set attributes see L</SET ATTRIBUTES> below.
111 Rules place general conditions on classes. For example if whenever an artist
112 was dumped you also wanted all of their cds dumped too, then you could use a
113 rule to specify this. For example:
142 In this case all the cds of artists 1, 3 and all producer 5's artists will be
143 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
144 to CD. This is equivalent to:
171 rules must be a hash keyed by class name.
177 To prevent repetition between configs you can include other configs. For
186 { "file": "base.json" }
190 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
191 which is the name of another config file in the same directory. The original
192 config is merged with its includes using L<Hash::Merge>.
194 =head2 datetime_relative
196 Only available for MySQL and PostgreSQL at the moment, must be a value that
197 DateTime::Format::* can parse. For example:
201 "class": "RecentItems",
204 "datetime_relative": "2007-10-30 00:00:00"
207 This will work when dumping from a MySQL database and will cause any datetime
208 fields (where datatype => 'datetime' in the column def of the schema class) to
209 be dumped as a DateTime::Duration object relative to the date specified in the
210 datetime_relative value. For example if the RecentItem object had a date field
211 set to 2007-10-25, then when the fixture is imported the field will be set to 5
212 days in the past relative to the current time.
216 Specifies whether to automatically dump might_have relationships. Should be a
217 hash with one attribute - fetch. Set fetch to 1 or 0.
220 "might_have": { "fetch": 1 },
233 Note: belongs_to rels are automatically dumped whether you like it or not, this
234 is to avoid FKs to nowhere when importing. General rules on has_many rels are
235 not accepted at this top level, but you can turn them on for individual sets -
236 see L</SET ATTRIBUTES>.
238 =head1 SET ATTRIBUTES
242 Required attribute. Specifies the DBIx::Class object class you wish to dump.
246 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
247 If the id is not in the source db then it just won't get dumped, no warnings or
252 Must be either an integer or the string 'all'. Specifying an integer will
253 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
254 will cause the rows attribute to be left off and for all matching rows to be
255 dumped. There's no randomising here, it's just the first x rows.
259 A hash specifying the conditions dumped objects must match. Essentially this is
260 a JSON representation of a DBIx::Class search clause. For example:
266 "cond": { "name": "Dave" }
270 This will dump all artists whose name is 'Dave'. Essentially
271 $artist_rs->search({ name => 'Dave' })->all.
273 Sometimes in a search clause it's useful to use scalar refs to do things like:
275 $artist_rs->search({ no1_singles => \'> no1_albums' })
277 This could be specified in the cond hash like so:
283 "cond": { "no1_singles": "\> no1_albums" }
287 So if the value starts with a backslash the value is made a scalar ref before
288 being passed to search.
292 An array of relationships to be used in the cond clause.
298 "cond": { "cds.position": { ">": 4 } },
303 Fetch all artists who have cds with position greater than 4.
307 Must be an array of hashes. Specifies which rels to also dump. For example:
316 "cond": { "position": "2" }
321 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
323 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
324 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
325 as in the set attributes. quantity is necessary for has_many relationships, but
326 not if using for belongs_to or might_have relationships.
330 Specifies whether to fetch has_many rels for this set. Must be a hash
331 containing keys fetch and quantity.
333 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
336 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
340 As with has_many but for might_have relationships. Quantity doesn't do anything
343 This value will be inherited by all fetches in this set. This is not true for
344 the has_many attribute.
346 =head1 RULE ATTRIBUTES
350 Same as with L</SET ATTRIBUTES>
354 Same as with L</SET ATTRIBUTES>
358 Same as with L</SET ATTRIBUTES>
362 Same as with L</SET ATTRIBUTES>
366 Same as with L</SET ATTRIBUTES>
374 =item Arguments: \%$attrs
376 =item Return Value: $fixture_object
380 Returns a new DBIx::Class::Fixture object. %attrs can have the following
387 required. must contain a valid path to the directory in which your .json
392 determines whether to be verbose
394 =item ignore_sql_errors:
396 ignore errors on import of DDL etc
400 my $fixtures = DBIx::Class::Fixtures->new( {
401 config_dir => '/home/me/app/fixture_configs'
# NOTE(review): fragment of the constructor body — the `sub new {` line and
# several interior lines are not visible in this listing.
# Validate that the single argument is a hash ref.
410 unless (ref $params eq 'HASH') {
411 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
# config_dir is the only strictly required attribute.
414 unless ($params->{config_dir}) {
415 return DBIx::Class::Exception->throw('config_dir param not specified');
418 my $config_dir = dir($params->{config_dir});
419 unless (-e $params->{config_dir}) {
420 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
# Object state (presumably passed to bless — the surrounding lines are not
# visible here): per-instance config plus the attribute names that child
# "fetch" specs inherit from their parent set (see dump_object).
424 config_dir => $config_dir,
425 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
426 debug => $params->{debug} || 0,
427 ignore_sql_errors => $params->{ignore_sql_errors},
428 dumped_objects => {},
429 use_create => $params->{use_create} || 0
441 =item Arguments: \%$attrs
443 =item Return Value: 1
448 config => 'set_config.json', # config file to use. must be in the config
449 # directory specified in the constructor
450 schema => $source_dbic_schema,
451 directory => '/home/me/app/fixtures' # output directory
457 all => 1, # just dump everything that's in the schema
458 schema => $source_dbic_schema,
459 directory => '/home/me/app/fixtures' # output directory
462 In this case objects will be dumped to subdirectories in the specified
463 directory. For example:
465 /home/me/app/fixtures/artist/1.fix
466 /home/me/app/fixtures/artist/3.fix
467 /home/me/app/fixtures/producer/5.fix
469 schema and directory are required attributes. also, one of config or all must be specified.
# NOTE(review): fragment of the dump() body — the `sub dump {` line and a
# number of interior lines are not visible in this listing.
# Argument validation: hash ref with required schema + directory keys.
477 unless (ref $params eq 'HASH') {
478 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
481 foreach my $param (qw/schema directory/) {
482 unless ($params->{$param}) {
483 return DBIx::Class::Exception->throw($param . ' param not specified');
# 'excludes' only makes sense together with 'all' (it filters the
# auto-generated everything-set).
487 if($params->{excludes} && !$params->{all}) {
488 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
491 my $schema = $params->{schema};
# Either load a named config file from config_dir, or synthesize a config
# covering every source in the schema ('all'), or fail.
493 if ($params->{config}) {
495 my $config_file = $self->config_dir->file($params->{config});
496 $config = $self->load_config_file($config_file);
497 } elsif ($params->{all}) {
498 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
# Synthesized 'all' config: no rel-following, one set per source with
# quantity 'all'.
500 might_have => { fetch => 0 },
501 has_many => { fetch => 0 },
502 belongs_to => { fetch => 0 },
505 { class => $_, quantity => 'all' };
511 DBIx::Class::Exception->throw('must pass config or set all');
514 my $output_dir = dir($params->{directory});
515 unless (-e $output_dir) {
516 $output_dir->mkpath ||
517 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
520 $self->msg("generating fixtures");
# Dump into a per-user temp subdir first; only swapped into place at the
# end so a failed dump doesn't clobber an existing fixture set.
521 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
523 if (-e $tmp_output_dir) {
524 $self->msg("- clearing existing $tmp_output_dir");
525 $tmp_output_dir->rmtree;
527 $self->msg("- creating $tmp_output_dir");
528 $tmp_output_dir->mkpath;
530 # write version file (for the potential benefit of populate)
531 $tmp_output_dir->file('_dumper_version')
535 $config->{rules} ||= {};
536 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
# Re-key rules by canonical source_name so rules written against moniker
# aliases still match (unknown keys are left as-is via the eval guard).
538 while ( my ($k,$v) = each %{ $config->{rules} } ) {
539 if ( my $source = eval { $schema->source($k) } ) {
540 $config->{rules}{$source->source_name} = $v;
544 foreach my $source (@sources) {
545 # apply rule to set if specified
546 my $rule = $config->{rules}->{$source->{class}};
547 $source = merge( $source, $rule ) if ($rule);
550 my $rs = $schema->resultset($source->{class});
552 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
553 # if value starts with \ assume it's meant to be passed as a scalar ref
554 # to dbic. ideally this would substitute deeply
557 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
558 : $source->{cond}->{$_}
559 } keys %{$source->{cond}}
563 $rs = $rs->search($source->{cond}, { join => $source->{join} })
566 $self->msg("- dumping $source->{class}");
# Per-source options: the set seen by dump_object is the top-level config
# merged with this set's own attributes (set keys win on collision).
568 my %source_options = ( set => { %{$config}, %{$source} } );
569 if ($source->{quantity}) {
570 $rs = $rs->search({}, { order_by => $source->{order_by} })
571 if $source->{order_by};
# quantity must be a positive integer (row limit) or the string 'all'.
573 if ($source->{quantity} =~ /^\d+$/) {
574 $rs = $rs->search({}, { rows => $source->{quantity} });
575 } elsif ($source->{quantity} ne 'all') {
576 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
# 'ids' alternative: explicit primary-key list; only single-column PKs
# are supported by this code path.
579 elsif ($source->{ids} && @{$source->{ids}}) {
580 my @ids = @{$source->{ids}};
581 my (@pks) = $rs->result_source->primary_columns;
582 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
583 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
586 DBIx::Class::Exception->throw('must specify either quantity or ids');
589 $source_options{set_dir} = $tmp_output_dir;
590 $self->dump_rs($rs, \%source_options );
593 # clear existing output dir
594 foreach my $child ($output_dir->children) {
595 if ($child->is_dir) {
596 next if ($child eq $tmp_output_dir);
597 if (grep { $_ =~ /\.fix/ } $child->children) {
600 } elsif ($child =~ /_dumper_version$/) {
# Promote the temp dump into the real output directory, then clean up.
605 $self->msg("- moving temp dir to $output_dir");
606 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
607 for $tmp_output_dir->children;
609 if (-e $output_dir) {
610 $self->msg("- clearing tmp dir $tmp_output_dir");
611 # delete existing fixture set
612 $tmp_output_dir->remove;
# Load a JSON fixture-set config, recursively merging any 'includes'
# entries, validate that it defines at least one set, and apply defaults
# for the rel-fetching flags.
# NOTE(review): fragment — the closing of this sub (and presumably a
# `return $config;`) is not visible in this listing.
620 sub load_config_file {
621 my ($self, $config_file) = @_;
622 DBIx::Class::Exception->throw("config does not exist at $config_file")
623 unless -e $config_file;
625 my $config = Config::Any::JSON->load($config_file);
# 'includes' must be an arrayref of { file => 'name.json' } hashrefs,
# each naming another config in the same config_dir.
628 if (my $incs = $config->{includes}) {
630 DBIx::Class::Exception->throw(
631 'includes params of config must be an array ref of hashrefs'
632 ) unless ref $incs eq 'ARRAY';
634 foreach my $include_config (@$incs) {
635 DBIx::Class::Exception->throw(
636 'includes params of config must be an array ref of hashrefs'
637 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
639 my $include_file = $self->config_dir->file($include_config->{file});
641 DBIx::Class::Exception->throw("config does not exist at $include_file")
642 unless -e $include_file;
644 my $include = Config::Any::JSON->load($include_file);
645 $self->msg($include);
# Hash::Merge: the including config's keys take precedence over the
# included file's on conflict (left-biased merge).
646 $config = merge( $config, $include );
648 delete $config->{includes};
# A config without a non-empty 'sets' arrayref is unusable.
652 return DBIx::Class::Exception->throw('config has no sets')
653 unless $config && $config->{sets} &&
654 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
# Defaults: don't follow might_have/has_many, always follow belongs_to
# (belongs_to is needed to avoid dangling FKs on import).
656 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
657 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
658 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
# NOTE(review): fragment — the `sub dump_rs {` line is not visible here.
# Iterate a resultset and dump each row via dump_object, passing the
# set/set_dir options through unchanged.
664 my ($self, $rs, $params) = @_;
666 while (my $row = $rs->next) {
667 $self->dump_object($row, $params);
# NOTE(review): fragment of dump_object — the `sub dump_object {` line and
# many interior lines are not visible in this listing.
# Serialize one row to a .fix file, then recursively dump its related
# rows according to the set's might_have/belongs_to/has_many flags,
# per-class rules, and explicit 'fetch' specs.
672 my ($self, $object, $params) = @_;
673 my $set = $params->{set};
674 die 'no dir passed to dump_object' unless $params->{set_dir};
675 die 'no object passed to dump_object' unless $object;
677 my @inherited_attrs = @{$self->_inherited_attributes};
# Primary-key values identify the row both for the filename and for the
# already-dumped bookkeeping below.
680 $object->get_column($_)
681 } $object->primary_columns;
# NUL-joined PK values form a collision-safe dedup key.
683 my $key = join("\0", @pk_vals);
685 my $src = $object->result_source;
# Post-increment: $exists is true iff this exact row was dumped before
# in this run (prevents infinite recursion through rel cycles).
686 my $exists = $self->dumped_objects->{$src->name}{$key}++;
689 # write dir and gen filename
690 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
691 $source_dir->mkpath(0, 0777);
693 # strip dir separators from file name
694 my $file = $source_dir->file(
695 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
701 $self->msg('-- dumping ' . $file->stringify, 2);
702 my %ds = $object->get_columns;
704 # mess with dates if specified
705 if ($set->{datetime_relative}) {
706 my $formatter= $object->result_source->schema->storage->datetime_parser;
# NOTE(review): $@ is checked here but the eval that would set it is not
# visible in this fragment — presumably the datetime_parser call above
# is wrapped in an eval in the full source; confirm.
707 unless ($@ || !$formatter) {
709 if ($set->{datetime_relative} eq 'today') {
710 $dt = DateTime->today;
712 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
# Rewrite each datetime column as a DateTime::Duration relative to $dt
# so populate can re-anchor it against "now" at import time.
715 while (my ($col, $value) = each %ds) {
716 my $col_info = $object->result_source->column_info($col);
719 && $col_info->{_inflate_info}
720 && uc($col_info->{data_type}) eq 'DATETIME';
722 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
725 warn "datetime_relative not supported for this db driver at the moment";
729 # do the actual dumping
# Data::Dump::Streamer serialization — populate evals this back in as
# $HASH1 (see populate's $HASH1 usage).
730 my $serialized = Dump(\%ds)->Out();
731 $file->openw->print($serialized);
734 # don't bother looking at rels unless we are actually planning to dump at least one type
# A rel type is dumped if enabled either on this set or by a per-class
# rule for this source.
735 my ($might_have, $belongs_to, $has_many) = map {
736 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
737 } qw/might_have belongs_to has_many/;
739 return unless $might_have
744 # dump rels of object
746 foreach my $name (sort $src->relationships) {
747 my $info = $src->relationship_info($name);
748 my $r_source = $src->related_source($name);
749 # if belongs_to or might_have with might_have param set or has_many with
750 # has_many param set then
# accessor 'single' without join_type = belongs_to (always eligible);
# 'single' with join_type = might_have (needs $might_have);
# 'filter' = belongs_to variant; 'multi' = has_many (needs $has_many).
752 ( $info->{attrs}{accessor} eq 'single' &&
753 (!$info->{attrs}{join_type} || $might_have)
755 || $info->{attrs}{accessor} eq 'filter'
757 ($info->{attrs}{accessor} eq 'multi' && $has_many)
759 my $related_rs = $object->related_resultset($name);
760 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
761 # these parts of the rule only apply to has_many rels
762 if ($rule && $info->{attrs}{accessor} eq 'multi') {
763 $related_rs = $related_rs->search(
765 { join => $rule->{join} }
766 ) if ($rule->{cond});
768 $related_rs = $related_rs->search(
770 { rows => $rule->{quantity} }
771 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
773 $related_rs = $related_rs->search(
775 { order_by => $rule->{order_by} }
776 ) if ($rule->{order_by});
# Set-level has_many quantity caps how many children are followed.
779 if ($set->{has_many}{quantity} &&
780 $set->{has_many}{quantity} =~ /^\d+$/) {
781 $related_rs = $related_rs->search(
783 { rows => $set->{has_many}->{quantity} }
# Build the child set: copy of current params, carrying forward only the
# inheritable attributes (datetime_relative, might_have, rules,
# belongs_to), then overlay the matching rule if it asks to fetch more.
787 my %c_params = %{$params};
791 } grep { $set->{$_} } @inherited_attrs;
793 $c_params{set} = \%mock_set;
794 $c_params{set} = merge( $c_params{set}, $rule)
795 if $rule && $rule->{fetch};
797 $self->dump_rs($related_rs, \%c_params);
# Explicit 'fetch' specs from the set: each names a rel plus optional
# cond/quantity/join/order_by restrictions.
802 return unless $set && $set->{fetch};
803 foreach my $fetch (@{$set->{fetch}}) {
# Inherit parent-set attributes the fetch spec doesn't override.
805 $fetch->{$_} = $set->{$_} foreach
806 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
807 my $related_rs = $object->related_resultset($fetch->{rel});
808 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
811 my $info = $object->result_source->relationship_info($fetch->{rel});
# For has_many rels the whole rule merges in; otherwise only its
# nested 'fetch' list is carried over.
812 if ($info->{attrs}{accessor} eq 'multi') {
813 $fetch = merge( $fetch, $rule );
814 } elsif ($rule->{fetch}) {
815 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
819 die "relationship $fetch->{rel} does not exist for " . $src->source_name
820 unless ($related_rs);
822 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
823 # if value starts with \ assume it's meant to be passed as a scalar ref
824 # to dbic. ideally this would substitute deeply
825 $fetch->{cond} = { map {
826 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
827 : $fetch->{cond}->{$_}
828 } keys %{$fetch->{cond}} };
# Apply the fetch spec's restrictions, then recurse with it as the set.
831 $related_rs = $related_rs->search(
833 { join => $fetch->{join} }
836 $related_rs = $related_rs->search(
838 { rows => $fetch->{quantity} }
839 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
840 $related_rs = $related_rs->search(
842 { order_by => $fetch->{order_by} }
843 ) if $fetch->{order_by};
845 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
# Build a throwaway schema connected to the target DB: drop all existing
# tables, replay the DDL file, then return a fresh schema object bound to
# the newly deployed database.
# NOTE(review): fragment — several interior lines and the sub's closing
# (presumably `return $schema;`) are not visible in this listing.
849 sub _generate_schema {
851 my $params = shift || {};
853 $self->msg("\ncreating schema");
855 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
# String eval is required here to require a class named at runtime.
856 eval "require $schema_class";
860 my $connection_details = $params->{connection_details};
# Each generated schema gets a unique package name so repeated calls
# don't collide ($namespace_counter is the file-level counter).
862 $namespace_counter++;
864 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
865 Class::C3::Componentised->inject_base( $namespace => $schema_class );
867 $pre_schema = $namespace->connect(@{$connection_details});
868 unless( $pre_schema ) {
869 return DBIx::Class::Exception->throw('connection details not valid');
871 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
872 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
873 my $dbh = $pre_schema->storage->dbh;
# Drop every known table inside one transaction with FK checks deferred,
# so drop order doesn't matter.
876 $self->msg("- clearing DB of existing tables");
877 $pre_schema->storage->txn_do(sub {
878 $pre_schema->storage->with_deferred_fk_checks(sub {
879 foreach my $table (@tables) {
# NOTE(review): table name is interpolated into raw SQL; names come from
# the schema itself, not user input, but this would break on names
# needing quoting.
881 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
887 # import new ddl file to db
888 my $ddl_file = $params->{ddl};
889 $self->msg("- deploying schema using $ddl_file");
890 my $data = _read_sql($ddl_file);
# Each statement runs individually; failures die unless ignore_sql_errors.
892 eval { $dbh->do($_) or warn "SQL was:\n $_"};
893 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
895 $self->msg("- finished importing DDL into DB");
897 # load schema object from our new DB
898 $namespace_counter++;
899 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
900 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
901 my $schema = $namespace2->connect(@{$connection_details});
# NOTE(review): fragment — the `sub _read_sql {` line and the final
# `return \@data;` (or similar) are not visible in this listing.
# Slurp a DDL file and split it into individual statements: strip
# '--' comment lines, join, split on ';', drop empties.
906 my $ddl_file = shift;
# NOTE(review): 2-arg open with interpolated filename — modern style would
# be `open my $fh, '<', $ddl_file or die ...`; left as-is since this
# fragment may predate that convention in the full file.
908 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
909 my @data = split(/\n/, join('', <$fh>));
910 @data = grep(!/^--/, @data);
# Naive ';' split: statements containing literal semicolons (e.g. in
# string defaults or triggers) would be mis-split.
911 @data = split(/;/, join('', @data));
913 @data = grep { $_ && $_ !~ /^-- / } @data;
921 =item Arguments: \%$attrs
923 =item Return Value: 1
927 $fixtures->populate( {
928 # directory to look for fixtures in, as specified to dump
929 directory => '/home/me/app/fixtures',
932 ddl => '/home/me/app/sql/ddl.sql',
934 # database to clear, deploy and then populate
935 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
937 # DDL to deploy after populating records, ie. FK constraints
938 post_ddl => '/home/me/app/sql/post_ddl.sql',
940 # use CASCADE option when dropping tables
943 # optional, set to 1 to run ddl but not populate
946 # optional, set to 1 to run each fixture through ->create rather than have
947 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
948 # that affects the value of column(s).
951 # Don't try to clean the database, just populate over what's there. Requires
952 # schema option. Use this if you want to handle removing old data yourself
957 In this case the database app_dev will be cleared of all tables, then the
958 specified DDL deployed to it, then finally all fixtures found in
959 /home/me/app/fixtures will be added to it. populate will generate its own
960 DBIx::Class schema from the DDL rather than being passed one to use. This is
961 better as custom insert methods are avoided which can get in the way. In
962 some cases you might not have a DDL, and so this method will eventually allow a
963 $schema object to be passed instead.
965 If needed, you can specify a post_ddl attribute which is a DDL to be applied
966 after all the fixtures have been added to the database. A good use of this
967 option would be to add foreign key constraints since databases like Postgresql
968 cannot disable foreign key checks.
970 If your tables have foreign key constraints you may want to use the cascade
971 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
974 C<directory> is a required attribute.
976 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
977 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
980 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
981 attribute containing the connected schema you wish to operate on and set the
982 C<no_deploy> attribute.
# NOTE(review): fragment of populate() — the `sub populate {` line and
# many interior lines are not visible in this listing.
# Validate args: hash ref with a required, existing fixture directory.
989 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
990 unless ref $params eq 'HASH';
992 DBIx::Class::Exception->throw('directory param not specified')
993 unless $params->{directory};
995 my $fixture_dir = dir(delete $params->{directory});
996 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
997 unless -d $fixture_dir;
# Two modes: (a) ddl + connection_details => wipe and redeploy via
# _generate_schema; (b) schema + no_deploy => use caller's schema as-is.
1002 if ($params->{ddl} && $params->{connection_details}) {
1003 $ddl_file = file(delete $params->{ddl});
1004 unless (-e $ddl_file) {
1005 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1007 unless (ref $params->{connection_details} eq 'ARRAY') {
1008 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1010 $schema = $self->_generate_schema({
1012 connection_details => delete $params->{connection_details},
1015 } elsif ($params->{schema} && $params->{no_deploy}) {
1016 $schema = $params->{schema};
1018 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
# no_populate: deploy-only mode, stop after the schema work above.
1022 return 1 if $params->{no_populate};
1024 $self->msg("\nimporting fixtures");
# Work from a per-user temp copy of the fixture dir so the originals are
# never touched during import.
1025 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
1026 my $version_file = file($fixture_dir, '_dumper_version');
1027 # DBIx::Class::Exception->throw('no version file found');
1028 # unless -e $version_file;
1030 if (-e $tmp_fixture_dir) {
1031 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1032 $tmp_fixture_dir->rmtree;
1034 $self->msg("- creating temp dir");
1035 $tmp_fixture_dir->mkpath();
# Copy only the subdirs that correspond to sources in the schema.
1036 for ( map { $schema->source($_)->from } $schema->sources) {
1037 my $from_dir = $fixture_dir->subdir($_);
1038 next unless -e $from_dir;
1039 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1042 unless (-d $tmp_fixture_dir) {
1043 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1047 my $formatter = $schema->storage->datetime_parser;
# NOTE(review): $@ is checked here without a visible preceding eval in
# this fragment — presumably the datetime_parser lookup is eval-wrapped
# in the full source; confirm.
1048 unless ($@ || !$formatter) {
# Visitor that re-anchors DateTime::Duration values (written by dump's
# datetime_relative handling) against either a supplied reference date
# or today.
1050 if ($params->{datetime_relative_to}) {
1051 $callbacks{'DateTime::Duration'} = sub {
1052 $params->{datetime_relative_to}->clone->add_duration($_);
1055 $callbacks{'DateTime::Duration'} = sub {
1056 $formatter->format_datetime(DateTime->today->add_duration($_))
1059 $callbacks{object} ||= "visit_ref";
# NOTE(review): indirect-object syntax; modern style is
# Data::Visitor::Callback->new(%callbacks).
1060 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
# Import all rows in one transaction with FK checks deferred, so insert
# order across sources doesn't matter.
1063 $schema->storage->txn_do(sub {
1064 $schema->storage->with_deferred_fk_checks(sub {
1065 foreach my $source (sort $schema->sources) {
1066 $self->msg("- adding " . $source);
1067 my $rs = $schema->resultset($source);
1068 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1069 next unless (-e $source_dir);
1071 while (my $file = $source_dir->next) {
1072 next unless ($file =~ /\.fix$/);
1073 next if $file->is_dir;
1074 my $contents = $file->slurp;
# $HASH1 is the variable name Data::Dump::Streamer emitted in dump();
# the (not visible here) eval of $contents defines it.
1077 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
# use_create: row-at-a-time ->create (honours overridden new()),
# otherwise rows are batched for a single ->populate per source.
1078 if ( $params->{use_create} ) {
1079 $rs->create( $HASH1 );
1081 push(@rows, $HASH1);
1084 $rs->populate(\@rows) if scalar(@rows);
# Optionally replay post-population DDL (e.g. FK constraints for DBs
# that can't defer checks).
1088 $self->do_post_ddl( {
1090 post_ddl=>$params->{post_ddl}
1091 } ) if $params->{post_ddl};
1093 $self->msg("- fixtures imported");
1094 $self->msg("- cleaning up");
1095 $tmp_fixture_dir->rmtree;
# NOTE(review): fragment — the `sub do_post_ddl {` line is not visible here.
# Replay the post_ddl SQL file statement-by-statement against the given
# schema's connection (same error policy as _generate_schema's DDL load).
1100 my ($self, $params) = @_;
1102 my $schema = $params->{schema};
1103 my $data = _read_sql($params->{post_ddl});
1105 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1106 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1108 $self->msg("- finished importing post-populate DDL into DB");
# NOTE(review): fragment — the `sub msg {` line is not visible here.
# Debug logger: print $subject when the instance's debug level is at
# least $level (default 1); refs are dumped, plain scalars printed.
1113 my $subject = shift || return;
1114 my $level = shift || 1;
1115 return unless $self->debug >= $level;
1117 print Dumper($subject);
1119 print $subject . "\n";
1125 Luke Saunders <luke@shadowcatsystems.co.uk>
1127 Initial development sponsored by and (c) Takkle, Inc. 2007
1131 Ash Berlin <ash@shadowcatsystems.co.uk>
1133 Matt S. Trout <mst@shadowcatsystems.co.uk>
1135 Drew Taylor <taylor.andrew.j@gmail.com>
1137 Frank Switalski <fswitalski@gmail.com>
1141 This library is free software under the same license as perl itself