1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file);
10 use File::Spec::Functions 'catfile', 'catdir';
11 use Config::Any::JSON;
12 use Data::Dump::Streamer;
13 use Data::Visitor::Callback;
15 use File::Copy::Recursive qw/dircopy/;
16 use File::Copy qw/move/;
17 use Hash::Merge qw( merge );
19 use Class::C3::Componentised;
22 use base qw(Class::Accessor::Grouped);
24 our $namespace_counter = 0;
26 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
27 _inherited_attributes debug schema_class dumped_objects config_attrs/);
35 our $VERSION = '1.001014';
39 DBIx::Class::Fixtures - Dump data and repopulate a database using rules
43 use DBIx::Class::Fixtures;
47 my $fixtures = DBIx::Class::Fixtures->new({
48 config_dir => '/home/me/app/fixture_configs'
52 config => 'set_config.json',
53 schema => $source_dbic_schema,
54 directory => '/home/me/app/fixtures'
58 directory => '/home/me/app/fixtures',
59 ddl => '/home/me/app/sql/ddl.sql',
60 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
61 post_ddl => '/home/me/app/sql/post_ddl.sql',
66 Dump fixtures from source database to filesystem then import to another
67 database (with same schema) at any time. Use as a constant dataset for running
68 tests against or for populating development databases when impractical to use
69 production clones. Describe fixture set using relations and conditions based on
70 your DBIx::Class schema.
72 =head1 DEFINE YOUR FIXTURE SET
74 Fixture sets are currently defined in .json files which must reside in your
75 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
76 describe which data to pull and dump from the source database.
99 This will fetch artists with primary keys 1 and 3, the producer with primary
100 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
101 rel from Producer to Artist.
103 The top level attributes are as follows:
107 Sets must be an array of hashes, as in the example given above. Each set
108 defines a set of objects to be included in the fixtures. For details on valid
109 set attributes see L</SET ATTRIBUTES> below.
113 Rules place general conditions on classes. For example if whenever an artist
114 was dumped you also wanted all of their cds dumped too, then you could use a
115 rule to specify this. For example:
144 In this case all the cds of artists 1, 3 and all producer 5's artists will be
145 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
146 to CD. This is equivalent to:
173 rules must be a hash keyed by class name.
179 To prevent repetition between configs you can include other configs. For
188 { "file": "base.json" }
192 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
193 which is the name of another config file in the same directory. The original
194 config is merged with its includes using L<Hash::Merge>.
196 =head2 datetime_relative
198 Only available for MySQL and PostgreSQL at the moment, must be a value that
199 DateTime::Format::* can parse. For example:
203 "class": "RecentItems",
206 "datetime_relative": "2007-10-30 00:00:00"
209 This will work when dumping from a MySQL database and will cause any datetime
210 fields (where datatype => 'datetime' in the column def of the schema class) to
211 be dumped as a DateTime::Duration object relative to the date specified in the
212 datetime_relative value. For example if the RecentItem object had a date field
213 set to 2007-10-25, then when the fixture is imported the field will be set to 5
214 days in the past relative to the current time.
218 Specifies whether to automatically dump might_have relationships. Should be a
219 hash with one attribute - fetch. Set fetch to 1 or 0.
222 "might_have": { "fetch": 1 },
235 Note: belongs_to rels are automatically dumped whether you like it or not, this
236 is to avoid FKs to nowhere when importing. General rules on has_many rels are
237 not accepted at this top level, but you can turn them on for individual sets -
238 see L</SET ATTRIBUTES>.
240 =head1 SET ATTRIBUTES
244 Required attribute. Specifies the DBIx::Class object class you wish to dump.
248 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
249 If the id is not in the source db then it just won't get dumped, no warnings or
254 Must be either an integer or the string 'all'. Specifying an integer will
255 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
256 will cause the rows attribute to be left off and for all matching rows to be
257 dumped. There's no randomising here, it's just the first x rows.
261 A hash specifying the conditions dumped objects must match. Essentially this is
262 a JSON representation of a DBIx::Class search clause. For example:
268 "cond": { "name": "Dave" }
272 This will dump all artists whose name is 'Dave'. Essentially
273 $artist_rs->search({ name => 'Dave' })->all.
275 Sometimes in a search clause it's useful to use scalar refs to do things like:
277 $artist_rs->search({ no1_singles => \'> no1_albums' })
279 This could be specified in the cond hash like so:
285 "cond": { "no1_singles": "\> no1_albums" }
289 So if the value starts with a backslash the value is made a scalar ref before
290 being passed to search.
294 An array of relationships to be used in the cond clause.
300 "cond": { "cds.position": { ">": 4 } },
305 Fetch all artists who have cds with position greater than 4.
309 Must be an array of hashes. Specifies which rels to also dump. For example:
318 "cond": { "position": "2" }
323 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
325 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
326 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
327 as in the set attributes. quantity is necessary for has_many relationships, but
328 not if using for belongs_to or might_have relationships.
332 Specifies whether to fetch has_many rels for this set. Must be a hash
333 containing keys fetch and quantity.
335 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
338 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
342 As with has_many but for might_have relationships. Quantity doesn't do anything
345 This value will be inherited by all fetches in this set. This is not true for
346 the has_many attribute.
350 In some cases your database information might be keys to values in some sort of
351 external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
352 to store blob information on the filesystem. In this case you may want the ability
353 to back up your external storage in the same way as your database data. The L</external>
354 attribute lets you specify a handler for this type of issue. For example:
363 "args": {"path":"__ATTR(photo_dir)__"}
369 This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
370 where the path to a file is specified by the C<file> field of the C<Photo> source.
371 We use the uninflated value of the field so you need to completely handle backup
372 and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
373 and you can create your own custom handlers by placing a '+' in the namespace:
375 "class": "+MyApp::Schema::SomeExternalStorage",
377 Although if possible I'd love to get patches to add some of the other common
378 types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
380 See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
382 =head1 RULE ATTRIBUTES
386 Same as with L</SET ATTRIBUTES>
390 Same as with L</SET ATTRIBUTES>
394 Same as with L</SET ATTRIBUTES>
398 Same as with L</SET ATTRIBUTES>
402 Same as with L</SET ATTRIBUTES>
404 =head1 RULE SUBSTITUTIONS
406 You can provide the following substitution patterns for your rule values. An
407 example of this might be:
412 "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
418 Provide a value from %ENV
422 Provide a value from L</config_attrs>
426 Create the path to a file from a list
430 Create the path to a directory from a list
438 =item Arguments: \%$attrs
440 =item Return Value: $fixture_object
444 Returns a new DBIx::Class::Fixture object. %attrs can have the following
451 required. must contain a valid path to the directory in which your .json
456 determines whether to be verbose
458 =item ignore_sql_errors:
460 ignore errors on import of DDL etc
464 A hash of information you can use to do replacements inside your configuration
465 sets. For example, if your set looks like:
473 "quantity": "__ATTR(quantity)__",
478 my $fixtures = DBIx::Class::Fixtures->new( {
479 config_dir => '/home/me/app/fixture_configs'
485 You may wish to do this if you want to let whoever runs the dumps have a bit
490 my $fixtures = DBIx::Class::Fixtures->new( {
491 config_dir => '/home/me/app/fixture_configs'
500 unless (ref $params eq 'HASH') {
501 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
504 unless ($params->{config_dir}) {
505 return DBIx::Class::Exception->throw('config_dir param not specified');
508 my $config_dir = dir($params->{config_dir});
509 unless (-e $params->{config_dir}) {
510 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
514 config_dir => $config_dir,
515 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
516 debug => $params->{debug} || 0,
517 ignore_sql_errors => $params->{ignore_sql_errors},
518 dumped_objects => {},
519 use_create => $params->{use_create} || 0,
520 config_attrs => $params->{config_attrs} || {},
528 =head2 available_config_sets
530 Returns a list of all the config sets found in the L</config_dir>. These will
531 be a list of the JSON-based files containing dump rules.
536 sub available_config_sets {
537 @config_sets = scalar(@config_sets) ? @config_sets : map {
540 -f $_ && $_=~/json$/;
541 } dir((shift)->config_dir)->children;
548 =item Arguments: \%$attrs
550 =item Return Value: 1
555 config => 'set_config.json', # config file to use. must be in the config
556 # directory specified in the constructor
557 schema => $source_dbic_schema,
558 directory => '/home/me/app/fixtures' # output directory
564 all => 1, # just dump everything that's in the schema
565 schema => $source_dbic_schema,
566 directory => '/home/me/app/fixtures' # output directory
569 In this case objects will be dumped to subdirectories in the specified
570 directory. For example:
572 /home/me/app/fixtures/artist/1.fix
573 /home/me/app/fixtures/artist/3.fix
574 /home/me/app/fixtures/producer/5.fix
576 schema and directory are required attributes. also, one of config or all must
579 Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
580 If this form is used your HashRef should conform to the structure rules defined
581 for the JSON representations.
589 unless (ref $params eq 'HASH') {
590 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
593 foreach my $param (qw/schema directory/) {
594 unless ($params->{$param}) {
595 return DBIx::Class::Exception->throw($param . ' param not specified');
599 if($params->{excludes} && !$params->{all}) {
600 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
603 my $schema = $params->{schema};
605 if ($params->{config}) {
606 $config = ref $params->{config} eq 'HASH' ?
610 my $config_file = $self->config_dir->file($params->{config});
611 $self->load_config_file($config_file);
613 } elsif ($params->{all}) {
614 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
616 might_have => { fetch => 0 },
617 has_many => { fetch => 0 },
618 belongs_to => { fetch => 0 },
621 { class => $_, quantity => 'all' };
627 DBIx::Class::Exception->throw('must pass config or set all');
630 my $output_dir = dir($params->{directory});
631 unless (-e $output_dir) {
632 $output_dir->mkpath ||
633 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
636 $self->msg("generating fixtures");
637 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
639 if (-e $tmp_output_dir) {
640 $self->msg("- clearing existing $tmp_output_dir");
641 $tmp_output_dir->rmtree;
643 $self->msg("- creating $tmp_output_dir");
644 $tmp_output_dir->mkpath;
646 # write version file (for the potential benefit of populate)
647 $tmp_output_dir->file('_dumper_version')
651 # write our current config set
652 $tmp_output_dir->file('_config_set')
654 ->print( Dumper $config );
656 $config->{rules} ||= {};
657 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
659 while ( my ($k,$v) = each %{ $config->{rules} } ) {
660 if ( my $source = eval { $schema->source($k) } ) {
661 $config->{rules}{$source->source_name} = $v;
665 foreach my $source (@sources) {
666 # apply rule to set if specified
667 my $rule = $config->{rules}->{$source->{class}};
668 $source = merge( $source, $rule ) if ($rule);
671 my $rs = $schema->resultset($source->{class});
673 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
674 # if value starts with \ assume it's meant to be passed as a scalar ref
675 # to dbic. ideally this would substitute deeply
678 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
679 : $source->{cond}->{$_}
680 } keys %{$source->{cond}}
684 $rs = $rs->search($source->{cond}, { join => $source->{join} })
687 $self->msg("- dumping $source->{class}");
689 my %source_options = ( set => { %{$config}, %{$source} } );
690 if ($source->{quantity}) {
691 $rs = $rs->search({}, { order_by => $source->{order_by} })
692 if $source->{order_by};
694 if ($source->{quantity} =~ /^\d+$/) {
695 $rs = $rs->search({}, { rows => $source->{quantity} });
696 } elsif ($source->{quantity} ne 'all') {
697 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
700 elsif ($source->{ids} && @{$source->{ids}}) {
701 my @ids = @{$source->{ids}};
702 my (@pks) = $rs->result_source->primary_columns;
703 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
704 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
707 DBIx::Class::Exception->throw('must specify either quantity or ids');
710 $source_options{set_dir} = $tmp_output_dir;
711 $self->dump_rs($rs, \%source_options );
714 # clear existing output dir
715 foreach my $child ($output_dir->children) {
716 if ($child->is_dir) {
717 next if ($child eq $tmp_output_dir);
718 if (grep { $_ =~ /\.fix/ } $child->children) {
721 } elsif ($child =~ /_dumper_version$/) {
726 $self->msg("- moving temp dir to $output_dir");
727 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
728 for $tmp_output_dir->children;
730 if (-e $output_dir) {
731 $self->msg("- clearing tmp dir $tmp_output_dir");
732 # delete existing fixture set
733 $tmp_output_dir->remove;
741 sub load_config_file {
742 my ($self, $config_file) = @_;
743 DBIx::Class::Exception->throw("config does not exist at $config_file")
744 unless -e $config_file;
746 my $config = Config::Any::JSON->load($config_file);
749 if (my $incs = $config->{includes}) {
751 DBIx::Class::Exception->throw(
752 'includes params of config must be an array ref of hashrefs'
753 ) unless ref $incs eq 'ARRAY';
755 foreach my $include_config (@$incs) {
756 DBIx::Class::Exception->throw(
757 'includes params of config must be an array ref of hashrefs'
758 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
760 my $include_file = $self->config_dir->file($include_config->{file});
762 DBIx::Class::Exception->throw("config does not exist at $include_file")
763 unless -e $include_file;
765 my $include = Config::Any::JSON->load($include_file);
766 $self->msg($include);
767 $config = merge( $config, $include );
769 delete $config->{includes};
773 return DBIx::Class::Exception->throw('config has no sets')
774 unless $config && $config->{sets} &&
775 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
777 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
778 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
779 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
785 my ($self, $rs, $params) = @_;
787 while (my $row = $rs->next) {
788 $self->dump_object($row, $params);
793 my ($self, $object, $params) = @_;
794 my $set = $params->{set};
796 my $v = Data::Visitor::Callback->new(
798 my ($visitor, $data) = @_;
801 my ( $self, $v ) = @_;
802 if (! defined($ENV{$v})) {
810 if(my $attr = $self->config_attrs->{$v}) {
817 my ($self, @args) = @_;
821 my ($self, @args) = @_;
826 my $subsre = join( '|', keys %$subs );
827 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
835 die 'no dir passed to dump_object' unless $params->{set_dir};
836 die 'no object passed to dump_object' unless $object;
838 my @inherited_attrs = @{$self->_inherited_attributes};
841 $object->get_column($_)
842 } $object->primary_columns;
844 my $key = join("\0", @pk_vals);
846 my $src = $object->result_source;
847 my $exists = $self->dumped_objects->{$src->name}{$key}++;
850 # write dir and gen filename
851 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
852 $source_dir->mkpath(0, 0777);
854 # strip dir separators from file name
855 my $file = $source_dir->file(
856 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
861 $self->msg('-- dumping ' . $file->stringify, 2);
862 my %ds = $object->get_columns;
864 if($set->{external}) {
865 foreach my $field (keys %{$set->{external}}) {
866 my $key = $ds{$field};
867 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
868 my $args = $set->{external}->{$field}->{args};
870 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
873 $ds{external}->{$field} =
874 encode_base64( $class
875 ->backup($key => $args));
879 # mess with dates if specified
880 if ($set->{datetime_relative}) {
881 my $formatter= $object->result_source->schema->storage->datetime_parser;
882 unless ($@ || !$formatter) {
884 if ($set->{datetime_relative} eq 'today') {
885 $dt = DateTime->today;
887 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
890 while (my ($col, $value) = each %ds) {
891 my $col_info = $object->result_source->column_info($col);
894 && $col_info->{_inflate_info}
895 && uc($col_info->{data_type}) eq 'DATETIME';
897 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
900 warn "datetime_relative not supported for this db driver at the moment";
904 # do the actual dumping
905 my $serialized = Dump(\%ds)->Out();
906 $file->openw->print($serialized);
909 # don't bother looking at rels unless we are actually planning to dump at least one type
910 my ($might_have, $belongs_to, $has_many) = map {
911 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
912 } qw/might_have belongs_to has_many/;
914 return unless $might_have
919 # dump rels of object
921 foreach my $name (sort $src->relationships) {
922 my $info = $src->relationship_info($name);
923 my $r_source = $src->related_source($name);
924 # if belongs_to or might_have with might_have param set or has_many with
925 # has_many param set then
927 ( $info->{attrs}{accessor} eq 'single' &&
928 (!$info->{attrs}{join_type} || $might_have)
930 || $info->{attrs}{accessor} eq 'filter'
932 ($info->{attrs}{accessor} eq 'multi' && $has_many)
934 my $related_rs = $object->related_resultset($name);
935 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
936 # these parts of the rule only apply to has_many rels
937 if ($rule && $info->{attrs}{accessor} eq 'multi') {
938 $related_rs = $related_rs->search(
940 { join => $rule->{join} }
941 ) if ($rule->{cond});
943 $related_rs = $related_rs->search(
945 { rows => $rule->{quantity} }
946 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
948 $related_rs = $related_rs->search(
950 { order_by => $rule->{order_by} }
951 ) if ($rule->{order_by});
954 if ($set->{has_many}{quantity} &&
955 $set->{has_many}{quantity} =~ /^\d+$/) {
956 $related_rs = $related_rs->search(
958 { rows => $set->{has_many}->{quantity} }
962 my %c_params = %{$params};
966 } grep { $set->{$_} } @inherited_attrs;
968 $c_params{set} = \%mock_set;
969 $c_params{set} = merge( $c_params{set}, $rule)
970 if $rule && $rule->{fetch};
972 $self->dump_rs($related_rs, \%c_params);
977 return unless $set && $set->{fetch};
978 foreach my $fetch (@{$set->{fetch}}) {
980 $fetch->{$_} = $set->{$_} foreach
981 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
982 my $related_rs = $object->related_resultset($fetch->{rel});
983 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
986 my $info = $object->result_source->relationship_info($fetch->{rel});
987 if ($info->{attrs}{accessor} eq 'multi') {
988 $fetch = merge( $fetch, $rule );
989 } elsif ($rule->{fetch}) {
990 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
994 die "relationship $fetch->{rel} does not exist for " . $src->source_name
995 unless ($related_rs);
997 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
998 # if value starts with \ assume it's meant to be passed as a scalar ref
999 # to dbic. ideally this would substitute deeply
1000 $fetch->{cond} = { map {
1001 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1002 : $fetch->{cond}->{$_}
1003 } keys %{$fetch->{cond}} };
1006 $related_rs = $related_rs->search(
1008 { join => $fetch->{join} }
1009 ) if $fetch->{cond};
1011 $related_rs = $related_rs->search(
1013 { rows => $fetch->{quantity} }
1014 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1015 $related_rs = $related_rs->search(
1017 { order_by => $fetch->{order_by} }
1018 ) if $fetch->{order_by};
1020 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
1024 sub _generate_schema {
1026 my $params = shift || {};
1028 $self->msg("\ncreating schema");
1030 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
1031 eval "require $schema_class";
1035 my $connection_details = $params->{connection_details};
1037 $namespace_counter++;
1039 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1040 Class::C3::Componentised->inject_base( $namespace => $schema_class );
1042 $pre_schema = $namespace->connect(@{$connection_details});
1043 unless( $pre_schema ) {
1044 return DBIx::Class::Exception->throw('connection details not valid');
1046 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
1047 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1048 my $dbh = $pre_schema->storage->dbh;
1051 $self->msg("- clearing DB of existing tables");
1052 $pre_schema->storage->txn_do(sub {
1053 $pre_schema->storage->with_deferred_fk_checks(sub {
1054 foreach my $table (@tables) {
1056 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1062 # import new ddl file to db
1063 my $ddl_file = $params->{ddl};
1064 $self->msg("- deploying schema using $ddl_file");
1065 my $data = _read_sql($ddl_file);
1067 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1068 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1070 $self->msg("- finished importing DDL into DB");
1072 # load schema object from our new DB
1073 $namespace_counter++;
1074 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1075 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1076 my $schema = $namespace2->connect(@{$connection_details});
1081 my $ddl_file = shift;
1083 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1084 my @data = split(/\n/, join('', <$fh>));
1085 @data = grep(!/^--/, @data);
1086 @data = split(/;/, join('', @data));
1088 @data = grep { $_ && $_ !~ /^-- / } @data;
1092 =head2 dump_config_sets
1094 Works just like L</dump> but instead of specifying a single json config set
1095 located in L</config_dir> we dump each set named in the C<configs> parameter.
1097 The parameters are the same as for L</dump> except instead of a C<directory>
1098 parameter we have a C<directory_template> which is a coderef expected to return
1099 a scalar that is a root directory where we will do the actual dumping. This
1100 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1103 $fixture->dump_all_config_sets({
1105 configs => [qw/one.json other.json/],
1106 directory_template => sub {
1107 my ($fixture, $params, $set) = @_;
1108 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1114 sub dump_config_sets {
1115 my ($self, $params) = @_;
1116 my $available_config_sets = delete $params->{configs};
1117 my $directory_template = delete $params->{directory_template} ||
1118 DBIx::Class::Exception->throw("'directory_template is required parameter");
1120 for my $set (@$available_config_sets) {
1121 my $localparams = $params;
1122 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1123 $localparams->{config} = $set;
1124 $self->dump($localparams);
1125 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1129 =head2 dump_all_config_sets
1131 my %local_params = %$params;
1132 my $local_self = bless { %$self }, ref($self);
1133 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1134 $local_params{config} = $set;
1135 $self->dump(\%local_params);
1138 Works just like L</dump> but instead of specifying a single json config set
1139 located in L</config_dir> we dump each set in turn to the specified directory.
1141 The parameters are the same as for L</dump> except instead of a C<directory>
1142 parameter we have a C<directory_template> which is a coderef expected to return
1143 a scalar that is a root directory where we will do the actual dumping. This
1144 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1147 $fixture->dump_all_config_sets({
1149 directory_template => sub {
1150 my ($fixture, $params, $set) = @_;
1151 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1157 sub dump_all_config_sets {
1158 my ($self, $params) = @_;
1159 $self->dump_config_sets({
1161 configs=>[$self->available_config_sets],
1169 =item Arguments: \%$attrs
1171 =item Return Value: 1
1175 $fixtures->populate( {
1176 # directory to look for fixtures in, as specified to dump
1177 directory => '/home/me/app/fixtures',
1180 ddl => '/home/me/app/sql/ddl.sql',
1182 # database to clear, deploy and then populate
1183 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1185 # DDL to deploy after populating records, ie. FK constraints
1186 post_ddl => '/home/me/app/sql/post_ddl.sql',
1188 # use CASCADE option when dropping tables
1191 # optional, set to 1 to run ddl but not populate
1194 # optional, set to 1 to run each fixture through ->create rather than have
1195 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1196 # that effects the value of column(s).
1199 # Dont try to clean the database, just populate over whats there. Requires
1200 # schema option. Use this if you want to handle removing old data yourself
1205 In this case the database app_dev will be cleared of all tables, then the
1206 specified DDL deployed to it, then finally all fixtures found in
1207 /home/me/app/fixtures will be added to it. populate will generate its own
1208 DBIx::Class schema from the DDL rather than being passed one to use. This is
1209 better as custom insert methods are avoided which can get in the way. In
1210 some cases you might not have a DDL, and so this method will eventually allow a
1211 $schema object to be passed instead.
1213 If needed, you can specify a post_ddl attribute which is a DDL to be applied
1214 after all the fixtures have been added to the database. A good use of this
1215 option would be to add foreign key constraints since databases like PostgreSQL
1216 cannot disable foreign key checks.
1218 If your tables have foreign key constraints you may want to use the cascade
1219 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1222 C<directory> is a required attribute.
1224 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1225 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1228 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1229 attribute containing the connected schema you wish to operate on and set the
1230 C<no_deploy> attribute.
1237 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1238 unless ref $params eq 'HASH';
1240 DBIx::Class::Exception->throw('directory param not specified')
1241 unless $params->{directory};
1243 my $fixture_dir = dir(delete $params->{directory});
1244 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1245 unless -d $fixture_dir;
1250 if ($params->{ddl} && $params->{connection_details}) {
1251 $ddl_file = file(delete $params->{ddl});
1252 unless (-e $ddl_file) {
1253 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1255 unless (ref $params->{connection_details} eq 'ARRAY') {
1256 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1258 $schema = $self->_generate_schema({
1260 connection_details => delete $params->{connection_details},
1263 } elsif ($params->{schema} && $params->{no_deploy}) {
1264 $schema = $params->{schema};
1266 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1270 return 1 if $params->{no_populate};
1272 $self->msg("\nimporting fixtures");
1273 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
1274 my $version_file = file($fixture_dir, '_dumper_version');
1275 my $config_set_path = file($fixture_dir, '_config_set');
1276 my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
1278 my $v = Data::Visitor::Callback->new(
1279 plain_value => sub {
1280 my ($visitor, $data) = @_;
1283 my ( $self, $v ) = @_;
1284 if (! defined($ENV{$v})) {
1291 my ($self, $v) = @_;
1292 if(my $attr = $self->config_attrs->{$v}) {
1299 my ($self, @args) = @_;
1303 my ($self, @args) = @_;
1308 my $subsre = join( '|', keys %$subs );
1309 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1315 $v->visit( $config_set );
1320 %sets_by_src = map { delete($_->{class}) => $_ }
1321 @{$config_set->{sets}}
1324 # DBIx::Class::Exception->throw('no version file found');
1325 # unless -e $version_file;
1327 if (-e $tmp_fixture_dir) {
1328 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1329 $tmp_fixture_dir->rmtree;
1331 $self->msg("- creating temp dir");
1332 $tmp_fixture_dir->mkpath();
1333 for ( map { $schema->source($_)->from } $schema->sources) {
1334 my $from_dir = $fixture_dir->subdir($_);
1335 next unless -e $from_dir;
1336 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1339 unless (-d $tmp_fixture_dir) {
1340 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1344 my $formatter = $schema->storage->datetime_parser;
1345 unless ($@ || !$formatter) {
1347 if ($params->{datetime_relative_to}) {
1348 $callbacks{'DateTime::Duration'} = sub {
1349 $params->{datetime_relative_to}->clone->add_duration($_);
1352 $callbacks{'DateTime::Duration'} = sub {
1353 $formatter->format_datetime(DateTime->today->add_duration($_))
1356 $callbacks{object} ||= "visit_ref";
1357 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
1360 $schema->storage->txn_do(sub {
1361 $schema->storage->with_deferred_fk_checks(sub {
1362 foreach my $source (sort $schema->sources) {
1363 $self->msg("- adding " . $source);
1364 my $rs = $schema->resultset($source);
1365 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1366 next unless (-e $source_dir);
1368 while (my $file = $source_dir->next) {
1369 next unless ($file =~ /\.fix$/);
1370 next if $file->is_dir;
1371 my $contents = $file->slurp;
1374 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1375 if(my $external = delete $HASH1->{external}) {
1376 my @fields = keys %{$sets_by_src{$source}->{external}};
1377 foreach my $field(@fields) {
1378 my $key = $HASH1->{$field};
1379 my $content = decode_base64 ($external->{$field});
1380 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1381 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1382 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1384 $class->restore($key, $content, $args);
1387 if ( $params->{use_create} ) {
1388 $rs->create( $HASH1 );
1390 push(@rows, $HASH1);
1393 $rs->populate(\@rows) if scalar(@rows);
1397 $self->do_post_ddl( {
1399 post_ddl=>$params->{post_ddl}
1400 } ) if $params->{post_ddl};
1402 $self->msg("- fixtures imported");
1403 $self->msg("- cleaning up");
1404 $tmp_fixture_dir->rmtree;
1409 my ($self, $params) = @_;
1411 my $schema = $params->{schema};
1412 my $data = _read_sql($params->{post_ddl});
1414 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1415 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1417 $self->msg("- finished importing post-populate DDL into DB");
1422 my $subject = shift || return;
1423 my $level = shift || 1;
1424 return unless $self->debug >= $level;
1426 print Dumper($subject);
1428 print $subject . "\n";
1434 Luke Saunders <luke@shadowcatsystems.co.uk>
1436 Initial development sponsored by and (c) Takkle, Inc. 2007
1440 Ash Berlin <ash@shadowcatsystems.co.uk>
1442 Matt S. Trout <mst@shadowcatsystems.co.uk>
1444 Drew Taylor <taylor.andrew.j@gmail.com>
1446 Frank Switalski <fswitalski@gmail.com>
1450 This library is free software under the same license as perl itself