1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file);
10 use File::Spec::Functions 'catfile', 'catdir';
11 use Config::Any::JSON;
12 use Data::Dump::Streamer;
13 use Data::Visitor::Callback;
15 use File::Copy::Recursive qw/dircopy/;
16 use File::Copy qw/move/;
17 use Hash::Merge qw( merge );
19 use Class::C3::Componentised;
21 use File::Temp qw/tempdir/;
23 use base qw(Class::Accessor::Grouped);
25 our $namespace_counter = 0;
27 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
28 _inherited_attributes debug schema_class dumped_objects config_attrs/);
30 our $VERSION = '1.001018';
34 DBIx::Class::Fixtures - Dump data and repopulate a database using rules
38 use DBIx::Class::Fixtures;
42 my $fixtures = DBIx::Class::Fixtures->new({
43 config_dir => '/home/me/app/fixture_configs'
47 config => 'set_config.json',
48 schema => $source_dbic_schema,
49 directory => '/home/me/app/fixtures'
53 directory => '/home/me/app/fixtures',
54 ddl => '/home/me/app/sql/ddl.sql',
55 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
56 post_ddl => '/home/me/app/sql/post_ddl.sql',
61 Dump fixtures from source database to filesystem then import to another
62 database (with same schema) at any time. Use as a constant dataset for running
63 tests against or for populating development databases when impractical to use
64 production clones. Describe fixture set using relations and conditions based on
65 your DBIx::Class schema.
67 =head1 DEFINE YOUR FIXTURE SET
69 Fixture sets are currently defined in .json files which must reside in your
70 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
71 describe which data to pull and dump from the source database.
94 This will fetch artists with primary keys 1 and 3, the producer with primary
95 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
96 rel from Producer to Artist.
98 The top level attributes are as follows:
102 Sets must be an array of hashes, as in the example given above. Each set
103 defines a set of objects to be included in the fixtures. For details on valid
104 set attributes see L</SET ATTRIBUTES> below.
108 Rules place general conditions on classes. For example if whenever an artist
109 was dumped you also wanted all of their cds dumped too, then you could use a
110 rule to specify this. For example:
139 In this case all the cds of artists 1, 3 and all producer 5's artists will be
140 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
141 to CD. This is equivalent to:
168 rules must be a hash keyed by class name.
174 To prevent repetition between configs you can include other configs. For
183 { "file": "base.json" }
187 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
188 which is the name of another config file in the same directory. The original
189 config is merged with its includes using L<Hash::Merge>.
191 =head2 datetime_relative
193 Only available for MySQL and PostgreSQL at the moment, must be a value that
194 DateTime::Format::* can parse. For example:
198 "class": "RecentItems",
201 "datetime_relative": "2007-10-30 00:00:00"
204 This will work when dumping from a MySQL database and will cause any datetime
205 fields (where datatype => 'datetime' in the column def of the schema class) to
206 be dumped as a DateTime::Duration object relative to the date specified in the
207 datetime_relative value. For example if the RecentItem object had a date field
208 set to 2007-10-25, then when the fixture is imported the field will be set to 5
209 days in the past relative to the current time.
213 Specifies whether to automatically dump might_have relationships. Should be a
214 hash with one attribute - fetch. Set fetch to 1 or 0.
217 "might_have": { "fetch": 1 },
230 Note: belongs_to rels are automatically dumped whether you like it or not, this
231 is to avoid FKs to nowhere when importing. General rules on has_many rels are
232 not accepted at this top level, but you can turn them on for individual sets -
233 see L</SET ATTRIBUTES>.
235 =head1 SET ATTRIBUTES
239 Required attribute. Specifies the DBIx::Class object class you wish to dump.
243 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
244 If the id is not in the source db then it just won't get dumped, no warnings or
249 Must be either an integer or the string 'all'. Specifying an integer will
250 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
251 will cause the rows attribute to be left off and for all matching rows to be
252 dumped. There's no randomising here, it's just the first x rows.
256 A hash specifying the conditions dumped objects must match. Essentially this is
257 a JSON representation of a DBIx::Class search clause. For example:
263 "cond": { "name": "Dave" }
267 This will dump all artists whose name is 'Dave'. Essentially
268 $artist_rs->search({ name => 'Dave' })->all.
270 Sometimes in a search clause it's useful to use scalar refs to do things like:
272 $artist_rs->search({ no1_singles => \'> no1_albums' })
274 This could be specified in the cond hash like so:
280 "cond": { "no1_singles": "\> no1_albums" }
284 So if the value starts with a backslash the value is made a scalar ref before
285 being passed to search.
289 An array of relationships to be used in the cond clause.
295 "cond": { "cds.position": { ">": 4 } },
300 Fetch all artists who have cds with position greater than 4.
304 Must be an array of hashes. Specifies which rels to also dump. For example:
313 "cond": { "position": "2" }
318 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
320 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
321 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
322 as in the set attributes. quantity is necessary for has_many relationships, but
323 not for belongs_to or might_have relationships.
327 Specifies whether to fetch has_many rels for this set. Must be a hash
328 containing keys fetch and quantity.
330 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
333 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
337 As with has_many but for might_have relationships. Quantity doesn't do anything
340 This value will be inherited by all fetches in this set. This is not true for
341 the has_many attribute.
345 In some cases your database information might be keys to values in some sort of
346 external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
347 to store blob information on the filesystem. In this case you may wish the ability
348 to back up your external storage in the same way as your database data. The L</external>
349 attribute lets you specify a handler for this type of issue. For example:
358 "args": {"path":"__ATTR(photo_dir)__"}
364 This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
365 where the path to a file is specified by the C<file> field of the C<Photo> source.
366 We use the uninflated value of the field so you need to completely handle backup
367 and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
368 and you can create your own custom handlers by placing a '+' in the namespace:
370 "class": "+MyApp::Schema::SomeExternalStorage",
372 Although if possible I'd love to get patches to add some of the other common
373 types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
375 See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
377 =head1 RULE ATTRIBUTES
381 Same as with L</SET ATTRIBUTES>
385 Same as with L</SET ATTRIBUTES>
389 Same as with L</SET ATTRIBUTES>
393 Same as with L</SET ATTRIBUTES>
397 Same as with L</SET ATTRIBUTES>
399 =head1 RULE SUBSTITUTIONS
401 You can provide the following substitution patterns for your rule values. An
402 example of this might be:
407 "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
413 Provide a value from %ENV
417 Provide a value from L</config_attrs>
421 Create the path to a file from a list
425 Create the path to a directory from a list
433 =item Arguments: \%$attrs
435 =item Return Value: $fixture_object
439 Returns a new DBIx::Class::Fixture object. %attrs can have the following
446 required. must contain a valid path to the directory in which your .json
451 determines whether to be verbose
453 =item ignore_sql_errors:
455 ignore errors on import of DDL etc
459 A hash of information you can use to do replacements inside your configuration
460 sets. For example, if your set looks like:
468 "quantity": "__ATTR(quantity)__",
473 my $fixtures = DBIx::Class::Fixtures->new( {
474 config_dir => '/home/me/app/fixture_configs'
480 You may wish to do this if you want to let whoever runs the dumps have a bit
485 my $fixtures = DBIx::Class::Fixtures->new( {
486 config_dir => '/home/me/app/fixture_configs'
495 unless (ref $params eq 'HASH') {
496 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
499 unless ($params->{config_dir}) {
500 return DBIx::Class::Exception->throw('config_dir param not specified');
503 my $config_dir = dir($params->{config_dir});
504 unless (-e $params->{config_dir}) {
505 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
509 config_dir => $config_dir,
510 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
511 debug => $params->{debug} || 0,
512 ignore_sql_errors => $params->{ignore_sql_errors},
513 dumped_objects => {},
514 use_create => $params->{use_create} || 0,
515 config_attrs => $params->{config_attrs} || {},
523 =head2 available_config_sets
525 Returns a list of all the config sets found in the L</config_dir>. These will
526 be a list of the json based files containing dump rules.
531 sub available_config_sets {
532 @config_sets = scalar(@config_sets) ? @config_sets : map {
535 -f $_ && $_=~/json$/;
536 } dir((shift)->config_dir)->children;
543 =item Arguments: \%$attrs
545 =item Return Value: 1
550 config => 'set_config.json', # config file to use. must be in the config
551 # directory specified in the constructor
552 schema => $source_dbic_schema,
553 directory => '/home/me/app/fixtures' # output directory
559 all => 1, # just dump everything that's in the schema
560 schema => $source_dbic_schema,
561 directory => '/home/me/app/fixtures' # output directory
564 In this case objects will be dumped to subdirectories in the specified
565 directory. For example:
567 /home/me/app/fixtures/artist/1.fix
568 /home/me/app/fixtures/artist/3.fix
569 /home/me/app/fixtures/producer/5.fix
571 schema and directory are required attributes. also, one of config or all must
574 Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
575 If this form is used your HashRef should conform to the structure rules defined
576 for the JSON representations.
584 unless (ref $params eq 'HASH') {
585 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
588 foreach my $param (qw/schema directory/) {
589 unless ($params->{$param}) {
590 return DBIx::Class::Exception->throw($param . ' param not specified');
594 if($params->{excludes} && !$params->{all}) {
595 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
598 my $schema = $params->{schema};
600 if ($params->{config}) {
601 $config = ref $params->{config} eq 'HASH' ?
605 my $config_file = $self->config_dir->file($params->{config});
606 $self->load_config_file($config_file);
608 } elsif ($params->{all}) {
609 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
611 might_have => { fetch => 0 },
612 has_many => { fetch => 0 },
613 belongs_to => { fetch => 0 },
616 { class => $_, quantity => 'all' };
622 DBIx::Class::Exception->throw('must pass config or set all');
625 my $output_dir = dir($params->{directory});
626 unless (-e $output_dir) {
627 $output_dir->mkpath ||
628 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
631 $self->msg("generating fixtures");
632 my $tmp_output_dir = dir(tmpdir());
634 if (-e $tmp_output_dir) {
635 $self->msg("- clearing existing $tmp_output_dir");
636 $tmp_output_dir->rmtree;
638 $self->msg("- creating $tmp_output_dir");
639 $tmp_output_dir->mkpath;
641 # write version file (for the potential benefit of populate)
642 $tmp_output_dir->file('_dumper_version')
646 # write our current config set
647 $tmp_output_dir->file('_config_set')
649 ->print( Dumper $config );
651 $config->{rules} ||= {};
652 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
654 while ( my ($k,$v) = each %{ $config->{rules} } ) {
655 if ( my $source = eval { $schema->source($k) } ) {
656 $config->{rules}{$source->source_name} = $v;
660 foreach my $source (@sources) {
661 # apply rule to set if specified
662 my $rule = $config->{rules}->{$source->{class}};
663 $source = merge( $source, $rule ) if ($rule);
666 my $rs = $schema->resultset($source->{class});
668 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
669 # if value starts with \ assume it's meant to be passed as a scalar ref
670 # to dbic. ideally this would substitute deeply
673 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
674 : $source->{cond}->{$_}
675 } keys %{$source->{cond}}
679 $rs = $rs->search($source->{cond}, { join => $source->{join} })
682 $self->msg("- dumping $source->{class}");
684 my %source_options = ( set => { %{$config}, %{$source} } );
685 if ($source->{quantity}) {
686 $rs = $rs->search({}, { order_by => $source->{order_by} })
687 if $source->{order_by};
689 if ($source->{quantity} =~ /^\d+$/) {
690 $rs = $rs->search({}, { rows => $source->{quantity} });
691 } elsif ($source->{quantity} ne 'all') {
692 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
695 elsif ($source->{ids} && @{$source->{ids}}) {
696 my @ids = @{$source->{ids}};
697 my (@pks) = $rs->result_source->primary_columns;
698 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
699 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
702 DBIx::Class::Exception->throw('must specify either quantity or ids');
705 $source_options{set_dir} = $tmp_output_dir;
706 $self->dump_rs($rs, \%source_options );
709 # clear existing output dir
710 foreach my $child ($output_dir->children) {
711 if ($child->is_dir) {
712 next if ($child eq $tmp_output_dir);
713 if (grep { $_ =~ /\.fix/ } $child->children) {
716 } elsif ($child =~ /_dumper_version$/) {
721 $self->msg("- moving temp dir to $output_dir");
722 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
723 for $tmp_output_dir->children;
725 if (-e $output_dir) {
726 $self->msg("- clearing tmp dir $tmp_output_dir");
727 # delete existing fixture set
728 $tmp_output_dir->remove;
736 sub load_config_file {
737 my ($self, $config_file) = @_;
738 DBIx::Class::Exception->throw("config does not exist at $config_file")
739 unless -e $config_file;
741 my $config = Config::Any::JSON->load($config_file);
744 if (my $incs = $config->{includes}) {
746 DBIx::Class::Exception->throw(
747 'includes params of config must be an array ref of hashrefs'
748 ) unless ref $incs eq 'ARRAY';
750 foreach my $include_config (@$incs) {
751 DBIx::Class::Exception->throw(
752 'includes params of config must be an array ref of hashrefs'
753 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
755 my $include_file = $self->config_dir->file($include_config->{file});
757 DBIx::Class::Exception->throw("config does not exist at $include_file")
758 unless -e $include_file;
760 my $include = Config::Any::JSON->load($include_file);
761 $self->msg($include);
762 $config = merge( $config, $include );
764 delete $config->{includes};
768 return DBIx::Class::Exception->throw('config has no sets')
769 unless $config && $config->{sets} &&
770 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
772 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
773 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
774 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
780 my ($self, $rs, $params) = @_;
782 while (my $row = $rs->next) {
783 $self->dump_object($row, $params);
788 my ($self, $object, $params) = @_;
789 my $set = $params->{set};
791 my $v = Data::Visitor::Callback->new(
793 my ($visitor, $data) = @_;
796 my ( $self, $v ) = @_;
797 if (! defined($ENV{$v})) {
805 if(my $attr = $self->config_attrs->{$v}) {
812 my ($self, @args) = @_;
816 my ($self, @args) = @_;
821 my $subsre = join( '|', keys %$subs );
822 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
830 die 'no dir passed to dump_object' unless $params->{set_dir};
831 die 'no object passed to dump_object' unless $object;
833 my @inherited_attrs = @{$self->_inherited_attributes};
836 $object->get_column($_)
837 } $object->primary_columns;
839 my $key = join("\0", @pk_vals);
841 my $src = $object->result_source;
842 my $exists = $self->dumped_objects->{$src->name}{$key}++;
845 # write dir and gen filename
846 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
847 $source_dir->mkpath(0, 0777);
849 # strip dir separators from file name
850 my $file = $source_dir->file(
851 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
856 $self->msg('-- dumping ' . $file->stringify, 2);
857 my %ds = $object->get_columns;
859 if($set->{external}) {
860 foreach my $field (keys %{$set->{external}}) {
861 my $key = $ds{$field};
862 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
863 my $args = $set->{external}->{$field}->{args};
865 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
868 $ds{external}->{$field} =
869 encode_base64( $class
870 ->backup($key => $args));
874 # mess with dates if specified
875 if ($set->{datetime_relative}) {
876 my $formatter= $object->result_source->schema->storage->datetime_parser;
877 unless ($@ || !$formatter) {
879 if ($set->{datetime_relative} eq 'today') {
880 $dt = DateTime->today;
882 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
885 while (my ($col, $value) = each %ds) {
886 my $col_info = $object->result_source->column_info($col);
889 && $col_info->{_inflate_info}
891 (uc($col_info->{data_type}) eq 'DATETIME')
892 or (uc($col_info->{data_type}) eq 'DATE')
893 or (uc($col_info->{data_type}) eq 'TIME')
894 or (uc($col_info->{data_type}) eq 'TIMESTAMP')
895 or (uc($col_info->{data_type}) eq 'INTERVAL')
898 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
901 warn "datetime_relative not supported for this db driver at the moment";
905 # do the actual dumping
906 my $serialized = Dump(\%ds)->Out();
907 $file->openw->print($serialized);
910 # don't bother looking at rels unless we are actually planning to dump at least one type
911 my ($might_have, $belongs_to, $has_many) = map {
912 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
913 } qw/might_have belongs_to has_many/;
915 return unless $might_have
920 # dump rels of object
922 foreach my $name (sort $src->relationships) {
923 my $info = $src->relationship_info($name);
924 my $r_source = $src->related_source($name);
925 # if belongs_to or might_have with might_have param set or has_many with
926 # has_many param set then
928 ( $info->{attrs}{accessor} eq 'single' &&
929 (!$info->{attrs}{join_type} || $might_have)
931 || $info->{attrs}{accessor} eq 'filter'
933 ($info->{attrs}{accessor} eq 'multi' && $has_many)
935 my $related_rs = $object->related_resultset($name);
936 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
937 # these parts of the rule only apply to has_many rels
938 if ($rule && $info->{attrs}{accessor} eq 'multi') {
939 $related_rs = $related_rs->search(
941 { join => $rule->{join} }
942 ) if ($rule->{cond});
944 $related_rs = $related_rs->search(
946 { rows => $rule->{quantity} }
947 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
949 $related_rs = $related_rs->search(
951 { order_by => $rule->{order_by} }
952 ) if ($rule->{order_by});
955 if ($set->{has_many}{quantity} &&
956 $set->{has_many}{quantity} =~ /^\d+$/) {
957 $related_rs = $related_rs->search(
959 { rows => $set->{has_many}->{quantity} }
963 my %c_params = %{$params};
967 } grep { $set->{$_} } @inherited_attrs;
969 $c_params{set} = \%mock_set;
970 $c_params{set} = merge( $c_params{set}, $rule)
971 if $rule && $rule->{fetch};
973 $self->dump_rs($related_rs, \%c_params);
978 return unless $set && $set->{fetch};
979 foreach my $fetch (@{$set->{fetch}}) {
981 $fetch->{$_} = $set->{$_} foreach
982 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
983 my $related_rs = $object->related_resultset($fetch->{rel});
984 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
987 my $info = $object->result_source->relationship_info($fetch->{rel});
988 if ($info->{attrs}{accessor} eq 'multi') {
989 $fetch = merge( $fetch, $rule );
990 } elsif ($rule->{fetch}) {
991 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
995 die "relationship $fetch->{rel} does not exist for " . $src->source_name
996 unless ($related_rs);
998 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
999 # if value starts with \ assume it's meant to be passed as a scalar ref
1000 # to dbic. ideally this would substitute deeply
1001 $fetch->{cond} = { map {
1002 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1003 : $fetch->{cond}->{$_}
1004 } keys %{$fetch->{cond}} };
1007 $related_rs = $related_rs->search(
1009 { join => $fetch->{join} }
1010 ) if $fetch->{cond};
1012 $related_rs = $related_rs->search(
1014 { rows => $fetch->{quantity} }
1015 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1016 $related_rs = $related_rs->search(
1018 { order_by => $fetch->{order_by} }
1019 ) if $fetch->{order_by};
1021 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
1025 sub _generate_schema {
1027 my $params = shift || {};
1029 $self->msg("\ncreating schema");
1031 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
1032 eval "require $schema_class";
1036 my $connection_details = $params->{connection_details};
1038 $namespace_counter++;
1040 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1041 Class::C3::Componentised->inject_base( $namespace => $schema_class );
1043 $pre_schema = $namespace->connect(@{$connection_details});
1044 unless( $pre_schema ) {
1045 return DBIx::Class::Exception->throw('connection details not valid');
1047 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
1048 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1049 my $dbh = $pre_schema->storage->dbh;
1052 $self->msg("- clearing DB of existing tables");
1053 $pre_schema->storage->txn_do(sub {
1054 $pre_schema->storage->with_deferred_fk_checks(sub {
1055 foreach my $table (@tables) {
1057 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1063 # import new ddl file to db
1064 my $ddl_file = $params->{ddl};
1065 $self->msg("- deploying schema using $ddl_file");
1066 my $data = _read_sql($ddl_file);
1068 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1069 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1071 $self->msg("- finished importing DDL into DB");
1073 # load schema object from our new DB
1074 $namespace_counter++;
1075 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1076 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1077 my $schema = $namespace2->connect(@{$connection_details});
1082 my $ddl_file = shift;
1084 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1085 my @data = split(/\n/, join('', <$fh>));
1086 @data = grep(!/^--/, @data);
1087 @data = split(/;/, join('', @data));
1089 @data = grep { $_ && $_ !~ /^-- / } @data;
1093 =head2 dump_config_sets
1095 Works just like L</dump> but instead of specifying a single json config set
1096 located in L</config_dir> we dump each set named in the C<configs> parameter.
1098 The parameters are the same as for L</dump> except instead of a C<directory>
1099 parameter we have a C<directory_template> which is a coderef expected to return
1100 a scalar that is a root directory where we will do the actual dumping. This
1101 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1104 $fixture->dump_all_config_sets({
1106 configs => [qw/one.json other.json/],
1107 directory_template => sub {
1108 my ($fixture, $params, $set) = @_;
1109 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1115 sub dump_config_sets {
1116 my ($self, $params) = @_;
1117 my $available_config_sets = delete $params->{configs};
1118 my $directory_template = delete $params->{directory_template} ||
1119 DBIx::Class::Exception->throw("'directory_template is required parameter");
1121 for my $set (@$available_config_sets) {
1122 my $localparams = $params;
1123 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1124 $localparams->{config} = $set;
1125 $self->dump($localparams);
1126 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1130 =head2 dump_all_config_sets
1132 my %local_params = %$params;
1133 my $local_self = bless { %$self }, ref($self);
1134 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1135 $local_params{config} = $set;
1136 $self->dump(\%local_params);
1139 Works just like L</dump> but instead of specifying a single json config set
1140 located in L</config_dir> we dump each set in turn to the specified directory.
1142 The parameters are the same as for L</dump> except instead of a C<directory>
1143 parameter we have a C<directory_template> which is a coderef expected to return
1144 a scalar that is a root directory where we will do the actual dumping. This
1145 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1148 $fixture->dump_all_config_sets({
1150 directory_template => sub {
1151 my ($fixture, $params, $set) = @_;
1152 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1158 sub dump_all_config_sets {
1159 my ($self, $params) = @_;
1160 $self->dump_config_sets({
1162 configs=>[$self->available_config_sets],
1170 =item Arguments: \%$attrs
1172 =item Return Value: 1
1176 $fixtures->populate( {
1177 # directory to look for fixtures in, as specified to dump
1178 directory => '/home/me/app/fixtures',
1181 ddl => '/home/me/app/sql/ddl.sql',
1183 # database to clear, deploy and then populate
1184 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1186 # DDL to deploy after populating records, ie. FK constraints
1187 post_ddl => '/home/me/app/sql/post_ddl.sql',
1189 # use CASCADE option when dropping tables
1192 # optional, set to 1 to run ddl but not populate
1195 # optional, set to 1 to run each fixture through ->create rather than have
1196 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1197 # that effects the value of column(s).
1200 # Dont try to clean the database, just populate over whats there. Requires
1201 # schema option. Use this if you want to handle removing old data yourself
1206 In this case the database app_dev will be cleared of all tables, then the
1207 specified DDL deployed to it, then finally all fixtures found in
1208 /home/me/app/fixtures will be added to it. populate will generate its own
1209 DBIx::Class schema from the DDL rather than being passed one to use. This is
1210 better as custom insert methods are avoided which can get in the way. In
1211 some cases you might not have a DDL, and so this method will eventually allow a
1212 $schema object to be passed instead.
1214 If needed, you can specify a post_ddl attribute which is a DDL to be applied
1215 after all the fixtures have been added to the database. A good use of this
1216 option would be to add foreign key constraints since databases like Postgresql
1217 cannot disable foreign key checks.
1219 If your tables have foreign key constraints you may want to use the cascade
1220 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1223 C<directory> is a required attribute.
1225 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1226 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1229 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1230 attribute containing the connected schema you wish to operate on and set the
1231 C<no_deploy> attribute.
1238 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1239 unless ref $params eq 'HASH';
1241 DBIx::Class::Exception->throw('directory param not specified')
1242 unless $params->{directory};
1244 my $fixture_dir = dir(delete $params->{directory});
1245 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1246 unless -d $fixture_dir;
1251 if ($params->{ddl} && $params->{connection_details}) {
1252 $ddl_file = file(delete $params->{ddl});
1253 unless (-e $ddl_file) {
1254 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1256 unless (ref $params->{connection_details} eq 'ARRAY') {
1257 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1259 $schema = $self->_generate_schema({
1261 connection_details => delete $params->{connection_details},
1264 } elsif ($params->{schema} && $params->{no_deploy}) {
1265 $schema = $params->{schema};
1267 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1271 return 1 if $params->{no_populate};
1273 $self->msg("\nimporting fixtures");
1274 my $tmp_fixture_dir = dir(tmpdir());
1275 my $version_file = file($fixture_dir, '_dumper_version');
1276 my $config_set_path = file($fixture_dir, '_config_set');
1277 my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
1279 my $v = Data::Visitor::Callback->new(
1280 plain_value => sub {
1281 my ($visitor, $data) = @_;
1284 my ( $self, $v ) = @_;
1285 if (! defined($ENV{$v})) {
1292 my ($self, $v) = @_;
1293 if(my $attr = $self->config_attrs->{$v}) {
1300 my ($self, @args) = @_;
1304 my ($self, @args) = @_;
1309 my $subsre = join( '|', keys %$subs );
1310 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1316 $v->visit( $config_set );
1321 %sets_by_src = map { delete($_->{class}) => $_ }
1322 @{$config_set->{sets}}
1325 # DBIx::Class::Exception->throw('no version file found');
1326 # unless -e $version_file;
1328 if (-e $tmp_fixture_dir) {
1329 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1330 $tmp_fixture_dir->rmtree;
1332 $self->msg("- creating temp dir");
1333 $tmp_fixture_dir->mkpath();
1334 for ( map { $schema->source($_)->from } $schema->sources) {
1335 my $from_dir = $fixture_dir->subdir($_);
1336 next unless -e $from_dir;
1337 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1340 unless (-d $tmp_fixture_dir) {
1341 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1345 my $formatter = $schema->storage->datetime_parser;
1346 unless ($@ || !$formatter) {
1348 if ($params->{datetime_relative_to}) {
1349 $callbacks{'DateTime::Duration'} = sub {
1350 $params->{datetime_relative_to}->clone->add_duration($_);
1353 $callbacks{'DateTime::Duration'} = sub {
1354 $formatter->format_datetime(DateTime->today->add_duration($_))
1357 $callbacks{object} ||= "visit_ref";
1358 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
1361 $schema->storage->txn_do(sub {
1362 $schema->storage->with_deferred_fk_checks(sub {
1363 foreach my $source (sort $schema->sources) {
1364 $self->msg("- adding " . $source);
1365 my $rs = $schema->resultset($source);
1366 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1367 next unless (-e $source_dir);
1369 while (my $file = $source_dir->next) {
1370 next unless ($file =~ /\.fix$/);
1371 next if $file->is_dir;
1372 my $contents = $file->slurp;
1375 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1376 if(my $external = delete $HASH1->{external}) {
1377 my @fields = keys %{$sets_by_src{$source}->{external}};
1378 foreach my $field(@fields) {
1379 my $key = $HASH1->{$field};
1380 my $content = decode_base64 ($external->{$field});
1381 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1382 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1383 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1385 $class->restore($key, $content, $args);
1388 if ( $params->{use_create} ) {
1389 $rs->create( $HASH1 );
1391 push(@rows, $HASH1);
1394 $rs->populate(\@rows) if scalar(@rows);
1396 ## Now we need to do some db specific cleanup
1397 ## this probably belongs in a more isolated space. Right now this is
1398 ## to just handle postgresql SERIAL types that use Sequences
1400 my $table = $rs->result_source->name;
1401 for my $column(my @columns = $rs->result_source->columns) {
1402 my $info = $rs->result_source->column_info($column);
1403 if(my $sequence = $info->{sequence}) {
1404 $self->msg("- updating sequence $sequence");
1405 $rs->result_source->storage->dbh_do(sub {
1406 my ($storage, $dbh, @cols) = @_;
1407 $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
1408 my $sth = $dbh->prepare($sql);
1409 my $rv = $sth->execute or die $sth->errstr;
1410 $self->msg("- $sql");
1418 $self->do_post_ddl( {
1420 post_ddl=>$params->{post_ddl}
1421 } ) if $params->{post_ddl};
1423 $self->msg("- fixtures imported");
1424 $self->msg("- cleaning up");
1425 $tmp_fixture_dir->rmtree;
1430 my ($self, $params) = @_;
1432 my $schema = $params->{schema};
1433 my $data = _read_sql($params->{post_ddl});
1435 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1436 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1438 $self->msg("- finished importing post-populate DDL into DB");
1443 my $subject = shift || return;
1444 my $level = shift || 1;
1445 return unless $self->debug >= $level;
1447 print Dumper($subject);
1449 print $subject . "\n";
1455 Luke Saunders <luke@shadowcatsystems.co.uk>
1457 Initial development sponsored by and (c) Takkle, Inc. 2007
1461 Ash Berlin <ash@shadowcatsystems.co.uk>
1463 Matt S. Trout <mst@shadowcatsystems.co.uk>
1465 Drew Taylor <taylor.andrew.j@gmail.com>
1467 Frank Switalski <fswitalski@gmail.com>
1471 This library is free software under the same license as perl itself