1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
10 use Data::Dump::Streamer;
11 use Data::Visitor::Callback;
12 use Hash::Merge qw( merge );
14 use Class::C3::Componentised;
17 use File::Temp qw/tempdir/;
19 use base qw(Class::Accessor::Grouped);
# Counter used to generate unique package names for schemas created at
# runtime (see _generate_schema); incremented for every generated schema.
21 our $namespace_counter = 0;
# Simple read/write accessors supplied by Class::Accessor::Grouped.
23 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
24 _inherited_attributes debug schema_class dumped_objects config_attrs/);
26 our $VERSION = '1.001039';
# Numify the version string (standard idiom so dev releases compare correctly).
28 $VERSION = eval $VERSION;
32 DBIx::Class::Fixtures - Dump data and repopulate a database using rules
36 use DBIx::Class::Fixtures;
40 my $fixtures = DBIx::Class::Fixtures->new({
41 config_dir => '/home/me/app/fixture_configs'
45 config => 'set_config.json',
46 schema => $source_dbic_schema,
47 directory => '/home/me/app/fixtures'
51 directory => '/home/me/app/fixtures',
52 ddl => '/home/me/app/sql/ddl.sql',
53 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
54 post_ddl => '/home/me/app/sql/post_ddl.sql',
59 Dump fixtures from source database to filesystem then import to another
60 database (with same schema) at any time. Use as a constant dataset for running
61 tests against or for populating development databases when impractical to use
62 production clones. Describe fixture set using relations and conditions based on
63 your DBIx::Class schema.
65 =head1 DEFINE YOUR FIXTURE SET
67 Fixture sets are currently defined in .json files which must reside in your
68 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
69 describe which data to pull and dump from the source database.
92 This will fetch artists with primary keys 1 and 3, the producer with primary
93 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
94 rel from Producer to Artist.
96 The top level attributes are as follows:
100 Sets must be an array of hashes, as in the example given above. Each set
101 defines a set of objects to be included in the fixtures. For details on valid
102 set attributes see L</SET ATTRIBUTES> below.
106 Rules place general conditions on classes. For example if whenever an artist
107 was dumped you also wanted all of their cds dumped too, then you could use a
108 rule to specify this. For example:
137 In this case all the cds of artists 1, 3 and all producer 5's artists will be
138 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
139 to CD. This is equivalent to:
166 rules must be a hash keyed by class name.
172 To prevent repetition between configs you can include other configs. For
181 { "file": "base.json" }
185 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
186 which is the name of another config file in the same directory. The original
187 config is merged with its includes using L<Hash::Merge>.
189 =head2 datetime_relative
191 Only available for MySQL and PostgreSQL at the moment, must be a value that
192 DateTime::Format::* can parse. For example:
196 "class": "RecentItems",
199 "datetime_relative": "2007-10-30 00:00:00"
202 This will work when dumping from a MySQL database and will cause any datetime
203 fields (where datatype => 'datetime' in the column def of the schema class) to
204 be dumped as a DateTime::Duration object relative to the date specified in the
205 datetime_relative value. For example if the RecentItem object had a date field
206 set to 2007-10-25, then when the fixture is imported the field will be set to 5
207 days in the past relative to the current time.
211 Specifies whether to automatically dump might_have relationships. Should be a
212 hash with one attribute - fetch. Set fetch to 1 or 0.
215 "might_have": { "fetch": 1 },
228 Note: belongs_to rels are automatically dumped whether you like it or not, this
229 is to avoid FKs to nowhere when importing. General rules on has_many rels are
230 not accepted at this top level, but you can turn them on for individual sets -
231 see L</SET ATTRIBUTES>.
233 =head1 SET ATTRIBUTES
237 Required attribute. Specifies the DBIx::Class object class you wish to dump.
241 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
242 If the id is not in the source db then it just won't get dumped, no warnings or
247 Must be either an integer or the string 'all'. Specifying an integer will
248 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
249 will cause the rows attribute to be left off and for all matching rows to be
250 dumped. There's no randomising here, it's just the first x rows.
254 A hash specifying the conditions dumped objects must match. Essentially this is
255 a JSON representation of a DBIx::Class search clause. For example:
261 "cond": { "name": "Dave" }
265 This will dump all artists whose name is 'Dave'. Essentially
266 $artist_rs->search({ name => 'Dave' })->all.
268 Sometimes in a search clause it's useful to use scalar refs to do things like:
270 $artist_rs->search({ no1_singles => \'> no1_albums' })
272 This could be specified in the cond hash like so:
278 "cond": { "no1_singles": "\> no1_albums" }
282 So if the value starts with a backslash the value is made a scalar ref before
283 being passed to search.
287 An array of relationships to be used in the cond clause.
293 "cond": { "cds.position": { ">": 4 } },
298 Fetch all artists who have cds with position greater than 4.
302 Must be an array of hashes. Specifies which rels to also dump. For example:
311 "cond": { "position": "2" }
316 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
318 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
319 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
320 as in the set attributes. quantity is necessary for has_many relationships, but
321 not for belongs_to or might_have relationships.
325 Specifies whether to fetch has_many rels for this set. Must be a hash
326 containing keys fetch and quantity.
328 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
331 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
335 As with has_many but for might_have relationships. Quantity doesn't do anything
338 This value will be inherited by all fetches in this set. This is not true for
339 the has_many attribute.
343 In some cases your database information might be keys to values in some sort of
344 external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
345 to store blob information on the filesystem. In this case you may wish the ability
346 to backup your external storage in the same way your database data. The L</external>
347 attribute lets you specify a handler for this type of issue. For example:
356 "args": {"path":"__ATTR(photo_dir)__"}
362 This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
363 where the path to a file is specified by the C<file> field of the C<Photo> source.
364 We use the uninflated value of the field so you need to completely handle backup
365 and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
366 and you can create your own custom handlers by placing a '+' in the namespace:
368 "class": "+MyApp::Schema::SomeExternalStorage",
370 Although if possible I'd love to get patches to add some of the other common
371 types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
373 See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
375 =head1 RULE ATTRIBUTES
379 Same as with L</SET ATTRIBUTES>
383 Same as with L</SET ATTRIBUTES>
387 Same as with L</SET ATTRIBUTES>
391 Same as with L</SET ATTRIBUTES>
395 Same as with L</SET ATTRIBUTES>
397 =head1 RULE SUBSTITUTIONS
399 You can provide the following substitution patterns for your rule values. An
400 example of this might be:
405 "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
411 Provide a value from %ENV
415 Provide a value from L</config_attrs>
419 Create the path to a file from a list
423 Create the path to a directory from a list
431 =item Arguments: \%$attrs
433 =item Return Value: $fixture_object
437 Returns a new DBIx::Class::Fixture object. %attrs can have the following
444 required. must contain a valid path to the directory in which your .json
449 determines whether to be verbose
451 =item ignore_sql_errors:
453 ignore errors on import of DDL etc
457 A hash of information you can use to do replacements inside your configuration
458 sets. For example, if your set looks like:
466 "quantity": "__ATTR(quantity)__",
471 my $fixtures = DBIx::Class::Fixtures->new( {
472 config_dir => '/home/me/app/fixture_configs'
478 You may wish to do this if you want to let whoever runs the dumps have a bit
483 my $fixtures = DBIx::Class::Fixtures->new( {
484 config_dir => '/home/me/app/fixture_configs'
# Constructor validation (fragment — some original lines not visible here):
# the first argument must be a hash ref and must supply a config_dir.
493 unless (ref $params eq 'HASH') {
494 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
497 unless ($params->{config_dir}) {
498 return DBIx::Class::Exception->throw('config_dir param not specified');
# Wrap config_dir in an IO::All directory object and verify it exists on disk.
501 my $config_dir = io->dir($params->{config_dir});
502 unless (-e $params->{config_dir}) {
503 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
# Attribute defaults for the new object: flags default to 0, collections to
# empty; _inherited_attributes lists the set attributes propagated to fetches.
507 config_dir => $config_dir,
508 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
509 debug => $params->{debug} || 0,
510 ignore_sql_errors => $params->{ignore_sql_errors},
511 dumped_objects => {},
512 use_create => $params->{use_create} || 0,
513 use_find_or_create => $params->{use_find_or_create} || 0,
514 config_attrs => $params->{config_attrs} || {},
522 =head2 available_config_sets
524 Returns a list of all the config sets found in the L</config_dir>. These will
525 be a list of the json based files containing dump rules.
# Return (and memoize) the list of JSON config-set files found in config_dir.
# NOTE(review): @config_sets appears to be declared outside this view; the
# memoization means changes to config_dir after the first call are ignored.
530 sub available_config_sets {
531 @config_sets = scalar(@config_sets) ? @config_sets : map {
# Keep only plain files whose names end in "json".
534 -f "$_" && $_=~/json$/;
535 } shift->config_dir->all;
542 =item Arguments: \%$attrs
544 =item Return Value: 1
549 config => 'set_config.json', # config file to use. must be in the config
550 # directory specified in the constructor
551 schema => $source_dbic_schema,
552 directory => '/home/me/app/fixtures' # output directory
558 all => 1, # just dump everything that's in the schema
559 schema => $source_dbic_schema,
560 directory => '/home/me/app/fixtures', # output directory
561 #excludes => [ qw/Foo MyView/ ], # optionally exclude certain sources
564 In this case objects will be dumped to subdirectories in the specified
565 directory. For example:
567 /home/me/app/fixtures/artist/1.fix
568 /home/me/app/fixtures/artist/3.fix
569 /home/me/app/fixtures/producer/5.fix
571 C<schema> and C<directory> are required attributes. also, one of C<config> or C<all> must
574 The optional parameter C<excludes> takes an array ref of source names and can be
575 used to exclude those sources when dumping the whole schema. This is useful if
576 you have views in there, since those do not need fixtures and will currently result
577 in an error when they are created and then used with C<populate>.
579 Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
580 If this form is used your HashRef should conform to the structure rules defined
581 for the JSON representations.
# dump() body (fragment): validate params, resolve the config (file, hashref,
# or auto-generated "all" config), dump each set to a temp dir, then move the
# results into the requested output directory.
589 unless (ref $params eq 'HASH') {
590 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
593 foreach my $param (qw/schema directory/) {
594 unless ($params->{$param}) {
595 return DBIx::Class::Exception->throw($param . ' param not specified');
# 'excludes' only makes sense when dumping the whole schema via 'all'.
599 if($params->{excludes} && !$params->{all}) {
600 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
603 my $schema = $params->{schema};
# Config may be an inline hashref or the name of a JSON file in config_dir.
605 if ($params->{config}) {
606 $config = ref $params->{config} eq 'HASH' ?
610 my $config_file = io->catfile($self->config_dir, $params->{config});
611 $self->load_config_file("$config_file");
# 'all' mode: synthesize a config with one quantity=>'all' set per source,
# skipping any sources listed in 'excludes'.
613 } elsif ($params->{all}) {
614 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
616 might_have => { fetch => 0 },
617 has_many => { fetch => 0 },
618 belongs_to => { fetch => 0 },
621 { class => $_, quantity => 'all' };
627 DBIx::Class::Exception->throw('must pass config or set all');
# Ensure the output directory exists (create it if we can).
630 my $output_dir = io->dir($params->{directory});
631 unless (-e "$output_dir") {
632 $output_dir->mkpath ||
633 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
636 $self->msg("generating fixtures");
# Dump into a fresh temp dir first so a failed run can't corrupt existing
# fixtures in the output directory.
637 my $tmp_output_dir = io->dir(tempdir);
639 if (-e "$tmp_output_dir") {
640 $self->msg("- clearing existing $tmp_output_dir");
641 $tmp_output_dir->rmtree;
643 $self->msg("- creating $tmp_output_dir");
644 $tmp_output_dir->mkpath;
646 # write version file (for the potential benefit of populate)
647 $tmp_output_dir->file('_dumper_version')->print($VERSION);
649 # write our current config set
650 $tmp_output_dir->file('_config_set')->print( Dumper $config );
652 $config->{rules} ||= {};
653 my @sources = @{delete $config->{sets}};
# Re-key rules by DBIC source_name so lookups below match resultset sources.
655 while ( my ($k,$v) = each %{ $config->{rules} } ) {
656 if ( my $source = eval { $schema->source($k) } ) {
657 $config->{rules}{$source->source_name} = $v;
661 foreach my $source (@sources) {
662 # apply rule to set if specified
663 my $rule = $config->{rules}->{$source->{class}};
664 $source = merge( $source, $rule ) if ($rule);
667 my $rs = $schema->resultset($source->{class});
669 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
670 # if value starts with \ assume it's meant to be passed as a scalar ref
671 # to dbic. ideally this would substitute deeply
674 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
675 : $source->{cond}->{$_}
676 } keys %{$source->{cond}}
680 $rs = $rs->search($source->{cond}, { join => $source->{join} })
683 $self->msg("- dumping $source->{class}");
# Merge top-level config attrs with this set's own attrs for the dump.
685 my %source_options = ( set => { %{$config}, %{$source} } );
686 if ($source->{quantity}) {
687 $rs = $rs->search({}, { order_by => $source->{order_by} })
688 if $source->{order_by};
# quantity must be a positive integer (row limit) or the string 'all'.
690 if ($source->{quantity} =~ /^\d+$/) {
691 $rs = $rs->search({}, { rows => $source->{quantity} });
692 } elsif ($source->{quantity} ne 'all') {
693 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
# 'ids' mode: restrict to the listed primary-key values (single-column PK only).
696 elsif ($source->{ids} && @{$source->{ids}}) {
697 my @ids = @{$source->{ids}};
698 my (@pks) = $rs->result_source->primary_columns;
699 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
700 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
703 DBIx::Class::Exception->throw('must specify either quantity or ids');
706 $source_options{set_dir} = $tmp_output_dir;
707 $self->dump_rs($rs, \%source_options );
710 # clear existing output dir
711 foreach my $child ($output_dir->all) {
712 if ($child->is_dir) {
713 next if ("$child" eq "$tmp_output_dir");
# Only remove directories that actually look like fixture dirs (.fix files).
714 if (grep { $_ =~ /\.fix/ } $child->all) {
717 } elsif ($child =~ /_dumper_version$/) {
# Promote the completed temp dump into the real output directory, then clean up.
722 $self->msg("- moving temp dir to $output_dir");
723 $tmp_output_dir->copy("$output_dir");
725 if (-e "$output_dir") {
726 $self->msg("- clearing tmp dir $tmp_output_dir");
727 # delete existing fixture set
728 $tmp_output_dir->rmtree;
# Load a JSON config file, recursively merge in any 'includes', validate that
# it defines at least one set, and apply rel-fetch defaults.
# Returns: the resulting config hashref (return statement outside this view).
# Throws: DBIx::Class::Exception on missing files or malformed structure.
736 sub load_config_file {
737 my ($self, $config_file) = @_;
738 DBIx::Class::Exception->throw("config does not exist at $config_file")
739 unless -e "$config_file";
741 my $config = Config::Any::JSON->load($config_file);
# 'includes' must be an arrayref of { file => 'name.json' } hashrefs; each
# named file is loaded from config_dir and merged into this config.
744 if (my $incs = $config->{includes}) {
746 DBIx::Class::Exception->throw(
747 'includes params of config must be an array ref of hashrefs'
748 ) unless ref $incs eq 'ARRAY';
750 foreach my $include_config (@$incs) {
751 DBIx::Class::Exception->throw(
752 'includes params of config must be an array ref of hashrefs'
753 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
755 my $include_file = $self->config_dir->file($include_config->{file});
757 DBIx::Class::Exception->throw("config does not exist at $include_file")
758 unless -e "$include_file";
760 my $include = Config::Any::JSON->load($include_file);
761 $self->msg($include);
# Hash::Merge: the including config's values take part in the merge; the
# merged result replaces $config for subsequent includes.
762 $config = merge( $config, $include );
764 delete $config->{includes};
# A usable config must contain a non-empty 'sets' arrayref.
768 return DBIx::Class::Exception->throw('config has no sets')
769 unless $config && $config->{sets} &&
770 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
# Defaults: don't follow might_have/has_many, but always follow belongs_to
# (avoids dangling FKs on import — see POD).
772 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
773 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
774 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
# dump_rs body (fragment): iterate the resultset and dump each row under the
# options in $params (set config plus set_dir).
780 my ($self, $rs, $params) = @_;
782 while (my $row = $rs->next) {
783 $self->dump_object($row, $params);
# dump_object body (fragment): serialize one row to a .fix file, handle
# external-storage fields and datetime_relative, then recurse into related
# rows according to the set/rule configuration.
788 my ($self, $object, $params) = @_;
789 my $set = $params->{set};
# Visitor that expands __ENV(...)__ / __ATTR(...)__ / path substitution
# patterns inside the set's string values before dumping.
791 my $v = Data::Visitor::Callback->new(
793 my ($visitor, $data) = @_;
796 my ( $self, $v ) = @_;
797 if (! defined($ENV{$v})) {
805 if(my $attr = $self->config_attrs->{$v}) {
812 my ($self, @args) = @_;
813 "".io->catfile(@args);
816 my ($self, @args) = @_;
817 "".io->catdir(@args);
# Build one regex alternation over all substitution keywords and expand each
# __KEYWORD(args)__ occurrence via the matching handler.
821 my $subsre = join( '|', keys %$subs );
822 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
830 die 'no dir passed to dump_object' unless $params->{set_dir};
831 die 'no object passed to dump_object' unless $object;
833 my @inherited_attrs = @{$self->_inherited_attributes};
# Primary-key values identify the row; joined with NUL for the dedup key.
836 $object->get_column($_)
837 } $object->primary_columns;
839 my $key = join("\0", @pk_vals);
841 my $src = $object->result_source;
# Track rows already dumped so relationship recursion doesn't loop forever.
842 my $exists = $self->dumped_objects->{$src->name}{$key}++;
845 # write dir and gen filename
846 my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
847 $source_dir->mkpath(0, 0777);
849 # Convert characters not allowed on windows
850 my $file = io->catfile("$source_dir",
851 join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
856 $self->msg('-- dumping ' . "$file", 2);
858 # get_columns will return virtual columns; we just want stored columns.
859 # columns_info keys seems to be the actual storage column names, so we'll
861 my $col_info = $src->columns_info;
862 my @column_names = keys %$col_info;
863 my %columns = $object->get_columns;
864 my %ds; @ds{@column_names} = @columns{@column_names};
# External-storage fields: back up the referenced blob via the configured
# handler class and embed it base64-encoded under $ds{external}.
866 if($set->{external}) {
867 foreach my $field (keys %{$set->{external}}) {
868 my $key = $ds{$field};
# A leading '+' means a fully-qualified handler class; otherwise the class
# lives under DBIx::Class::Fixtures::External::.
869 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
870 my $args = $set->{external}->{$field}->{args};
872 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
875 $ds{external}->{$field} =
876 encode_base64( $class
877 ->backup($key => $args),'');
881 # mess with dates if specified
882 if ($set->{datetime_relative}) {
# Requires the storage driver to provide a datetime parser (MySQL/Pg).
883 my $formatter= eval {$object->result_source->schema->storage->datetime_parser};
884 unless (!$formatter) {
886 if ($set->{datetime_relative} eq 'today') {
887 $dt = DateTime->today;
889 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
# Convert every date/time-typed column to a DateTime::Duration relative to
# $dt so populate can re-anchor it to the import date.
892 while (my ($col, $value) = each %ds) {
893 my $col_info = $object->result_source->column_info($col);
896 && $col_info->{_inflate_info}
898 (uc($col_info->{data_type}) eq 'DATETIME')
899 or (uc($col_info->{data_type}) eq 'DATE')
900 or (uc($col_info->{data_type}) eq 'TIME')
901 or (uc($col_info->{data_type}) eq 'TIMESTAMP')
902 or (uc($col_info->{data_type}) eq 'INTERVAL')
905 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
908 warn "datetime_relative not supported for this db driver at the moment";
912 # do the actual dumping
913 my $serialized = Dump(\%ds)->Out();
915 $file->print($serialized);
918 # don't bother looking at rels unless we are actually planning to dump at least one type
# Per-rel-type fetch flags: a set-level flag or a per-source rule enables each.
919 my ($might_have, $belongs_to, $has_many) = map {
920 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
921 } qw/might_have belongs_to has_many/;
923 return unless $might_have
928 # dump rels of object
930 foreach my $name (sort $src->relationships) {
931 my $info = $src->relationship_info($name);
932 my $r_source = $src->related_source($name);
933 # if belongs_to or might_have with might_have param set or has_many with
934 # has_many param set then
# accessor 'single' without join_type == belongs_to (always dumped);
# 'single' with join_type == might_have (needs flag); 'multi' == has_many.
936 ( $info->{attrs}{accessor} eq 'single' &&
937 (!$info->{attrs}{join_type} || $might_have)
939 || $info->{attrs}{accessor} eq 'filter'
941 ($info->{attrs}{accessor} eq 'multi' && $has_many)
943 my $related_rs = $object->related_resultset($name);
944 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
945 # these parts of the rule only apply to has_many rels
946 if ($rule && $info->{attrs}{accessor} eq 'multi') {
947 $related_rs = $related_rs->search(
949 { join => $rule->{join} }
950 ) if ($rule->{cond});
952 $related_rs = $related_rs->search(
954 { rows => $rule->{quantity} }
955 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
957 $related_rs = $related_rs->search(
959 { order_by => $rule->{order_by} }
960 ) if ($rule->{order_by});
# Set-level has_many quantity caps the number of related rows fetched.
963 if ($set->{has_many}{quantity} &&
964 $set->{has_many}{quantity} =~ /^\d+$/) {
965 $related_rs = $related_rs->search(
967 { rows => $set->{has_many}->{quantity} }
# Build the child dump params: inherited attrs copied from this set, then
# merged with the matching rule (if it requests fetching).
971 my %c_params = %{$params};
975 } grep { $set->{$_} } @inherited_attrs;
977 $c_params{set} = \%mock_set;
978 $c_params{set} = merge( $c_params{set}, $rule)
979 if $rule && $rule->{fetch};
981 $self->dump_rs($related_rs, \%c_params);
# Explicit 'fetch' entries in the set: follow each named rel with its own
# cond/quantity/order_by, inheriting unset attrs from the parent set.
986 return unless $set && $set->{fetch};
987 foreach my $fetch (@{$set->{fetch}}) {
989 $fetch->{$_} = $set->{$_} foreach
990 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
991 my $related_rs = $object->related_resultset($fetch->{rel});
992 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
995 my $info = $object->result_source->relationship_info($fetch->{rel});
996 if ($info->{attrs}{accessor} eq 'multi') {
997 $fetch = merge( $fetch, $rule );
998 } elsif ($rule->{fetch}) {
999 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
1003 die "relationship $fetch->{rel} does not exist for " . $src->source_name
1004 unless ($related_rs);
1006 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
1007 # if value starts with \ assume it's meant to be passed as a scalar ref
1008 # to dbic. ideally this would substitute deeply
1009 $fetch->{cond} = { map {
1010 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1011 : $fetch->{cond}->{$_}
1012 } keys %{$fetch->{cond}} };
1015 $related_rs = $related_rs->search(
1017 { join => $fetch->{join} }
1018 ) if $fetch->{cond};
1020 $related_rs = $related_rs->search(
1022 { rows => $fetch->{quantity} }
1023 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1024 $related_rs = $related_rs->search(
1026 { order_by => $fetch->{order_by} }
1027 ) if $fetch->{order_by};
1029 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
# Build a connected schema for populate(): drop all existing tables, replay
# the DDL file, then connect a second fresh schema object against the new DB.
# Uses generated package names so repeated calls never collide.
1033 sub _generate_schema {
1035 my $params = shift || {};
1037 $self->msg("\ncreating schema");
1039 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
# String eval is required here: the class name is only known at runtime.
1040 eval "require $schema_class";
1044 my $connection_details = $params->{connection_details};
1046 $namespace_counter++;
1048 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1049 Class::C3::Componentised->inject_base( $namespace => $schema_class );
1051 $pre_schema = $namespace->connect(@{$connection_details});
1052 unless( $pre_schema ) {
1053 return DBIx::Class::Exception->throw('connection details not valid');
1055 my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
1056 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1057 my $dbh = $pre_schema->storage->dbh;
# Drop everything inside one transaction with FK checks deferred so drop
# order doesn't matter; 'cascade' adds CASCADE for DBs like Postgres.
1060 $self->msg("- clearing DB of existing tables");
1061 $pre_schema->storage->txn_do(sub {
1062 $pre_schema->storage->with_deferred_fk_checks(sub {
1063 foreach my $table (@tables) {
1065 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1071 # import new ddl file to db
1072 my $ddl_file = $params->{ddl};
1073 $self->msg("- deploying schema using $ddl_file");
1074 my $data = _read_sql($ddl_file);
# Each statement is run individually; failures die unless ignore_sql_errors.
1076 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1077 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1079 $self->msg("- finished importing DDL into DB");
1081 # load schema object from our new DB
1082 $namespace_counter++;
1083 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1084 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1085 my $schema = $namespace2->connect(@{$connection_details});
# _read_sql body (fragment): slurp a DDL file, strip '--' comment lines, and
# split the remainder into individual statements on ';'.
1090 my $ddl_file = shift;
# NOTE(review): 2-arg open with interpolated filename; prefer the 3-arg form
# (open my $fh, '<', $ddl_file) to avoid mode-injection via odd filenames.
1092 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1093 my @data = split(/\n/, join('', <$fh>));
1094 @data = grep(!/^--/, @data);
# Statements split naively on ';' — will break DDL containing literal ';'.
1095 @data = split(/;/, join('', @data));
1097 @data = grep { $_ && $_ !~ /^-- / } @data;
1101 =head2 dump_config_sets
1103 Works just like L</dump> but instead of specifying a single json config set
1104 located in L</config_dir> we dump each set named in the C<configs> parameter.
1106 The parameters are the same as for L</dump> except instead of a C<directory>
1107 parameter we have a C<directory_template> which is a coderef expected to return
1108 a scalar that is a root directory where we will do the actual dumping. This
1109 coderef get three arguments: C<$self>, C<$params> and C<$set_name>. For
1112 $fixture->dump_all_config_sets({
1114 configs => [qw/one.json other.json/],
1115 directory_template => sub {
1116 my ($fixture, $params, $set) = @_;
1117 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
# Dump each named config set in turn, computing a per-set output directory
# via the caller-supplied directory_template coderef.
1123 sub dump_config_sets {
1124 my ($self, $params) = @_;
1125 my $available_config_sets = delete $params->{configs};
# NOTE(review): stray quote in this error message ("'directory_template is...);
# also ->throw as the RHS of || means a missing template throws before the loop.
1126 my $directory_template = delete $params->{directory_template} ||
1127 DBIx::Class::Exception->throw("'directory_template is required parameter");
1129 for my $set (@$available_config_sets) {
# NOTE(review): $localparams aliases $params (same hashref), so directory/
# config written here persist across iterations and into the caller's hash;
# a shallow copy (my %local = %$params) would isolate each iteration.
1130 my $localparams = $params;
1131 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1132 $localparams->{config} = $set;
1133 $self->dump($localparams);
1134 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1138 =head2 dump_all_config_sets
1140 my %local_params = %$params;
1141 my $local_self = bless { %$self }, ref($self);
1142 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1143 $local_params{config} = $set;
1144 $self->dump(\%local_params);
1147 Works just like L</dump> but instead of specifying a single json config set
1148 located in L</config_dir> we dump each set in turn to the specified directory.
1150 The parameters are the same as for L</dump> except instead of a C<directory>
1151 parameter we have a C<directory_template> which is a coderef expected to return
1152 a scalar that is a root directory where we will do the actual dumping. This
1153 coderef get three arguments: C<$self>, C<$params> and C<$set_name>. For
1156 $fixture->dump_all_config_sets({
1158 directory_template => sub {
1159 my ($fixture, $params, $set) = @_;
1160 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
# Convenience wrapper: dump every config set found in config_dir by
# delegating to dump_config_sets with the discovered set list.
1166 sub dump_all_config_sets {
1167 my ($self, $params) = @_;
1168 $self->dump_config_sets({
1170 configs=>[$self->available_config_sets],
1178 =item Arguments: \%$attrs
1180 =item Return Value: 1
1184 $fixtures->populate( {
1185 # directory to look for fixtures in, as specified to dump
1186 directory => '/home/me/app/fixtures',
1189 ddl => '/home/me/app/sql/ddl.sql',
1191 # database to clear, deploy and then populate
1192 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1194 # DDL to deploy after populating records, ie. FK constraints
1195 post_ddl => '/home/me/app/sql/post_ddl.sql',
1197 # use CASCADE option when dropping tables
1200 # optional, set to 1 to run ddl but not populate
1203 # optional, set to 1 to run each fixture through ->create rather than have
1204 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1205 # that effects the value of column(s).
1208 # optional, same as use_create except with find_or_create.
1209 # Useful if you are populating a persistent data store.
1210 use_find_or_create => 0,
1212 # Don't try to clean the database, just populate over what's there. Requires
1213 # schema option. Use this if you want to handle removing old data yourself
1218 In this case the database app_dev will be cleared of all tables, then the
1219 specified DDL deployed to it, then finally all fixtures found in
1220 /home/me/app/fixtures will be added to it. populate will generate its own
1221 DBIx::Class schema from the DDL rather than being passed one to use. This is
1222 better as custom insert methods are avoided, which can get in the way. In
1223 some cases you might not have a DDL, and so this method will eventually allow a
1224 $schema object to be passed instead.
1226 If needed, you can specify a post_ddl attribute which is a DDL to be applied
1227 after all the fixtures have been added to the database. A good use of this
1228 option would be to add foreign key constraints since databases like Postgresql
1229 cannot disable foreign key checks.
1231 If your tables have foreign key constraints you may want to use the cascade
1232 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1235 C<directory> is a required attribute.
1237 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1238 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1241 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1242 attribute containing the connected schema you wish to operate on and set the
1243 C<no_deploy> attribute.
# populate() body (fragment): validate params, optionally rebuild the target
# DB from DDL, copy fixtures to a temp dir, then insert every .fix file's
# data inside one transaction with FK checks deferred.
1250 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1251 unless ref $params eq 'HASH';
1253 DBIx::Class::Exception->throw('directory param not specified')
1254 unless $params->{directory};
1256 my $fixture_dir = io->dir(delete $params->{directory});
1257 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1258 unless -d "$fixture_dir";
# Either ddl+connection_details (we drop/deploy) or schema+no_deploy
# (caller manages the DB) must be supplied.
1263 if ($params->{ddl} && $params->{connection_details}) {
1264 $ddl_file = io->file(delete $params->{ddl});
1265 unless (-e "$ddl_file") {
1266 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1268 unless (ref $params->{connection_details} eq 'ARRAY') {
1269 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1271 $schema = $self->_generate_schema({
1273 connection_details => delete $params->{connection_details},
1276 } elsif ($params->{schema} && $params->{no_deploy}) {
1277 $schema = $params->{schema};
1279 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1283 return 1 if $params->{no_populate};
1285 $self->msg("\nimporting fixtures");
1286 my $tmp_fixture_dir = io->dir(tempdir());
# _config_set was written by dump(); it is Data::Dump::Streamer output, so a
# string eval reconstructing $VAR1 (presumably $HASH1 in .fix files too).
1287 my $config_set_path = io->file($fixture_dir, '_config_set');
1288 my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
# Same substitution visitor as in dump_object: expand __ENV/__ATTR/path
# patterns inside the stored config set before using it.
1290 my $v = Data::Visitor::Callback->new(
1291 plain_value => sub {
1292 my ($visitor, $data) = @_;
1295 my ( $self, $v ) = @_;
1296 if (! defined($ENV{$v})) {
1303 my ($self, $v) = @_;
1304 if(my $attr = $self->config_attrs->{$v}) {
1311 my ($self, @args) = @_;
1315 my ($self, @args) = @_;
1320 my $subsre = join( '|', keys %$subs );
1321 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1327 $v->visit( $config_set );
# Index sets by class name so external-field handlers can be looked up per source.
1332 %sets_by_src = map { delete($_->{class}) => $_ }
1333 @{$config_set->{sets}}
1336 if (-e "$tmp_fixture_dir") {
1337 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1338 $tmp_fixture_dir->rmtree;
1340 $self->msg("- creating temp dir");
1341 $tmp_fixture_dir->mkpath();
# Copy only fixture subdirs matching known schema sources into the temp dir.
1342 for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
1343 my $from_dir = io->catdir($fixture_dir, $_);
1344 next unless -e "$from_dir";
1345 $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
1348 unless (-d "$tmp_fixture_dir") {
1349 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
# If the storage has a datetime parser, build a visitor that re-anchors
# DateTime::Duration values (written by datetime_relative dumps) to either
# datetime_relative_to or today's date.
1353 my $formatter = $schema->storage->datetime_parser;
1354 unless ($@ || !$formatter) {
1356 if ($params->{datetime_relative_to}) {
1357 $callbacks{'DateTime::Duration'} = sub {
1358 $params->{datetime_relative_to}->clone->add_duration($_);
1361 $callbacks{'DateTime::Duration'} = sub {
1362 $formatter->format_datetime(DateTime->today->add_duration($_))
1365 $callbacks{object} ||= "visit_ref";
# NOTE(review): indirect object syntax; prefer Data::Visitor::Callback->new(...).
1366 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
# Insert sources in dependency order, inside one txn with FK checks deferred.
1369 my @sorted_source_names = $self->_get_sorted_sources( $schema );
1370 $schema->storage->txn_do(sub {
1371 $schema->storage->with_deferred_fk_checks(sub {
1372 foreach my $source (@sorted_source_names) {
1373 $self->msg("- adding " . $source);
1374 my $rs = $schema->resultset($source);
1375 my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
1376 next unless (-e "$source_dir");
1378 while (my $file = $source_dir->next) {
1379 next unless ($file =~ /\.fix$/);
1380 next if $file->is_dir;
1381 my $contents = $file->slurp;
1384 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
# Restore external-storage blobs recorded at dump time via the handler class.
1385 if(my $external = delete $HASH1->{external}) {
1386 my @fields = keys %{$sets_by_src{$source}->{external}};
1387 foreach my $field(@fields) {
1388 my $key = $HASH1->{$field};
1389 my $content = decode_base64 ($external->{$field});
1390 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1391 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1392 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1394 $class->restore($key, $content, $args);
# Insert strategy: per-row create / find_or_create, or batch via populate.
1397 if ( $params->{use_create} ) {
1398 $rs->create( $HASH1 );
1399 } elsif( $params->{use_find_or_create} ) {
1400 $rs->find_or_create( $HASH1 );
1402 push(@rows, $HASH1);
1405 $rs->populate(\@rows) if scalar(@rows);
1407 ## Now we need to do some db specific cleanup
1408 ## this probably belongs in a more isolated space. Right now this is
1409 ## to just handle postgresql SERIAL types that use Sequences
1410 ## Will completely ignore sequences in Oracle due to having to drop
1411 ## and recreate them
1413 my $table = $rs->result_source->name;
1414 for my $column(my @columns = $rs->result_source->columns) {
1415 my $info = $rs->result_source->column_info($column);
1416 if(my $sequence = $info->{sequence}) {
1417 $self->msg("- updating sequence $sequence");
1418 $rs->result_source->storage->dbh_do(sub {
1419 my ($storage, $dbh, @cols) = @_;
1420 if ( $dbh->{Driver}->{Name} eq "Oracle" ) {
1421 $self->msg("- Cannot change sequence values in Oracle");
1424 my $sql = sprintf("SELECT setval(?, (SELECT max(%s) FROM %s));",$dbh->quote_identifier($column),$dbh->quote_identifier($table))
1426 my $sth = $dbh->prepare($sql);
1427 $sth->bind_param(1,$sequence);
1429 my $rv = $sth->execute or die $sth->errstr;
1430 $self->msg("- $sql");
1439 $self->do_post_ddl( {
1441 post_ddl=>$params->{post_ddl}
1442 } ) if $params->{post_ddl};
1444 $self->msg("- fixtures imported");
1445 $self->msg("- cleaning up");
1446 $tmp_fixture_dir->rmtree;
1450 # the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
# Return the schema's source monikers ordered so that sources a table depends
# on via FK constraints come before it — safe insert order for populate().
# NOTE(review): several lines of this sub (including the declarations of
# %tables and $fk_constraint, and the closing of the sort) are elided from
# this view; comments are hedged accordingly.
1451 sub _get_sorted_sources {
1452 my ( $self, $dbicschema ) = @_;
1455 my %table_monikers = map { $_ => 1 } $dbicschema->sources;
1458 foreach my $moniker (sort keys %table_monikers) {
1459 my $source = $dbicschema->source($moniker);
1461 my $table_name = $source->name;
1462 my @primary = $source->primary_columns;
1463 my @rels = $source->relationships();
1465 my %created_FK_rels;
1466 foreach my $rel (sort @rels) {
1467 my $rel_info = $source->relationship_info($rel);
1469 # Ignore any rel cond that isn't a straight hash
1470 next unless ref $rel_info->{cond} eq 'HASH';
# Extract the foreign-side column names from cond values of the form
# 'self.col' / 'foreign.col'.
1472 my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
1474 # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
1476 if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
1477 $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
1478 } elsif ( $rel_info->{attrs}{accessor}
1479 && $rel_info->{attrs}{accessor} eq 'multi' ) {
# Heuristic fallback: treat as FK-constraining unless the rel keys match our
# own primary key (mirrors SQL::Translator::Parser::DBIx::Class — see header).
1482 $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
1485 # Dont add a relation if its not constraining
1486 next unless $fk_constraint;
1488 my $rel_table = $source->related_source($rel)->source_name;
1489 # Make sure we don't create the same relation twice
1490 my $key_test = join("\x00", sort @keys);
1491 next if $created_FK_rels{$rel_table}->{$key_test};
1493 if (scalar(@keys)) {
1494 $created_FK_rels{$rel_table}->{$key_test} = 1;
1496 # calculate dependencies: do not consider deferrable constraints and
1497 # self-references for dependency calculations
1498 if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
1499 $tables{$moniker}{$rel_table}++;
# Ensure every moniker appears in the dep map even with no dependencies.
1503 $tables{$moniker} = {} unless exists $tables{$moniker};
1506 # resolve entire dep tree
1507 my $dependencies = {
1508 map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
1511 # return the sorted result
# Fewer transitive dependencies sorts first (keys in scalar context = count).
1513 keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
# Interior of _resolve_deps (sub line elided): recursively counts the
# transitive dependencies of $question in the $answers adjacency map.
# $seen carries the ancestor chain to break cycles.
1520 my ( $question, $answers, $seen ) = @_;
# Copy the ancestors' seen-map (bumping counts) and mark ourselves visited.
1524 my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
1525 $seen{$question} = 1;
1527 for my $dep (keys %{ $answers->{$question} }) {
# Cycle guard: checks the *incoming* $seen (ancestors only), not %seen —
# presumably intentional so a node may depend on itself-adjacent branches;
# TODO confirm against SQL::Translator::Parser::DBIx::Class.
1528 return {} if $seen->{$dep};
1529 my $subdeps = _resolve_deps( $dep, $answers, \%seen );
# Accumulate sub-dependency counts into our own result.
1530 $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
# Interior of do_post_ddl (sub line elided): runs each statement of the
# post_ddl SQL file against the freshly populated schema.
1537 my ($self, $params) = @_;
1539 my $schema = $params->{schema};
1540 my $data = _read_sql($params->{post_ddl});
# Statements are executed individually; a false return only warns here,
# while an exception dies with context unless ignore_sql_errors is set.
# NOTE(review): direct hash access $self->{ignore_sql_errors} bypasses the
# grouped accessors used elsewhere — confirm this attribute is ever set.
1542 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1543 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1545 $self->msg("- finished importing post-populate DDL into DB");
# Interior of msg (sub line and $self = shift elided): debug logging helper.
# Silently returns when given nothing or when the requested verbosity level
# exceeds the configured debug level.
1550 my $subject = shift || return;
1551 my $level = shift || 1;
1552 return unless $self->debug >= $level;
# References get dumped structurally (an elided ref() check selects the
# branch); plain strings are printed as-is.
1554 print Dumper($subject);
1556 print $subject . "\n";
1560 # Helper method for ensuring that the name used for a given source
1561 # is always the same (This is used to name the fixture directories
1564 sub _name_for_source {
1565 my ($self, $source) = @_;
# When $source->name is a reference (e.g. a view defined with literal SQL),
# it can't be used as a directory name, so fall back to the moniker.
1567 return ref $source->name ? $source->source_name : $source->name;
1572 Luke Saunders <luke@shadowcatsystems.co.uk>
1574 Initial development sponsored by and (c) Takkle, Inc. 2007
1578 Ash Berlin <ash@shadowcatsystems.co.uk>
1580 Matt S. Trout <mst@shadowcatsystems.co.uk>
1582 John Napiorkowski <jjnapiork@cpan.org>
1584 Drew Taylor <taylor.andrew.j@gmail.com>
1586 Frank Switalski <fswitalski@gmail.com>
1588 Chris Akins <chris.hexx@gmail.com>
1590 Tom Bloor <t.bloor@shadowcat.co.uk>
1592 Samuel Kaufman <skaufman@cpan.org>
1596 This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself.