1 package DBIx::Class::Fixtures;
# Module preamble: dependency loading and class-level setup.
# NOTE(review): 'use strict; use warnings;' are not visible in this extract
# (original lines 2-5 are elided) — confirm they are present near the top.
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file tempdir);
10 use File::Spec::Functions 'catfile', 'catdir';
11 use Config::Any::JSON;
12 use Data::Dump::Streamer;
13 use Data::Visitor::Callback;
15 use File::Copy::Recursive qw/dircopy/;
16 use File::Copy qw/move/;
17 use Hash::Merge qw( merge );
19 use Class::C3::Componentised;
22 use base qw(Class::Accessor::Grouped);
# Package-global counter used by _generate_schema to mint unique generated
# schema class names per connection.
24 our $namespace_counter = 0;
# Simple read/write accessors for the object's configuration attributes.
26 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
27 _inherited_attributes debug schema_class dumped_objects config_attrs/);
29 our $VERSION = '1.001020';
33 DBIx::Class::Fixtures - Dump data and repopulate a database using rules
37 use DBIx::Class::Fixtures;
41 my $fixtures = DBIx::Class::Fixtures->new({
42 config_dir => '/home/me/app/fixture_configs'
46 config => 'set_config.json',
47 schema => $source_dbic_schema,
48 directory => '/home/me/app/fixtures'
52 directory => '/home/me/app/fixtures',
53 ddl => '/home/me/app/sql/ddl.sql',
54 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
55 post_ddl => '/home/me/app/sql/post_ddl.sql',
60 Dump fixtures from source database to filesystem then import to another
61 database (with same schema) at any time. Use as a constant dataset for running
62 tests against or for populating development databases when impractical to use
63 production clones. Describe fixture set using relations and conditions based on
64 your DBIx::Class schema.
66 =head1 DEFINE YOUR FIXTURE SET
68 Fixture sets are currently defined in .json files which must reside in your
69 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
70 describe which data to pull and dump from the source database.
93 This will fetch artists with primary keys 1 and 3, the producer with primary
94 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
95 rel from Producer to Artist.
97 The top level attributes are as follows:
101 Sets must be an array of hashes, as in the example given above. Each set
102 defines a set of objects to be included in the fixtures. For details on valid
103 set attributes see L</SET ATTRIBUTES> below.
107 Rules place general conditions on classes. For example if whenever an artist
108 was dumped you also wanted all of their cds dumped too, then you could use a
109 rule to specify this. For example:
138 In this case all the cds of artists 1, 3 and all producer 5's artists will be
139 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
140 to CD. This is equivalent to:
167 rules must be a hash keyed by class name.
173 To prevent repetition between configs you can include other configs. For
182 { "file": "base.json" }
186 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
187 which is the name of another config file in the same directory. The original
188 config is merged with its includes using L<Hash::Merge>.
190 =head2 datetime_relative
192 Only available for MySQL and PostgreSQL at the moment, must be a value that
193 DateTime::Format::* can parse. For example:
197 "class": "RecentItems",
200 "datetime_relative": "2007-10-30 00:00:00"
203 This will work when dumping from a MySQL database and will cause any datetime
204 fields (where datatype => 'datetime' in the column def of the schema class) to
205 be dumped as a DateTime::Duration object relative to the date specified in the
206 datetime_relative value. For example if the RecentItem object had a date field
207 set to 2007-10-25, then when the fixture is imported the field will be set to 5
208 days in the past relative to the current time.
212 Specifies whether to automatically dump might_have relationships. Should be a
213 hash with one attribute - fetch. Set fetch to 1 or 0.
216 "might_have": { "fetch": 1 },
229 Note: belongs_to rels are automatically dumped whether you like it or not, this
230 is to avoid FKs to nowhere when importing. General rules on has_many rels are
231 not accepted at this top level, but you can turn them on for individual sets -
232 see L</SET ATTRIBUTES>.
234 =head1 SET ATTRIBUTES
238 Required attribute. Specifies the DBIx::Class object class you wish to dump.
242 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
243 If the id is not in the source db then it just won't get dumped, no warnings or
248 Must be either an integer or the string 'all'. Specifying an integer will
249 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
250 will cause the rows attribute to be left off and for all matching rows to be
251 dumped. There's no randomising here, it's just the first x rows.
255 A hash specifying the conditions dumped objects must match. Essentially this is
256 a JSON representation of a DBIx::Class search clause. For example:
262 "cond": { "name": "Dave" }
266 This will dump all artists whose name is 'Dave'. Essentially
267 $artist_rs->search({ name => 'Dave' })->all.
269 Sometimes in a search clause it's useful to use scalar refs to do things like:
271 $artist_rs->search({ no1_singles => \'> no1_albums' })
273 This could be specified in the cond hash like so:
279 "cond": { "no1_singles": "\> no1_albums" }
283 So if the value starts with a backslash the value is made a scalar ref before
284 being passed to search.
288 An array of relationships to be used in the cond clause.
294 "cond": { "cds.position": { ">": 4 } },
299 Fetch all artists who have cds with position greater than 4.
303 Must be an array of hashes. Specifies which rels to also dump. For example:
312 "cond": { "position": "2" }
317 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
319 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
320 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
321 as in the set attributes. quantity is necessary for has_many relationships, but
322 not if using for belongs_to or might_have relationships.
326 Specifies whether to fetch has_many rels for this set. Must be a hash
327 containing keys fetch and quantity.
329 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
332 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
336 As with has_many but for might_have relationships. Quantity doesn't do anything
339 This value will be inherited by all fetches in this set. This is not true for
340 the has_many attribute.
344 In some cases your database information might be keys to values in some sort of
345 external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
346 to store blob information on the filesystem. In this case you may wish the ability
347 to backup your external storage in the same way your database data. The L</external>
348 attribute lets you specify a handler for this type of issue. For example:
357 "args": {"path":"__ATTR(photo_dir)__"}
363 This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
364 where the path to a file is specified by the C<file> field of the C<Photo> source.
365 We use the uninflated value of the field so you need to completely handle backup
366 and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
367 and you can create your own custom handlers by placing a '+' in the namespace:
369 "class": "+MyApp::Schema::SomeExternalStorage",
371 Although if possible I'd love to get patches to add some of the other common
372 types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
374 See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
376 =head1 RULE ATTRIBUTES
380 Same as with L</SET ATTRIBUTES>
384 Same as with L</SET ATTRIBUTES>
388 Same as with L</SET ATTRIBUTES>
392 Same as with L</SET ATTRIBUTES>
396 Same as with L</SET ATTRIBUTES>
398 =head1 RULE SUBSTITUTIONS
400 You can provide the following substitution patterns for your rule values. An
401 example of this might be:
406 "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
412 Provide a value from %ENV
416 Provide a value from L</config_attrs>
420 Create the path to a file from a list
424 Create the path to a directory from a list
432 =item Arguments: \%$attrs
434 =item Return Value: $fixture_object
438 Returns a new DBIx::Class::Fixture object. %attrs can have the following
445 required. must contain a valid path to the directory in which your .json
450 determines whether to be verbose
452 =item ignore_sql_errors:
454 ignore errors on import of DDL etc
458 A hash of information you can use to do replacements inside your configuration
459 sets. For example, if your set looks like:
467 "quantity": "__ATTR(quantity)__",
472 my $fixtures = DBIx::Class::Fixtures->new( {
473 config_dir => '/home/me/app/fixture_configs'
479 You may wish to do this if you want to let whoever runs the dumps have a bit
484 my $fixtures = DBIx::Class::Fixtures->new( {
485 config_dir => '/home/me/app/fixture_configs'
# Constructor body (the 'sub new {' line and the final bless/return are in
# elided lines of this extract). Validates the single hashref argument, then
# builds the attribute set for the new fixtures object.
494 unless (ref $params eq 'HASH') {
495 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
# config_dir is the only required parameter; it must name an existing
# directory holding the .json fixture-set configs.
498 unless ($params->{config_dir}) {
499 return DBIx::Class::Exception->throw('config_dir param not specified');
502 my $config_dir = dir($params->{config_dir});
503 unless (-e $params->{config_dir}) {
504 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
# Object state. _inherited_attributes lists the set attributes that are
# propagated down to nested 'fetch' specs during dumping (see dump_object).
508 config_dir => $config_dir,
509 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
510 debug => $params->{debug} || 0,
511 ignore_sql_errors => $params->{ignore_sql_errors},
# dumped_objects tracks primary keys already written so each row is dumped
# at most once per dump() call.
512 dumped_objects => {},
513 use_create => $params->{use_create} || 0,
514 config_attrs => $params->{config_attrs} || {},
522 =head2 available_config_sets
524 Returns a list of all the config sets found in the L</config_dir>. These will
525 be a list of the json based files containing dump rules.
# Returns (and memoizes) the list of *.json config files found directly in
# config_dir. The closing brace/return are in elided lines.
530 sub available_config_sets {
# NOTE(review): @config_sets is used as a cache but its declaration is not
# visible in this extract — presumably a file-scoped lexical; caching means
# files added after the first call will not be seen by later calls.
531 @config_sets = scalar(@config_sets) ? @config_sets : map {
# Keep only plain files whose names end in "json" (unanchored on the left,
# so e.g. 'foo.myjson' would also match).
534 -f $_ && $_=~/json$/;
535 } dir((shift)->config_dir)->children;
542 =item Arguments: \%$attrs
544 =item Return Value: 1
549 config => 'set_config.json', # config file to use. must be in the config
550 # directory specified in the constructor
551 schema => $source_dbic_schema,
552 directory => '/home/me/app/fixtures' # output directory
558 all => 1, # just dump everything that's in the schema
559 schema => $source_dbic_schema,
560 directory => '/home/me/app/fixtures' # output directory
563 In this case objects will be dumped to subdirectories in the specified
564 directory. For example:
566 /home/me/app/fixtures/artist/1.fix
567 /home/me/app/fixtures/artist/3.fix
568 /home/me/app/fixtures/producer/5.fix
570 C<schema> and C<directory> are required attributes. Also, one of C<config> or C<all> must
571 be specified. The attributes HashRef can have the following parameters:
577 A boolean which defaults to false. If true, dump everything that is in the schema.
581 Filename or HashRef. One of the C<config> or C<all> attributes must be set.
583 If the HashRef form is used your HashRef should conform to the structure rules
584 defined for the JSON representations.
588 DBIx::Class::Schema object for the data you want to dump
592 directory to store the dumped objects
596 A code reference that will be called for each row returned. It will be provided
597 the Result Source and the row as a HashRef. The row can be modified before being
598 written to the fixture files. For example:
602 predump_hook => sub {
603 my ($source, $data) = @_;
604 if ($source->name eq "ResultSource_X") {
605 $data->{'sensitive_row'} = 'redacted';
# Body of dump(): validates parameters, resolves the config (from a file,
# an inline hashref, or the 'all' shortcut), dumps each configured set to a
# temp directory, then swaps the temp output into the target directory.
# Several interior lines are elided in this extract.
618 unless (ref $params eq 'HASH') {
619 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
622 foreach my $param (qw/schema directory/) {
623 unless ($params->{$param}) {
624 return DBIx::Class::Exception->throw($param . ' param not specified');
# 'excludes' only makes sense when dumping everything via 'all'.
628 if($params->{excludes} && !$params->{all}) {
629 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
# NOTE(review): bug — the closing paren is misplaced: ref($params->{predump_hook} ne "CODE")
# takes ref() of a boolean, which is always '' (false), so this validation
# can never fire. Should be: ref($params->{predump_hook}) ne "CODE".
632 if($params->{predump_hook} && ref($params->{predump_hook} ne "CODE")) {
633 return DBIx::Class::Exception->throw('predump_hook param should be a coderef');
636 # Clear dumped_objects in case of subsequent calls to dump with same fixture object
637 $self->{dumped_objects} = {};
639 my $schema = $params->{schema};
# Resolve $config: inline hashref used as-is, otherwise loaded from a file
# in config_dir (assignment continues in elided lines).
641 if ($params->{config}) {
642 $config = ref $params->{config} eq 'HASH' ?
646 my $config_file = $self->config_dir->file($params->{config});
647 $self->load_config_file($config_file);
649 } elsif ($params->{all}) {
# 'all' mode: synthesize a config with one quantity=>'all' set per source,
# skipping anything named in 'excludes'; no rels are followed.
650 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
652 might_have => { fetch => 0 },
653 has_many => { fetch => 0 },
654 belongs_to => { fetch => 0 },
657 { class => $_, quantity => 'all' };
663 DBIx::Class::Exception->throw('must pass config or set all');
666 my $output_dir = dir($params->{directory});
667 unless (-e $output_dir) {
668 $output_dir->mkpath ||
669 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
672 $self->msg("generating fixtures");
# Dump into a fresh temp dir first so a failed run never clobbers an
# existing fixture set.
673 my $tmp_output_dir = tempdir();
675 if (-e $tmp_output_dir) {
676 $self->msg("- clearing existing $tmp_output_dir");
677 $tmp_output_dir->rmtree;
679 $self->msg("- creating $tmp_output_dir");
680 $tmp_output_dir->mkpath;
682 # write version file (for the potential benefit of populate)
683 $tmp_output_dir->file('_dumper_version')
687 # write our current config set
688 $tmp_output_dir->file('_config_set')
690 ->print( Dumper $config );
692 $config->{rules} ||= {};
693 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
# Re-key rules by DBIC source_name so they match what dump_object looks up;
# the eval guards against rule keys naming nonexistent sources.
695 while ( my ($k,$v) = each %{ $config->{rules} } ) {
696 if ( my $source = eval { $schema->source($k) } ) {
697 $config->{rules}{$source->source_name} = $v;
701 foreach my $source (@sources) {
702 # apply rule to set if specified
703 my $rule = $config->{rules}->{$source->{class}};
704 $source = merge( $source, $rule ) if ($rule);
707 my $rs = $schema->resultset($source->{class});
709 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
710 # if value starts with \ assume it's meant to be passed as a scalar ref
711 # to dbic. ideally this would substitute deeply
# NOTE(review): the s/^\\// here mutates the cond value in place as a side
# effect of the ternary test — it works, but is easy to misread.
714 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
715 : $source->{cond}->{$_}
716 } keys %{$source->{cond}}
720 $rs = $rs->search($source->{cond}, { join => $source->{join} })
723 $self->msg("- dumping $source->{class}");
# The effective 'set' for this source is the global config overlaid with
# the per-source spec.
725 my %source_options = ( set => { %{$config}, %{$source} } );
726 if ($source->{quantity}) {
727 $rs = $rs->search({}, { order_by => $source->{order_by} })
728 if $source->{order_by};
# quantity must be a positive integer (row limit) or the string 'all'.
730 if ($source->{quantity} =~ /^\d+$/) {
731 $rs = $rs->search({}, { rows => $source->{quantity} });
732 } elsif ($source->{quantity} ne 'all') {
733 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
736 elsif ($source->{ids} && @{$source->{ids}}) {
# 'ids' mode only supports single-column primary keys.
737 my @ids = @{$source->{ids}};
738 my (@pks) = $rs->result_source->primary_columns;
739 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
740 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
743 DBIx::Class::Exception->throw('must specify either quantity or ids');
746 $source_options{set_dir} = $tmp_output_dir;
747 $source_options{predump_hook} = $params->{predump_hook} if (exists($params->{predump_hook}));
748 $self->dump_rs($rs, \%source_options );
751 # clear existing output dir
752 foreach my $child ($output_dir->children) {
753 if ($child->is_dir) {
754 next if ($child eq $tmp_output_dir);
# Only remove subdirs that look like fixture output (contain .fix files);
# the /\.fix/ match is unanchored, so any name containing '.fix' counts.
755 if (grep { $_ =~ /\.fix/ } $child->children) {
758 } elsif ($child =~ /_dumper_version$/) {
763 $self->msg("- moving temp dir to $output_dir");
764 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
765 for $tmp_output_dir->children;
767 if (-e $output_dir) {
768 $self->msg("- clearing tmp dir $tmp_output_dir");
769 # delete existing fixture set
# NOTE(review): Path::Class ->remove is rmdir — it only succeeds because the
# children were just moved out; rmtree would be the defensive choice.
770 $tmp_output_dir->remove;
# Loads a JSON fixture-set config, recursively merging any 'includes' files
# from the same config_dir, validates that it defines sets, and applies
# default rel-fetch policies. The final return is in elided lines.
778 sub load_config_file {
779 my ($self, $config_file) = @_;
780 DBIx::Class::Exception->throw("config does not exist at $config_file")
781 unless -e $config_file;
783 my $config = Config::Any::JSON->load($config_file);
786 if (my $incs = $config->{includes}) {
788 DBIx::Class::Exception->throw(
789 'includes params of config must be an array ref of hashrefs'
790 ) unless ref $incs eq 'ARRAY';
792 foreach my $include_config (@$incs) {
793 DBIx::Class::Exception->throw(
794 'includes params of config must be an array ref of hashrefs'
795 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
# Included files are resolved relative to config_dir, same as the original.
797 my $include_file = $self->config_dir->file($include_config->{file});
799 DBIx::Class::Exception->throw("config does not exist at $include_file")
800 unless -e $include_file;
802 my $include = Config::Any::JSON->load($include_file);
# NOTE(review): msg() is passed a hashref here — presumably a debug dump of
# the included structure; confirm msg() handles refs.
803 $self->msg($include);
# merge($config, $include): with Hash::Merge's default behavior the main
# config's values take precedence over the include's.
804 $config = merge( $config, $include );
806 delete $config->{includes};
# A config is unusable without a non-empty 'sets' arrayref.
810 return DBIx::Class::Exception->throw('config has no sets')
811 unless $config && $config->{sets} &&
812 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
# Default rel policies: belongs_to is followed by default (to avoid dangling
# FKs on import); might_have/has_many are opt-in.
814 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
815 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
816 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
# Body of dump_rs (the 'sub dump_rs {' line is elided): walk a resultset
# and delegate each row to dump_object with the same params.
822 my ($self, $rs, $params) = @_;
824 while (my $row = $rs->next) {
825 $self->dump_object($row, $params);
# Body of dump_object (sub declaration elided). Serializes a single row to a
# .fix file under set_dir, after substituting __ENV/__ATTR/__catfile/__catdir
# placeholders in the set spec, then recurses into the row's relationships
# according to the set/rule configuration. Many interior lines are elided.
830 my ($self, $object, $params) = @_;
831 my $set = $params->{set};
# Visitor that rewrites __NAME(arg,...)__ placeholders inside the set spec.
833 my $v = Data::Visitor::Callback->new(
835 my ($visitor, $data) = @_;
# ENV substitution: dies/throws in elided lines when the variable is unset.
838 my ( $self, $v ) = @_;
839 if (! defined($ENV{$v})) {
# ATTR substitution: looked up in the config_attrs hash given to new().
847 if(my $attr = $self->config_attrs->{$v}) {
854 my ($self, @args) = @_;
858 my ($self, @args) = @_;
# One regex pass dispatches to the handler named by the placeholder,
# splitting its parenthesized arguments on commas.
863 my $subsre = join( '|', keys %$subs );
864 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
872 die 'no dir passed to dump_object' unless $params->{set_dir};
873 die 'no object passed to dump_object' unless $object;
875 my @inherited_attrs = @{$self->_inherited_attributes};
878 $object->get_column($_)
879 } $object->primary_columns;
# Composite PKs are joined with NUL for the dedup key.
881 my $key = join("\0", @pk_vals);
883 my $src = $object->result_source;
# Post-increment: $exists is true iff this row was already dumped this run.
884 my $exists = $self->dumped_objects->{$src->name}{$key}++;
887 # write dir and gen filename
888 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
889 $source_dir->mkpath(0, 0777);
891 # strip dir separators from file name
892 my $file = $source_dir->file(
# NOTE(review): this map's s||| mutates @pk_vals in place (aliased $_), so
# PK values containing / or \ are altered for any later use of @pk_vals.
893 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
898 $self->msg('-- dumping ' . $file->stringify, 2);
899 my %ds = $object->get_columns;
# External-storage handling: back up the field's referenced blob via the
# configured handler class and embed it base64-encoded in the fixture.
901 if($set->{external}) {
902 foreach my $field (keys %{$set->{external}}) {
903 my $key = $ds{$field};
# A leading '+' means a fully-qualified handler class; otherwise the name is
# resolved under DBIx::Class::Fixtures::External::.
904 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
905 my $args = $set->{external}->{$field}->{args};
907 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
910 $ds{external}->{$field} =
911 encode_base64( $class
912 ->backup($key => $args));
916 # mess with dates if specified
917 if ($set->{datetime_relative}) {
918 my $formatter= $object->result_source->schema->storage->datetime_parser;
# NOTE(review): $@ is tested here but no eval is visible in this extract —
# presumably the datetime_parser call above is wrapped in an elided eval.
919 unless ($@ || !$formatter) {
921 if ($set->{datetime_relative} eq 'today') {
922 $dt = DateTime->today;
924 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
# Convert recognized temporal columns to durations relative to $dt so
# populate can re-anchor them at import time.
927 while (my ($col, $value) = each %ds) {
928 my $col_info = $object->result_source->column_info($col);
931 && $col_info->{_inflate_info}
933 (uc($col_info->{data_type}) eq 'DATETIME')
934 or (uc($col_info->{data_type}) eq 'DATE')
935 or (uc($col_info->{data_type}) eq 'TIME')
936 or (uc($col_info->{data_type}) eq 'TIMESTAMP')
937 or (uc($col_info->{data_type}) eq 'INTERVAL')
940 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
943 warn "datetime_relative not supported for this db driver at the moment";
947 # do the actual dumping
# Hook may mutate %ds (e.g. redact fields) before serialization.
948 $params->{predump_hook}->($src, \%ds) if ( exists($params->{predump_hook}) );
949 my $serialized = Dump(\%ds)->Out();
950 $file->openw->print($serialized);
953 # don't bother looking at rels unless we are actually planning to dump at least one type
954 my ($might_have, $belongs_to, $has_many) = map {
955 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
956 } qw/might_have belongs_to has_many/;
958 return unless $might_have
963 # dump rels of object
965 foreach my $name (sort $src->relationships) {
966 my $info = $src->relationship_info($name);
967 my $r_source = $src->related_source($name);
968 # if belongs_to or might_have with might_have param set or has_many with
969 # has_many param set then
# accessor 'single' + no join_type == belongs_to (always followed);
# 'single' + join_type == might_have (only if enabled); 'multi' == has_many.
971 ( $info->{attrs}{accessor} eq 'single' &&
972 (!$info->{attrs}{join_type} || $might_have)
974 || $info->{attrs}{accessor} eq 'filter'
976 ($info->{attrs}{accessor} eq 'multi' && $has_many)
978 my $related_rs = $object->related_resultset($name);
979 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
980 # these parts of the rule only apply to has_many rels
981 if ($rule && $info->{attrs}{accessor} eq 'multi') {
982 $related_rs = $related_rs->search(
984 { join => $rule->{join} }
985 ) if ($rule->{cond});
987 $related_rs = $related_rs->search(
989 { rows => $rule->{quantity} }
990 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
992 $related_rs = $related_rs->search(
994 { order_by => $rule->{order_by} }
995 ) if ($rule->{order_by});
# Global has_many quantity cap from the top-level set.
998 if ($set->{has_many}{quantity} &&
999 $set->{has_many}{quantity} =~ /^\d+$/) {
1000 $related_rs = $related_rs->search(
1002 { rows => $set->{has_many}->{quantity} }
# Recurse with a reduced 'set' built only from the inherited attributes,
# optionally merged with a matching rule's fetch spec.
1006 my %c_params = %{$params};
1007 # inherit date param
1008 my %mock_set = map {
1010 } grep { $set->{$_} } @inherited_attrs;
1012 $c_params{set} = \%mock_set;
1013 $c_params{set} = merge( $c_params{set}, $rule)
1014 if $rule && $rule->{fetch};
1016 $self->dump_rs($related_rs, \%c_params);
# Explicit per-set 'fetch' specs: follow each named rel with its own
# cond/quantity/order_by, inheriting the inheritable attributes.
1021 return unless $set && $set->{fetch};
1022 foreach my $fetch (@{$set->{fetch}}) {
1023 # inherit date param
1024 $fetch->{$_} = $set->{$_} foreach
1025 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
1026 my $related_rs = $object->related_resultset($fetch->{rel});
1027 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
1030 my $info = $object->result_source->relationship_info($fetch->{rel});
1031 if ($info->{attrs}{accessor} eq 'multi') {
1032 $fetch = merge( $fetch, $rule );
1033 } elsif ($rule->{fetch}) {
1034 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
1038 die "relationship $fetch->{rel} does not exist for " . $src->source_name
1039 unless ($related_rs);
1041 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
1042 # if value starts with \ assume it's meant to be passed as a scalar ref
1043 # to dbic. ideally this would substitute deeply
# Same in-place s/^\\// scalar-ref convention as in dump() above.
1044 $fetch->{cond} = { map {
1045 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1046 : $fetch->{cond}->{$_}
1047 } keys %{$fetch->{cond}} };
1050 $related_rs = $related_rs->search(
1052 { join => $fetch->{join} }
1053 ) if $fetch->{cond};
1055 $related_rs = $related_rs->search(
1057 { rows => $fetch->{quantity} }
1058 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1059 $related_rs = $related_rs->search(
1061 { order_by => $fetch->{order_by} }
1062 ) if $fetch->{order_by};
1064 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
# Builds a throwaway DBIC schema connected to the target DB: injects the
# configured schema class into a uniquely-named generated namespace, drops
# all existing tables, deploys the given DDL, then reconnects via a second
# fresh namespace and returns that schema. Closing lines are elided.
1068 sub _generate_schema {
1070 my $params = shift || {};
1072 $self->msg("\ncreating schema");
1074 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
# NOTE(review): string eval of a class load; no $@ check is visible in this
# extract — confirm failures are handled in the elided lines.
1075 eval "require $schema_class";
1079 my $connection_details = $params->{connection_details};
# Unique namespace per call so repeated generations don't collide.
1081 $namespace_counter++;
1083 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1084 Class::C3::Componentised->inject_base( $namespace => $schema_class );
1086 $pre_schema = $namespace->connect(@{$connection_details});
1087 unless( $pre_schema ) {
1088 return DBIx::Class::Exception->throw('connection details not valid');
1090 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
1091 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1092 my $dbh = $pre_schema->storage->dbh;
1095 $self->msg("- clearing DB of existing tables");
# Drop inside a transaction with FK checks deferred so ordering doesn't
# matter; 'cascade' appends the CASCADE clause for engines that need it.
1096 $pre_schema->storage->txn_do(sub {
1097 $pre_schema->storage->with_deferred_fk_checks(sub {
1098 foreach my $table (@tables) {
# Table names come from the schema definition itself (not user input),
# which is why plain interpolation into the DROP statement is tolerated.
1100 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1106 # import new ddl file to db
1107 my $ddl_file = $params->{ddl};
1108 $self->msg("- deploying schema using $ddl_file");
1109 my $data = _read_sql($ddl_file);
# Each statement is attempted; failures die unless ignore_sql_errors is set.
1111 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1112 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1114 $self->msg("- finished importing DDL into DB");
1116 # load schema object from our new DB
# Second fresh namespace for the post-deploy connection.
1117 $namespace_counter++;
1118 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1119 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1120 my $schema = $namespace2->connect(@{$connection_details});
# Body of _read_sql (sub declaration elided): slurps a DDL file and splits it
# into individual SQL statements.
1125 my $ddl_file = shift;
# NOTE(review): two-arg open with interpolated filename and an undeclared
# $fh (its 'my' is presumably in elided line 1126); prefer
# open my $fh, '<', $ddl_file.
1127 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
# Strip '--' comment lines, then split on ';'. NOTE(review): this naive
# split breaks any statement containing a literal semicolon (e.g. in a
# string default or trigger body).
1128 my @data = split(/\n/, join('', <$fh>));
1129 @data = grep(!/^--/, @data);
1130 @data = split(/;/, join('', @data));
1132 @data = grep { $_ && $_ !~ /^-- / } @data;
1136 =head2 dump_config_sets
1138 Works just like L</dump> but instead of specifying a single json config set
1139 located in L</config_dir> we dump each set named in the C<configs> parameter.
1141 The parameters are the same as for L</dump> except instead of a C<directory>
1142 parameter we have a C<directory_template> which is a coderef expected to return
1143 a scalar that is a root directory where we will do the actual dumping. This
1144 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1147 $fixture->dump_all_config_sets({
1149 configs => [qw/one.json other.json/],
1150 directory_template => sub {
1151 my ($fixture, $params, $set) = @_;
1152 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
# Dumps each named config set in turn, computing the output directory for
# each via the caller-supplied directory_template coderef. Closing brace is
# in elided lines.
1158 sub dump_config_sets {
1159 my ($self, $params) = @_;
1160 my $available_config_sets = delete $params->{configs};
# NOTE(review): the exception message has an unbalanced quote —
# "'directory_template is required parameter" — should read
# "'directory_template' is a required parameter".
1161 my $directory_template = delete $params->{directory_template} ||
1162 DBIx::Class::Exception->throw("'directory_template is required parameter");
1164 for my $set (@$available_config_sets) {
# NOTE(review): this copies the REFERENCE, not the hash — $localparams is an
# alias for $params, so the directory/config written below leak into the
# caller's hashref and persist across iterations. A shallow copy
# (my $localparams = { %$params };) matches the variant shown in the
# dump_all_config_sets POD.
1165 my $localparams = $params;
1166 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1167 $localparams->{config} = $set;
1168 $self->dump($localparams);
1169 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1173 =head2 dump_all_config_sets
1175 my %local_params = %$params;
1176 my $local_self = bless { %$self }, ref($self);
1177 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1178 $local_params{config} = $set;
1179 $self->dump(\%local_params);
1182 Works just like L</dump> but instead of specifying a single json config set
1183 located in L</config_dir> we dump each set in turn to the specified directory.
1185 The parameters are the same as for L</dump> except instead of a C<directory>
1186 parameter we have a C<directory_template> which is a coderef expected to return
1187 a scalar that is a root directory where we will do the actual dumping. This
1188 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1191 $fixture->dump_all_config_sets({
1193 directory_template => sub {
1194 my ($fixture, $params, $set) = @_;
1195 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
# Convenience wrapper: dump every config set found in config_dir by
# delegating to dump_config_sets. Remaining args/closing are elided.
1201 sub dump_all_config_sets {
1202 my ($self, $params) = @_;
1203 $self->dump_config_sets({
1205 configs=>[$self->available_config_sets],
1213 =item Arguments: \%$attrs
1215 =item Return Value: 1
1219 $fixtures->populate( {
1220 # directory to look for fixtures in, as specified to dump
1221 directory => '/home/me/app/fixtures',
1224 ddl => '/home/me/app/sql/ddl.sql',
1226 # database to clear, deploy and then populate
1227 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1229 # DDL to deploy after populating records, ie. FK constraints
1230 post_ddl => '/home/me/app/sql/post_ddl.sql',
1232 # use CASCADE option when dropping tables
1235 # optional, set to 1 to run ddl but not populate
1238 # optional, set to 1 to run each fixture through ->create rather than have
1239 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1240 # that effects the value of column(s).
1243 # Dont try to clean the database, just populate over whats there. Requires
1244 # schema option. Use this if you want to handle removing old data yourself
1249 In this case the database app_dev will be cleared of all tables, then the
1250 specified DDL deployed to it, then finally all fixtures found in
1251 /home/me/app/fixtures will be added to it. populate will generate its own
1252 DBIx::Class schema from the DDL rather than being passed one to use. This is
1253 better as custom insert methods are avoided which can get in the way. In
1254 some cases you might not have a DDL, and so this method will eventually allow a
1255 $schema object to be passed instead.
1257 If needed, you can specify a post_ddl attribute which is a DDL to be applied
1258 after all the fixtures have been added to the database. A good use of this
1259 option would be to add foreign key constraints since databases like Postgresql
1260 cannot disable foreign key checks.
1262 If your tables have foreign key constraints you may want to use the cascade
1263 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1266 C<directory> is a required attribute.
1268 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1269 C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1272 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1273 attribute containing the connected schema you wish to operate on and set the
1274 C<no_deploy> attribute.
1276 If you wish to fix-up data upon populate, you can provide populate a
1277 C<prepopulate_hook> coderef that will be passed the ResultSource and the
1278 row as a HashRef. This is exactly like C<predump_hook>, only called during
1279 C<populate> instead of C<dump>.
# Body of populate() (sub declaration elided; the sub also continues past the
# end of this extract). Validates params, obtains a schema (freshly deployed
# from DDL, or caller-supplied with no_deploy), applies placeholder
# substitutions to the saved config set, copies fixtures to a temp dir, and
# inserts them inside one transaction with deferred FK checks.
1286 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1287 unless ref $params eq 'HASH';
1289 DBIx::Class::Exception->throw('directory param not specified')
1290 unless $params->{directory};
1292 my $fixture_dir = dir(delete $params->{directory});
1293 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1294 unless -d $fixture_dir;
# Schema acquisition: either drop-and-deploy from a DDL file, or reuse the
# caller's connected schema when no_deploy is set.
1299 if ($params->{ddl} && $params->{connection_details}) {
1300 $ddl_file = file(delete $params->{ddl});
1301 unless (-e $ddl_file) {
1302 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1304 unless (ref $params->{connection_details} eq 'ARRAY') {
1305 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1307 $schema = $self->_generate_schema({
1309 connection_details => delete $params->{connection_details},
1312 } elsif ($params->{schema} && $params->{no_deploy}) {
1313 $schema = $params->{schema};
1315 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
# Note the correctly parenthesized coderef check here — contrast with the
# broken equivalent in dump().
1318 if ($params->{prepopulate_hook} && ref($params->{prepopulate_hook}) ne "CODE") {
1319 DBIx::Class::Exception->throw('prepopulate_hook must be a coderef');
1322 return 1 if $params->{no_populate};
1324 $self->msg("\nimporting fixtures");
1325 my $tmp_fixture_dir = tempdir();
1326 my $version_file = file($fixture_dir, '_dumper_version');
1327 my $config_set_path = file($fixture_dir, '_config_set');
# NOTE(review): string eval of the _config_set file (Data::Dump::Streamer
# output written by dump()) — executes arbitrary Perl from the fixture dir;
# only safe when the fixture directory is trusted.
1328 my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
# Same __ENV/__ATTR/__catfile/__catdir placeholder visitor as in
# dump_object, applied to the restored config set.
1330 my $v = Data::Visitor::Callback->new(
1331 plain_value => sub {
1332 my ($visitor, $data) = @_;
1335 my ( $self, $v ) = @_;
1336 if (! defined($ENV{$v})) {
1343 my ($self, $v) = @_;
1344 if(my $attr = $self->config_attrs->{$v}) {
1351 my ($self, @args) = @_;
1355 my ($self, @args) = @_;
1360 my $subsre = join( '|', keys %$subs );
1361 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1367 $v->visit( $config_set );
# Index the restored sets by class name for per-source lookups later.
1372 %sets_by_src = map { delete($_->{class}) => $_ }
1373 @{$config_set->{sets}}
1376 # DBIx::Class::Exception->throw('no version file found');
1377 # unless -e $version_file;
1379 if (-e $tmp_fixture_dir) {
1380 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1381 $tmp_fixture_dir->rmtree;
1383 $self->msg("- creating temp dir");
1384 $tmp_fixture_dir->mkpath();
# Copy only the fixture subdirs that correspond to sources in the target
# schema.
1385 for ( map { $schema->source($_)->from } $schema->sources) {
1386 my $from_dir = $fixture_dir->subdir($_);
1387 next unless -e $from_dir;
1388 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1391 unless (-d $tmp_fixture_dir) {
1392 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1396 my $formatter = $schema->storage->datetime_parser;
# NOTE(review): $@ tested with no eval visible in this extract — presumably
# the datetime_parser call is wrapped in an elided eval, as in dump_object.
1397 unless ($@ || !$formatter) {
# Re-anchor DateTime::Duration values (written by dump's datetime_relative
# handling) against datetime_relative_to, or today's date by default.
1399 if ($params->{datetime_relative_to}) {
1400 $callbacks{'DateTime::Duration'} = sub {
1401 $params->{datetime_relative_to}->clone->add_duration($_);
1404 $callbacks{'DateTime::Duration'} = sub {
1405 $formatter->format_datetime(DateTime->today->add_duration($_))
1408 $callbacks{object} ||= "visit_ref";
# NOTE(review): indirect object syntax — prefer
# Data::Visitor::Callback->new(%callbacks).
1409 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
# Import everything in one transaction with FK checks deferred so insert
# order across sources doesn't matter.
1412 $schema->storage->txn_do(sub {
1413 $schema->storage->with_deferred_fk_checks(sub {
1414 foreach my $source (sort $schema->sources) {
1415 $self->msg("- adding " . $source);
1416 my $rs = $schema->resultset($source);
1417 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1418 next unless (-e $source_dir);
1420 while (my $file = $source_dir->next) {
1421 next unless ($file =~ /\.fix$/);
1422 next if $file->is_dir;
1423 my $contents = $file->slurp;
1426 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1427 if(my $external = delete $HASH1->{external}) {
1428 my @fields = keys %{$sets_by_src{$source}->{external}};
1429 foreach my $field(@fields) {
1430 my $key = $HASH1->{$field};
1431 my $content = decode_base64 ($external->{$field});
1432 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1433 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1434 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1436 $class->restore($key, $content, $args);
1440 $params->{prepopulate_hook}->($rs->result_source, $HASH1) if (exists($params->{prepopulate_hook}));
1441 if ( $params->{use_create} ) {
1442 $rs->create( $HASH1 );
1444 push(@rows, $HASH1);
1447 $rs->populate(\@rows) if scalar(@rows);
1449 ## Now we need to do some db specific cleanup
1450 ## this probably belongs in a more isolated space. Right now this is
1451 ## to just handle postgresql SERIAL types that use Sequences
1453 my $table = $rs->result_source->name;
1454 for my $column(my @columns = $rs->result_source->columns) {
1455 my $info = $rs->result_source->column_info($column);
1456 if(my $sequence = $info->{sequence}) {
1457 $self->msg("- updating sequence $sequence");
1458 $rs->result_source->storage->dbh_do(sub {
1459 my ($storage, $dbh, @cols) = @_;
1460 $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
1461 my $sth = $dbh->prepare($sql);
1462 my $rv = $sth->execute or die $sth->errstr;
1463 $self->msg("- $sql");
1471 $self->do_post_ddl( {
1473 post_ddl=>$params->{post_ddl}
1474 } ) if $params->{post_ddl};
1476 $self->msg("- fixtures imported");
1477 $self->msg("- cleaning up");
1478 $tmp_fixture_dir->rmtree;
1483 my ($self, $params) = @_;
1485 my $schema = $params->{schema};
1486 my $data = _read_sql($params->{post_ddl});
1488 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1489 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1491 $self->msg("- finished importing post-populate DDL into DB");
1496 my $subject = shift || return;
1497 my $level = shift || 1;
1498 return unless $self->debug >= $level;
1500 print Dumper($subject);
1502 print $subject . "\n";
1508 Luke Saunders <luke@shadowcatsystems.co.uk>
1510 Initial development sponsored by and (c) Takkle, Inc. 2007
1514 Ash Berlin <ash@shadowcatsystems.co.uk>
1516 Matt S. Trout <mst@shadowcatsystems.co.uk>
1518 Drew Taylor <taylor.andrew.j@gmail.com>
1520 Frank Switalski <fswitalski@gmail.com>
1522 Chris Akins <chris.hexx@gmail.com>
1526 This library is free software under the same license as Perl itself.