1 package DBIx::Class::Fixtures;
6 use DBIx::Class 0.08100;
7 use DBIx::Class::Exception;
8 use Class::Accessor::Grouped;
9 use Path::Class qw(dir file tempdir);
10 use File::Spec::Functions 'catfile', 'catdir';
11 use Config::Any::JSON;
12 use Data::Dump::Streamer;
13 use Data::Visitor::Callback;
15 use File::Copy::Recursive qw/dircopy/;
16 use File::Copy qw/move/;
17 use Hash::Merge qw( merge );
19 use Class::C3::Componentised;
22 use base qw(Class::Accessor::Grouped);
24 our $namespace_counter = 0;
26 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
27 _inherited_attributes debug schema_class dumped_objects config_attrs/);
29 our $VERSION = '1.001025';
31 $VERSION = eval $VERSION;
35 DBIx::Class::Fixtures - Dump data and repopulate a database using rules
39 use DBIx::Class::Fixtures;
43 my $fixtures = DBIx::Class::Fixtures->new({
44 config_dir => '/home/me/app/fixture_configs'
48 config => 'set_config.json',
49 schema => $source_dbic_schema,
50 directory => '/home/me/app/fixtures'
54 directory => '/home/me/app/fixtures',
55 ddl => '/home/me/app/sql/ddl.sql',
56 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
57 post_ddl => '/home/me/app/sql/post_ddl.sql',
62 Dump fixtures from source database to filesystem then import to another
63 database (with same schema) at any time. Use as a constant dataset for running
64 tests against or for populating development databases when impractical to use
65 production clones. Describe fixture set using relations and conditions based on
66 your DBIx::Class schema.
68 =head1 DEFINE YOUR FIXTURE SET
70 Fixture sets are currently defined in .json files which must reside in your
71 config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
72 describe which data to pull and dump from the source database.
95 This will fetch artists with primary keys 1 and 3, the producer with primary
96 key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
97 rel from Producer to Artist.
99 The top level attributes are as follows:
103 Sets must be an array of hashes, as in the example given above. Each set
104 defines a set of objects to be included in the fixtures. For details on valid
105 set attributes see L</SET ATTRIBUTES> below.
109 Rules place general conditions on classes. For example if whenever an artist
110 was dumped you also wanted all of their cds dumped too, then you could use a
111 rule to specify this. For example:
140 In this case all the cds of artists 1, 3 and all producer 5's artists will be
141 dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
142 to CD. This is equivalent to:
169 rules must be a hash keyed by class name.
175 To prevent repetition between configs you can include other configs. For
184 { "file": "base.json" }
188 Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
189 which is the name of another config file in the same directory. The original
190 config is merged with its includes using L<Hash::Merge>.
192 =head2 datetime_relative
194 Only available for MySQL and PostgreSQL at the moment, must be a value that
195 DateTime::Format::* can parse. For example:
199 "class": "RecentItems",
202 "datetime_relative": "2007-10-30 00:00:00"
205 This will work when dumping from a MySQL database and will cause any datetime
206 fields (where datatype => 'datetime' in the column def of the schema class) to
207 be dumped as a DateTime::Duration object relative to the date specified in the
208 datetime_relative value. For example if the RecentItem object had a date field
209 set to 2007-10-25, then when the fixture is imported the field will be set to 5
210 days in the past relative to the current time.
214 Specifies whether to automatically dump might_have relationships. Should be a
215 hash with one attribute - fetch. Set fetch to 1 or 0.
218 "might_have": { "fetch": 1 },
231 Note: belongs_to rels are automatically dumped whether you like it or not, this
232 is to avoid FKs to nowhere when importing. General rules on has_many rels are
233 not accepted at this top level, but you can turn them on for individual sets -
234 see L</SET ATTRIBUTES>.
236 =head1 SET ATTRIBUTES
240 Required attribute. Specifies the DBIx::Class object class you wish to dump.
244 Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
245 If the id is not in the source db then it just won't get dumped, no warnings or
250 Must be either an integer or the string 'all'. Specifying an integer will
251 effectively set the 'rows' attribute on the resultset clause, specifying 'all'
252 will cause the rows attribute to be left off and for all matching rows to be
253 dumped. There's no randomising here, it's just the first x rows.
257 A hash specifying the conditions dumped objects must match. Essentially this is
258 a JSON representation of a DBIx::Class search clause. For example:
264 "cond": { "name": "Dave" }
268 This will dump all artists whose name is 'Dave'. Essentially
269 $artist_rs->search({ name => 'Dave' })->all.
271 Sometimes in a search clause it's useful to use scalar refs to do things like:
273 $artist_rs->search({ no1_singles => \'> no1_albums' })
275 This could be specified in the cond hash like so:
281 "cond": { "no1_singles": "\> no1_albums" }
285 So if the value starts with a backslash the value is made a scalar ref before
286 being passed to search.
290 An array of relationships to be used in the cond clause.
296 "cond": { "cds.position": { ">": 4 } },
301 Fetch all artists who have cds with position greater than 4.
305 Must be an array of hashes. Specifies which rels to also dump. For example:
314 "cond": { "position": "2" }
319 Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
321 Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
322 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
323 as in the set attributes. quantity is necessary for has_many relationships, but
324 not for belongs_to or might_have relationships.
328 Specifies whether to fetch has_many rels for this set. Must be a hash
329 containing keys fetch and quantity.
331 Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
334 Be careful here, dumping has_many rels can lead to a lot of data being dumped.
338 As with has_many but for might_have relationships. Quantity doesn't do anything
341 This value will be inherited by all fetches in this set. This is not true for
342 the has_many attribute.
346 In some cases your database information might be keys to values in some sort of
347 external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
348 to store blob information on the filesystem. In this case you may wish the ability
349 to back up your external storage in the same way as your database data. The L</external>
350 attribute lets you specify a handler for this type of issue. For example:
359 "args": {"path":"__ATTR(photo_dir)__"}
365 This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
366 where the path to a file is specified by the C<file> field of the C<Photo> source.
367 We use the uninflated value of the field so you need to completely handle backup
368 and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
369 and you can create your own custom handlers by placing a '+' in the namespace:
371 "class": "+MyApp::Schema::SomeExternalStorage",
373 Although if possible I'd love to get patches to add some of the other common
374 types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
376 See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
378 =head1 RULE ATTRIBUTES
382 Same as with L</SET ATTRIBUTES>
386 Same as with L</SET ATTRIBUTES>
390 Same as with L</SET ATTRIBUTES>
394 Same as with L</SET ATTRIBUTES>
398 Same as with L</SET ATTRIBUTES>
400 =head1 RULE SUBSTITUTIONS
402 You can provide the following substitution patterns for your rule values. An
403 example of this might be:
408 "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__",
414 Provide a value from %ENV
418 Provide a value from L</config_attrs>
422 Create the path to a file from a list
426 Create the path to a directory from a list
434 =item Arguments: \%$attrs
436 =item Return Value: $fixture_object
440 Returns a new DBIx::Class::Fixture object. %attrs can have the following
447 required. must contain a valid path to the directory in which your .json
452 determines whether to be verbose
454 =item ignore_sql_errors:
456 ignore errors on import of DDL etc
460 A hash of information you can use to do replacements inside your configuration
461 sets. For example, if your set looks like:
469 "quantity": "__ATTR(quantity)__",
474 my $fixtures = DBIx::Class::Fixtures->new( {
475 config_dir => '/home/me/app/fixture_configs'
481 You may wish to do this if you want to let whoever runs the dumps have a bit
486 my $fixtures = DBIx::Class::Fixtures->new( {
487 config_dir => '/home/me/app/fixture_configs'
496 unless (ref $params eq 'HASH') {
497 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
500 unless ($params->{config_dir}) {
501 return DBIx::Class::Exception->throw('config_dir param not specified');
504 my $config_dir = dir($params->{config_dir});
505 unless (-e $params->{config_dir}) {
506 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
510 config_dir => $config_dir,
511 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
512 debug => $params->{debug} || 0,
513 ignore_sql_errors => $params->{ignore_sql_errors},
514 dumped_objects => {},
515 use_create => $params->{use_create} || 0,
516 use_find_or_create => $params->{use_find_or_create} || 0,
517 config_attrs => $params->{config_attrs} || {},
525 =head2 available_config_sets
527 Returns a list of all the config sets found in the L</config_dir>. These will
528 be a list of the json based files containing dump rules.
533 sub available_config_sets {
534 @config_sets = scalar(@config_sets) ? @config_sets : map {
537 -f $_ && $_=~/json$/;
538 } dir((shift)->config_dir)->children;
545 =item Arguments: \%$attrs
547 =item Return Value: 1
552 config => 'set_config.json', # config file to use. must be in the config
553 # directory specified in the constructor
554 schema => $source_dbic_schema,
555 directory => '/home/me/app/fixtures' # output directory
561 all => 1, # just dump everything that's in the schema
562 schema => $source_dbic_schema,
563 directory => '/home/me/app/fixtures' # output directory
566 In this case objects will be dumped to subdirectories in the specified
567 directory. For example:
569 /home/me/app/fixtures/artist/1.fix
570 /home/me/app/fixtures/artist/3.fix
571 /home/me/app/fixtures/producer/5.fix
573 schema and directory are required attributes. also, one of config or all must
576 Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
577 If this form is used your HashRef should conform to the structure rules defined
578 for the JSON representations.
586 unless (ref $params eq 'HASH') {
587 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
590 foreach my $param (qw/schema directory/) {
591 unless ($params->{$param}) {
592 return DBIx::Class::Exception->throw($param . ' param not specified');
596 if($params->{excludes} && !$params->{all}) {
597 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
600 my $schema = $params->{schema};
602 if ($params->{config}) {
603 $config = ref $params->{config} eq 'HASH' ?
607 my $config_file = $self->config_dir->file($params->{config});
608 $self->load_config_file($config_file);
610 } elsif ($params->{all}) {
611 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
613 might_have => { fetch => 0 },
614 has_many => { fetch => 0 },
615 belongs_to => { fetch => 0 },
618 { class => $_, quantity => 'all' };
624 DBIx::Class::Exception->throw('must pass config or set all');
627 my $output_dir = dir($params->{directory});
628 unless (-e $output_dir) {
629 $output_dir->mkpath ||
630 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
633 $self->msg("generating fixtures");
634 my $tmp_output_dir = tempdir();
636 if (-e $tmp_output_dir) {
637 $self->msg("- clearing existing $tmp_output_dir");
638 $tmp_output_dir->rmtree;
640 $self->msg("- creating $tmp_output_dir");
641 $tmp_output_dir->mkpath;
643 # write version file (for the potential benefit of populate)
644 $tmp_output_dir->file('_dumper_version')
648 # write our current config set
649 $tmp_output_dir->file('_config_set')
651 ->print( Dumper $config );
653 $config->{rules} ||= {};
654 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
656 while ( my ($k,$v) = each %{ $config->{rules} } ) {
657 if ( my $source = eval { $schema->source($k) } ) {
658 $config->{rules}{$source->source_name} = $v;
662 foreach my $source (@sources) {
663 # apply rule to set if specified
664 my $rule = $config->{rules}->{$source->{class}};
665 $source = merge( $source, $rule ) if ($rule);
668 my $rs = $schema->resultset($source->{class});
670 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
671 # if value starts with \ assume it's meant to be passed as a scalar ref
672 # to dbic. ideally this would substitute deeply
675 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
676 : $source->{cond}->{$_}
677 } keys %{$source->{cond}}
681 $rs = $rs->search($source->{cond}, { join => $source->{join} })
684 $self->msg("- dumping $source->{class}");
686 my %source_options = ( set => { %{$config}, %{$source} } );
687 if ($source->{quantity}) {
688 $rs = $rs->search({}, { order_by => $source->{order_by} })
689 if $source->{order_by};
691 if ($source->{quantity} =~ /^\d+$/) {
692 $rs = $rs->search({}, { rows => $source->{quantity} });
693 } elsif ($source->{quantity} ne 'all') {
694 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
697 elsif ($source->{ids} && @{$source->{ids}}) {
698 my @ids = @{$source->{ids}};
699 my (@pks) = $rs->result_source->primary_columns;
700 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
701 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
704 DBIx::Class::Exception->throw('must specify either quantity or ids');
707 $source_options{set_dir} = $tmp_output_dir;
708 $self->dump_rs($rs, \%source_options );
711 # clear existing output dir
712 foreach my $child ($output_dir->children) {
713 if ($child->is_dir) {
714 next if ($child eq $tmp_output_dir);
715 if (grep { $_ =~ /\.fix/ } $child->children) {
718 } elsif ($child =~ /_dumper_version$/) {
723 $self->msg("- moving temp dir to $output_dir");
724 dircopy($tmp_output_dir, $output_dir);
726 if (-e $output_dir) {
727 $self->msg("- clearing tmp dir $tmp_output_dir");
728 # delete existing fixture set
729 $tmp_output_dir->remove;
737 sub load_config_file {
738 my ($self, $config_file) = @_;
739 DBIx::Class::Exception->throw("config does not exist at $config_file")
740 unless -e $config_file;
742 my $config = Config::Any::JSON->load($config_file);
745 if (my $incs = $config->{includes}) {
747 DBIx::Class::Exception->throw(
748 'includes params of config must be an array ref of hashrefs'
749 ) unless ref $incs eq 'ARRAY';
751 foreach my $include_config (@$incs) {
752 DBIx::Class::Exception->throw(
753 'includes params of config must be an array ref of hashrefs'
754 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
756 my $include_file = $self->config_dir->file($include_config->{file});
758 DBIx::Class::Exception->throw("config does not exist at $include_file")
759 unless -e $include_file;
761 my $include = Config::Any::JSON->load($include_file);
762 $self->msg($include);
763 $config = merge( $config, $include );
765 delete $config->{includes};
769 return DBIx::Class::Exception->throw('config has no sets')
770 unless $config && $config->{sets} &&
771 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
773 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
774 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
775 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
781 my ($self, $rs, $params) = @_;
783 while (my $row = $rs->next) {
784 $self->dump_object($row, $params);
789 my ($self, $object, $params) = @_;
790 my $set = $params->{set};
792 my $v = Data::Visitor::Callback->new(
794 my ($visitor, $data) = @_;
797 my ( $self, $v ) = @_;
798 if (! defined($ENV{$v})) {
806 if(my $attr = $self->config_attrs->{$v}) {
813 my ($self, @args) = @_;
817 my ($self, @args) = @_;
822 my $subsre = join( '|', keys %$subs );
823 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
831 die 'no dir passed to dump_object' unless $params->{set_dir};
832 die 'no object passed to dump_object' unless $object;
834 my @inherited_attrs = @{$self->_inherited_attributes};
837 $object->get_column($_)
838 } $object->primary_columns;
840 my $key = join("\0", @pk_vals);
842 my $src = $object->result_source;
843 my $exists = $self->dumped_objects->{$src->name}{$key}++;
846 # write dir and gen filename
847 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
848 $source_dir->mkpath(0, 0777);
850 # strip dir separators from file name
851 my $file = $source_dir->file(
852 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
857 $self->msg('-- dumping ' . $file->stringify, 2);
858 my %ds = $object->get_columns;
860 if($set->{external}) {
861 foreach my $field (keys %{$set->{external}}) {
862 my $key = $ds{$field};
863 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
864 my $args = $set->{external}->{$field}->{args};
866 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
869 $ds{external}->{$field} =
870 encode_base64( $class
871 ->backup($key => $args),'');
875 # mess with dates if specified
876 if ($set->{datetime_relative}) {
877 my $formatter= $object->result_source->schema->storage->datetime_parser;
878 unless ($@ || !$formatter) {
880 if ($set->{datetime_relative} eq 'today') {
881 $dt = DateTime->today;
883 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
886 while (my ($col, $value) = each %ds) {
887 my $col_info = $object->result_source->column_info($col);
890 && $col_info->{_inflate_info}
892 (uc($col_info->{data_type}) eq 'DATETIME')
893 or (uc($col_info->{data_type}) eq 'DATE')
894 or (uc($col_info->{data_type}) eq 'TIME')
895 or (uc($col_info->{data_type}) eq 'TIMESTAMP')
896 or (uc($col_info->{data_type}) eq 'INTERVAL')
899 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
902 warn "datetime_relative not supported for this db driver at the moment";
906 # do the actual dumping
907 my $serialized = Dump(\%ds)->Out();
908 $file->openw->print($serialized);
911 # don't bother looking at rels unless we are actually planning to dump at least one type
912 my ($might_have, $belongs_to, $has_many) = map {
913 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
914 } qw/might_have belongs_to has_many/;
916 return unless $might_have
921 # dump rels of object
923 foreach my $name (sort $src->relationships) {
924 my $info = $src->relationship_info($name);
925 my $r_source = $src->related_source($name);
926 # if belongs_to or might_have with might_have param set or has_many with
927 # has_many param set then
929 ( $info->{attrs}{accessor} eq 'single' &&
930 (!$info->{attrs}{join_type} || $might_have)
932 || $info->{attrs}{accessor} eq 'filter'
934 ($info->{attrs}{accessor} eq 'multi' && $has_many)
936 my $related_rs = $object->related_resultset($name);
937 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
938 # these parts of the rule only apply to has_many rels
939 if ($rule && $info->{attrs}{accessor} eq 'multi') {
940 $related_rs = $related_rs->search(
942 { join => $rule->{join} }
943 ) if ($rule->{cond});
945 $related_rs = $related_rs->search(
947 { rows => $rule->{quantity} }
948 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
950 $related_rs = $related_rs->search(
952 { order_by => $rule->{order_by} }
953 ) if ($rule->{order_by});
956 if ($set->{has_many}{quantity} &&
957 $set->{has_many}{quantity} =~ /^\d+$/) {
958 $related_rs = $related_rs->search(
960 { rows => $set->{has_many}->{quantity} }
964 my %c_params = %{$params};
968 } grep { $set->{$_} } @inherited_attrs;
970 $c_params{set} = \%mock_set;
971 $c_params{set} = merge( $c_params{set}, $rule)
972 if $rule && $rule->{fetch};
974 $self->dump_rs($related_rs, \%c_params);
979 return unless $set && $set->{fetch};
980 foreach my $fetch (@{$set->{fetch}}) {
982 $fetch->{$_} = $set->{$_} foreach
983 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
984 my $related_rs = $object->related_resultset($fetch->{rel});
985 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
988 my $info = $object->result_source->relationship_info($fetch->{rel});
989 if ($info->{attrs}{accessor} eq 'multi') {
990 $fetch = merge( $fetch, $rule );
991 } elsif ($rule->{fetch}) {
992 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
996 die "relationship $fetch->{rel} does not exist for " . $src->source_name
997 unless ($related_rs);
999 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
1000 # if value starts with \ assume it's meant to be passed as a scalar ref
1001 # to dbic. ideally this would substitute deeply
1002 $fetch->{cond} = { map {
1003 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1004 : $fetch->{cond}->{$_}
1005 } keys %{$fetch->{cond}} };
1008 $related_rs = $related_rs->search(
1010 { join => $fetch->{join} }
1011 ) if $fetch->{cond};
1013 $related_rs = $related_rs->search(
1015 { rows => $fetch->{quantity} }
1016 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1017 $related_rs = $related_rs->search(
1019 { order_by => $fetch->{order_by} }
1020 ) if $fetch->{order_by};
1022 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
1026 sub _generate_schema {
1028 my $params = shift || {};
1030 $self->msg("\ncreating schema");
1032 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
1033 eval "require $schema_class";
1037 my $connection_details = $params->{connection_details};
1039 $namespace_counter++;
1041 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1042 Class::C3::Componentised->inject_base( $namespace => $schema_class );
1044 $pre_schema = $namespace->connect(@{$connection_details});
1045 unless( $pre_schema ) {
1046 return DBIx::Class::Exception->throw('connection details not valid');
1048 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
1049 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1050 my $dbh = $pre_schema->storage->dbh;
1053 $self->msg("- clearing DB of existing tables");
1054 $pre_schema->storage->txn_do(sub {
1055 $pre_schema->storage->with_deferred_fk_checks(sub {
1056 foreach my $table (@tables) {
1058 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1064 # import new ddl file to db
1065 my $ddl_file = $params->{ddl};
1066 $self->msg("- deploying schema using $ddl_file");
1067 my $data = _read_sql($ddl_file);
1069 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1070 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1072 $self->msg("- finished importing DDL into DB");
1074 # load schema object from our new DB
1075 $namespace_counter++;
1076 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1077 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1078 my $schema = $namespace2->connect(@{$connection_details});
1083 my $ddl_file = shift;
1085 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1086 my @data = split(/\n/, join('', <$fh>));
1087 @data = grep(!/^--/, @data);
1088 @data = split(/;/, join('', @data));
1090 @data = grep { $_ && $_ !~ /^-- / } @data;
1094 =head2 dump_config_sets
1096 Works just like L</dump> but instead of specifying a single json config set
1097 located in L</config_dir> we dump each set named in the C<configs> parameter.
1099 The parameters are the same as for L</dump> except instead of a C<directory>
1100 parameter we have a C<directory_template> which is a coderef expected to return
1101 a scalar that is a root directory where we will do the actual dumping. This
1102 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1105 $fixture->dump_all_config_sets({
1107 configs => [qw/one.json other.json/],
1108 directory_template => sub {
1109 my ($fixture, $params, $set) = @_;
1110 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1116 sub dump_config_sets {
1117 my ($self, $params) = @_;
1118 my $available_config_sets = delete $params->{configs};
1119 my $directory_template = delete $params->{directory_template} ||
1120 DBIx::Class::Exception->throw("'directory_template is required parameter");
1122 for my $set (@$available_config_sets) {
1123 my $localparams = $params;
1124 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1125 $localparams->{config} = $set;
1126 $self->dump($localparams);
1127 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1131 =head2 dump_all_config_sets
1133 my %local_params = %$params;
1134 my $local_self = bless { %$self }, ref($self);
1135 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1136 $local_params{config} = $set;
1137 $self->dump(\%local_params);
1140 Works just like L</dump> but instead of specifying a single json config set
1141 located in L</config_dir> we dump each set in turn to the specified directory.
1143 The parameters are the same as for L</dump> except instead of a C<directory>
1144 parameter we have a C<directory_template> which is a coderef expected to return
1145 a scalar that is a root directory where we will do the actual dumping. This
1146 coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1149 $fixture->dump_all_config_sets({
1151 directory_template => sub {
1152 my ($fixture, $params, $set) = @_;
1153 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
1159 sub dump_all_config_sets {
1160 my ($self, $params) = @_;
1161 $self->dump_config_sets({
1163 configs=>[$self->available_config_sets],
1171 =item Arguments: \%$attrs
1173 =item Return Value: 1
1177 $fixtures->populate( {
1178 # directory to look for fixtures in, as specified to dump
1179 directory => '/home/me/app/fixtures',
1182 ddl => '/home/me/app/sql/ddl.sql',
1184 # database to clear, deploy and then populate
1185 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1187 # DDL to deploy after populating records, ie. FK constraints
1188 post_ddl => '/home/me/app/sql/post_ddl.sql',
1190 # use CASCADE option when dropping tables
1193 # optional, set to 1 to run ddl but not populate
1196 # optional, set to 1 to run each fixture through ->create rather than have
1197 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1198 # that effects the value of column(s).
1201 # optional, same as use_create except with find_or_create.
1202 # Useful if you are populating a persistent data store.
1203 use_find_or_create => 0,
1205 # Don't try to clean the database, just populate over what's there. Requires
1206 # schema option. Use this if you want to handle removing old data yourself
1211 In this case the database app_dev will be cleared of all tables, then the
1212 specified DDL deployed to it, then finally all fixtures found in
1213 /home/me/app/fixtures will be added to it. populate will generate its own
1214 DBIx::Class schema from the DDL rather than being passed one to use. This is
1215 better as custom insert methods are avoided which can get in the way. In
1216 some cases you might not have a DDL, and so this method will eventually allow a
1217 $schema object to be passed instead.
1219 If needed, you can specify a post_ddl attribute which is a DDL to be applied
1220 after all the fixtures have been added to the database. A good use of this
1221 option would be to add foreign key constraints since databases like Postgresql
1222 cannot disable foreign key checks.
1224 If your tables have foreign key constraints you may want to use the cascade
1225 attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1228 C<directory> is a required attribute.
1230 If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1231 C<ddl> (path to a DDL SQL file) and C<connection_details> (array ref of DSN,
1234 If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1235 attribute containing the connected schema you wish to operate on and set the
1236 C<no_deploy> attribute.
1243 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1244 unless ref $params eq 'HASH';
1246 DBIx::Class::Exception->throw('directory param not specified')
1247 unless $params->{directory};
1249 my $fixture_dir = dir(delete $params->{directory});
1250 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1251 unless -d $fixture_dir;
1256 if ($params->{ddl} && $params->{connection_details}) {
1257 $ddl_file = file(delete $params->{ddl});
1258 unless (-e $ddl_file) {
1259 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1261 unless (ref $params->{connection_details} eq 'ARRAY') {
1262 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1264 $schema = $self->_generate_schema({
1266 connection_details => delete $params->{connection_details},
1269 } elsif ($params->{schema} && $params->{no_deploy}) {
1270 $schema = $params->{schema};
1272 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1276 return 1 if $params->{no_populate};
1278 $self->msg("\nimporting fixtures");
1279 my $tmp_fixture_dir = tempdir();
1280 my $version_file = file($fixture_dir, '_dumper_version');
1281 my $config_set_path = file($fixture_dir, '_config_set');
1282 my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
1284 my $v = Data::Visitor::Callback->new(
1285 plain_value => sub {
1286 my ($visitor, $data) = @_;
1289 my ( $self, $v ) = @_;
1290 if (! defined($ENV{$v})) {
1297 my ($self, $v) = @_;
1298 if(my $attr = $self->config_attrs->{$v}) {
1305 my ($self, @args) = @_;
1309 my ($self, @args) = @_;
1314 my $subsre = join( '|', keys %$subs );
1315 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1321 $v->visit( $config_set );
1326 %sets_by_src = map { delete($_->{class}) => $_ }
1327 @{$config_set->{sets}}
1330 # DBIx::Class::Exception->throw('no version file found');
1331 # unless -e $version_file;
1333 if (-e $tmp_fixture_dir) {
1334 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1335 $tmp_fixture_dir->rmtree;
1337 $self->msg("- creating temp dir");
1338 $tmp_fixture_dir->mkpath();
1339 for ( map { $schema->source($_)->from } $schema->sources) {
1340 my $from_dir = $fixture_dir->subdir($_);
1341 next unless -e $from_dir;
1342 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1345 unless (-d $tmp_fixture_dir) {
1346 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1350 my $formatter = $schema->storage->datetime_parser;
1351 unless ($@ || !$formatter) {
1353 if ($params->{datetime_relative_to}) {
1354 $callbacks{'DateTime::Duration'} = sub {
1355 $params->{datetime_relative_to}->clone->add_duration($_);
1358 $callbacks{'DateTime::Duration'} = sub {
1359 $formatter->format_datetime(DateTime->today->add_duration($_))
1362 $callbacks{object} ||= "visit_ref";
1363 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
1366 $schema->storage->txn_do(sub {
1367 $schema->storage->with_deferred_fk_checks(sub {
1368 foreach my $source (sort $schema->sources) {
1369 $self->msg("- adding " . $source);
1370 my $rs = $schema->resultset($source);
1371 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1372 next unless (-e $source_dir);
1374 while (my $file = $source_dir->next) {
1375 next unless ($file =~ /\.fix$/);
1376 next if $file->is_dir;
1377 my $contents = $file->slurp;
1380 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1381 if(my $external = delete $HASH1->{external}) {
1382 my @fields = keys %{$sets_by_src{$source}->{external}};
1383 foreach my $field(@fields) {
1384 my $key = $HASH1->{$field};
1385 my $content = decode_base64 ($external->{$field});
1386 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1387 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1388 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1390 $class->restore($key, $content, $args);
1393 if ( $params->{use_create} ) {
1394 $rs->create( $HASH1 );
1395 } elsif( $params->{use_find_or_create} ) {
1396 $rs->find_or_create( $HASH1 );
1398 push(@rows, $HASH1);
1401 $rs->populate(\@rows) if scalar(@rows);
1403 ## Now we need to do some db specific cleanup
1404 ## this probably belongs in a more isolated space. Right now this is
1405 ## to just handle postgresql SERIAL types that use Sequences
1407 my $table = $rs->result_source->name;
1408 for my $column(my @columns = $rs->result_source->columns) {
1409 my $info = $rs->result_source->column_info($column);
1410 if(my $sequence = $info->{sequence}) {
1411 $self->msg("- updating sequence $sequence");
1412 $rs->result_source->storage->dbh_do(sub {
1413 my ($storage, $dbh, @cols) = @_;
1414 $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
1415 my $sth = $dbh->prepare($sql);
1416 my $rv = $sth->execute or die $sth->errstr;
1417 $self->msg("- $sql");
1425 $self->do_post_ddl( {
1427 post_ddl=>$params->{post_ddl}
1428 } ) if $params->{post_ddl};
1430 $self->msg("- fixtures imported");
1431 $self->msg("- cleaning up");
1432 $tmp_fixture_dir->rmtree;
1437 my ($self, $params) = @_;
1439 my $schema = $params->{schema};
1440 my $data = _read_sql($params->{post_ddl});
1442 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1443 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1445 $self->msg("- finished importing post-populate DDL into DB");
1450 my $subject = shift || return;
1451 my $level = shift || 1;
1452 return unless $self->debug >= $level;
1454 print Dumper($subject);
1456 print $subject . "\n";
1462 Luke Saunders <luke@shadowcatsystems.co.uk>
1464 Initial development sponsored by and (c) Takkle, Inc. 2007
1468 Ash Berlin <ash@shadowcatsystems.co.uk>
1470 Matt S. Trout <mst@shadowcatsystems.co.uk>
1472 Drew Taylor <taylor.andrew.j@gmail.com>
1474 Frank Switalski <fswitalski@gmail.com>
1476 Chris Akins <chris.hexx@gmail.com>
1480 This library is free software under the same license as perl itself