e5963c1b 1package DBIx::Class::Fixtures;
2
3use strict;
4use warnings;
5
3eecaaa0 6use DBIx::Class 0.08100;
e5963c1b 7use DBIx::Class::Exception;
b099fee9 8use Class::Accessor::Grouped;
e5963c1b 9use Path::Class qw(dir file);
0fc424b7 10use Config::Any::JSON;
11use Data::Dump::Streamer;
4fb695f4 12use Data::Visitor::Callback;
0fc424b7 13use File::Path;
4fb695f4 14use File::Copy::Recursive qw/dircopy/;
6116de11 15use File::Copy qw/move/;
0fc424b7 16use Hash::Merge qw( merge );
e5963c1b 17use Data::Dumper;
aa9f3cc7 18use Class::C3::Componentised;
e5963c1b 19
b099fee9 20use base qw(Class::Accessor::Grouped);
e5963c1b 21
aa9f3cc7 22our $namespace_counter = 0;
0566a82d 23
8a1df391 24__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
25 _inherited_attributes debug schema_class dumped_objects/);
e5963c1b 26
27=head1 VERSION
28
d9b65413 29Version 1.001012
e5963c1b 30
31=cut
32
d9b65413 33our $VERSION = '1.001012';
e5963c1b 34
35=head1 NAME
36
d9b65413 37DBIx::Class::Fixtures - Dump data and repopulate a database using rules
9f96b203 38
e5963c1b 39=head1 SYNOPSIS
40
8a1df391 41 use DBIx::Class::Fixtures;
e5963c1b 42
8a1df391 43 ...
e5963c1b 44
8a1df391 45 my $fixtures = DBIx::Class::Fixtures->new({
46 config_dir => '/home/me/app/fixture_configs'
47 });
e5963c1b 48
8a1df391 49 $fixtures->dump({
50 config => 'set_config.json',
51 schema => $source_dbic_schema,
52 directory => '/home/me/app/fixtures'
53 });
e5963c1b 54
8a1df391 55 $fixtures->populate({
56 directory => '/home/me/app/fixtures',
57 ddl => '/home/me/app/sql/ddl.sql',
58 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
59 post_ddl => '/home/me/app/sql/post_ddl.sql',
60 });
e5963c1b 61
62=head1 DESCRIPTION
63
8a1df391 64Dump fixtures from a source database to the filesystem, then import them into
 65another database (with the same schema) at any time. Use them as a constant
 66dataset for running tests against, or for populating development databases when
 67it is impractical to use production clones. Describe your fixture set using
 68relations and conditions based on your DBIx::Class schema.
a5561f96 69
70=head1 DEFINE YOUR FIXTURE SET
71
8a1df391 72Fixture sets are currently defined in .json files which must reside in your
73config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
74describe which data to pull and dump from the source database.
a5561f96 75
76For example:
77
8a1df391 78 {
9b7171c7 79 "sets": [
8a1df391 80 {
9b7171c7 81 "class": "Artist",
82 "ids": ["1", "3"]
8a1df391 83 },
84 {
9b7171c7 85 "class": "Producer",
86 "ids": ["5"],
87 "fetch": [
8a1df391 88 {
9b7171c7 89 "rel": "artists",
90 "quantity": "2"
8a1df391 91 }
92 ]
93 }
94 ]
95 }
96
 97This will fetch artists with primary keys 1 and 3, the producer with primary
 98key 5, and two of producer 5's artists, where 'artists' is a has_many
 99DBIx::Class rel from Producer to Artist.
a5561f96 100
95566320 101The top level attributes are as follows:
102
a5561f96 103=head2 sets
104
8a1df391 105Sets must be an array of hashes, as in the example given above. Each set
106defines a set of objects to be included in the fixtures. For details on valid
107set attributes see L</SET ATTRIBUTES> below.
a5561f96 108
109=head2 rules
110
8a1df391 111Rules place general conditions on classes. For example, if whenever an artist
 112was dumped you also wanted all of their cds dumped too, you could use a rule
 113to specify this:
114
115 {
9b7171c7 116 "sets": [
8a1df391 117 {
9b7171c7 118 "class": "Artist",
119 "ids": ["1", "3"]
8a1df391 120 },
121 {
9b7171c7 122 "class": "Producer",
123 "ids": ["5"],
124 "fetch": [
8a1df391 125 {
9b7171c7 126 "rel": "artists",
127 "quantity": "2"
8a1df391 128 }
129 ]
130 }
131 ],
9b7171c7 132 "rules": {
133 "Artist": {
134 "fetch": [ {
135 "rel": "cds",
136 "quantity": "all"
8a1df391 137 } ]
138 }
139 }
140 }
141
 142In this case the cds of artists 1 and 3, and of all of producer 5's artists,
 143will be dumped as well. Note that 'cds' is a has_many DBIx::Class relation
 144from Artist to CD. This is equivalent to:
145
146 {
9b7171c7 147 "sets": [
a5561f96 148 {
9b7171c7 149 "class": "Artist",
150 "ids": ["1", "3"],
151 "fetch": [ {
152 "rel": "cds",
153 "quantity": "all"
8a1df391 154 } ]
155 },
156 {
9b7171c7 157 "class": "Producer",
158 "ids": ["5"],
159 "fetch": [ {
160 "rel": "artists",
161 "quantity": "2",
162 "fetch": [ {
163 "rel": "cds",
164 "quantity": "all"
8a1df391 165 } ]
166 } ]
167 }
168 ]
169 }
a5561f96 170
 171Rules must be a hash keyed by class name.
172
95566320 173See L</RULE ATTRIBUTES> for the attributes allowed within each rule.
174
96f2cd20 175=head2 includes
176
8a1df391 177To prevent repetition between configs you can include other configs. For
178example:
96f2cd20 179
8a1df391 180 {
9b7171c7 181 "sets": [ {
182 "class": "Producer",
183 "ids": ["5"]
8a1df391 184 } ],
9b7171c7 185 "includes": [
186 { "file": "base.json" }
8a1df391 187 ]
188 }
96f2cd20 189
8a1df391 190Includes must be an arrayref of hashrefs, where each hashref has a 'file' key
 191naming another config file in the same directory. The original config is
 192merged with its includes using L<Hash::Merge>.
96f2cd20 193
95566320 194=head2 datetime_relative
195
8a1df391 196Only available for MySQL and PostgreSQL at the moment. Must be a value that
 197DateTime::Format::* can parse. For example:
95566320 198
8a1df391 199 {
9b7171c7 200 "sets": [ {
201 "class": "RecentItems",
202 "ids": ["9"]
8a1df391 203 } ],
9b7171c7 204 "datetime_relative": "2007-10-30 00:00:00"
8a1df391 205 }
95566320 206
8a1df391 207This will work when dumping from a MySQL database and will cause any datetime
 208fields (where data_type => 'datetime' in the column def of the schema class) to
 209be dumped as a DateTime::Duration object relative to the date specified in the
 210datetime_relative value. For example, if the RecentItem object had a date field
 211set to 2007-10-25, then when the fixture is imported the field will be set to 5
 212days in the past relative to the current time.
95566320 213
a5561f96 214=head2 might_have
215
8a1df391 216Specifies whether to automatically dump might_have relationships. Should be a
217hash with one attribute - fetch. Set fetch to 1 or 0.
218
219 {
9b7171c7 220 "might_have": { "fetch": 1 },
221 "sets": [
8a1df391 222 {
9b7171c7 223 "class": "Artist",
224 "ids": ["1", "3"]
8a1df391 225 },
226 {
9b7171c7 227 "class": "Producer",
228 "ids": ["5"]
8a1df391 229 }
230 ]
231 }
232
 233Note: belongs_to rels are automatically dumped whether you like it or not; this
234is to avoid FKs to nowhere when importing. General rules on has_many rels are
235not accepted at this top level, but you can turn them on for individual sets -
236see L</SET ATTRIBUTES>.
a5561f96 237
238=head1 SET ATTRIBUTES
239
240=head2 class
241
242Required attribute. Specifies the DBIx::Class object class you wish to dump.
243
244=head2 ids
245
8a1df391 246Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
 247If an id is not in the source db then it simply won't get dumped; there are no
 248warnings and no death.
a5561f96 249
250=head2 quantity
251
8a1df391 252Must be either an integer or the string 'all'. Specifying an integer will
 253effectively set the 'rows' attribute on the resultset clause; specifying 'all'
 254will leave the rows attribute off so that all matching rows are dumped. There's
 255no randomising here, it's just the first x rows.
a5561f96 256
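A sketch of a set using an integer quantity (the CD class here is just an
assumed example source):

    {
        "sets": [ {
            "class": "CD",
            "quantity": "10"
        } ]
    }
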
257=head2 cond
258
8a1df391 259A hash specifying the conditions dumped objects must match. Essentially this is
260a JSON representation of a DBIx::Class search clause. For example:
a5561f96 261
8a1df391 262 {
9b7171c7 263 "sets": [{
264 "class": "Artist",
 265            "quantity": "all",
266 "cond": { "name": "Dave" }
8a1df391 267 }]
268 }
a5561f96 269
8a1df391 270This will dump all artists whose name is 'dave'. Essentially
271$artist_rs->search({ name => 'Dave' })->all.
a5561f96 272
273Sometimes in a search clause it's useful to use scalar refs to do things like:
274
8a1df391 275 $artist_rs->search({ no1_singles => \'> no1_albums' })
a5561f96 276
277This could be specified in the cond hash like so:
278
8a1df391 279 {
9b7171c7 280 "sets": [ {
281 "class": "Artist",
 282            "quantity": "all",
283 "cond": { "no1_singles": "\> no1_albums" }
8a1df391 284 } ]
285 }
a5561f96 286
8a1df391 287So if the value starts with a backslash, it is turned into a scalar ref before
 288being passed to search.
a5561f96 289
290=head2 join
291
292An array of relationships to be used in the cond clause.
293
8a1df391 294 {
9b7171c7 295 "sets": [ {
296 "class": "Artist",
 297            "quantity": "all",
298 "cond": { "cds.position": { ">": 4 } },
299 "join": ["cds"]
8a1df391 300 } ]
301 }
a5561f96 302
303Fetch all artists who have cds with position greater than 4.
304
305=head2 fetch
306
307Must be an array of hashes. Specifies which rels to also dump. For example:
308
8a1df391 309 {
9b7171c7 310 "sets": [ {
311 "class": "Artist",
312 "ids": ["1", "3"],
313 "fetch": [ {
314 "rel": "cds",
315 "quantity": "3",
316 "cond": { "position": "2" }
8a1df391 317 } ]
318 } ]
319 }
a5561f96 320
321Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
322
8a1df391 323Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
 324'join'. rel is the name of the DBIx::Class rel to follow; the rest are the same
 325as in the set attributes. quantity is necessary for has_many relationships, but
 326not for belongs_to or might_have relationships.
a5561f96 327
328=head2 has_many
329
8a1df391 330Specifies whether to fetch has_many rels for this set. Must be a hash
331containing keys fetch and quantity.
a5561f96 332
8a1df391 333Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
334integer.
a5561f96 335
95566320 336Be careful here, dumping has_many rels can lead to a lot of data being dumped.
337
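A sketch of a set that also pulls in up to five rows from each artist's
has_many rels (the values here are just examples):

    {
        "sets": [ {
            "class": "Artist",
            "ids": ["1", "3"],
            "has_many": { "fetch": 1, "quantity": "5" }
        } ]
    }
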
a5561f96 338=head2 might_have
339
8a1df391 340As with has_many but for might_have relationships. Quantity doesn't do anything
341in this case.
a5561f96 342
8a1df391 343This value will be inherited by all fetches in this set. This is not true for
344the has_many attribute.
a5561f96 345
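A sketch of a set that also dumps might_have rels (example values):

    {
        "sets": [ {
            "class": "Artist",
            "ids": ["1"],
            "might_have": { "fetch": 1 }
        } ]
    }
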
346=head1 RULE ATTRIBUTES
347
348=head2 cond
349
350Same as with L</SET ATTRIBUTES>
351
352=head2 fetch
353
354Same as with L</SET ATTRIBUTES>
355
356=head2 join
357
358Same as with L</SET ATTRIBUTES>
359
360=head2 has_many
361
362Same as with L</SET ATTRIBUTES>
363
364=head2 might_have
365
366Same as with L</SET ATTRIBUTES>
e5963c1b 367
0fc424b7 368=head1 METHODS
369
370=head2 new
e5963c1b 371
a5561f96 372=over 4
373
 374=item Arguments: \%attrs
375
376=item Return Value: $fixture_object
377
378=back
379
8a1df391 380Returns a new DBIx::Class::Fixtures object. %attrs can have the following
381parameters:
382
383=over
384
385=item config_dir:
386
 387Required. Must contain a valid path to the directory in which your .json
388configs reside.
389
390=item debug:
391
 392Determines whether to be verbose.
393
394=item ignore_sql_errors:
1ac1b0d7 395
8a1df391 396Ignore errors on import of DDL, etc.
1ac1b0d7 397
8a1df391 398=back
a5561f96 399
8a1df391 400 my $fixtures = DBIx::Class::Fixtures->new( {
401 config_dir => '/home/me/app/fixture_configs'
402 } );
a5561f96 403
0fc424b7 404=cut
e5963c1b 405
406sub new {
407 my $class = shift;
408
409 my ($params) = @_;
410 unless (ref $params eq 'HASH') {
411 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
412 }
413
414 unless ($params->{config_dir}) {
415 return DBIx::Class::Exception->throw('config_dir param not specified');
416 }
417
418 my $config_dir = dir($params->{config_dir});
419 unless (-e $params->{config_dir}) {
420 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
421 }
422
423 my $self = {
0fc424b7 424 config_dir => $config_dir,
14e0a204 425 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
da25ed7c 426 debug => $params->{debug} || 0,
0a54a6e8 427 ignore_sql_errors => $params->{ignore_sql_errors},
65a80d4e 428 dumped_objects => {},
429 use_create => $params->{use_create} || 0
e5963c1b 430 };
431
432 bless $self, $class;
433
434 return $self;
435}
436
d9b65413 437=head2 available_config_sets
438
439Returns a list of all the config sets found in the L</config_dir>. These will
 440be a list of the JSON-based files containing dump rules.
441
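A minimal usage sketch (the file names shown are just examples):

    my @sets = $fixtures->available_config_sets;
    # e.g. ('base.json', 'set_config.json')
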
442=cut
443
444my @config_sets;
445sub available_config_sets {
446 @config_sets = scalar(@config_sets) ? @config_sets : map {
447 $_->basename;
448 } grep {
449 -f $_ && $_=~/json$/;
450 } dir((shift)->config_dir)->children;
451}
452
0fc424b7 453=head2 dump
454
a5561f96 455=over 4
456
 457=item Arguments: \%attrs
458
459=item Return Value: 1
460
461=back
462
8a1df391 463 $fixtures->dump({
464 config => 'set_config.json', # config file to use. must be in the config
465 # directory specified in the constructor
466 schema => $source_dbic_schema,
467 directory => '/home/me/app/fixtures' # output directory
468 });
a5561f96 469
8a1df391 470or
2ef30e95 471
8a1df391 472 $fixtures->dump({
473 all => 1, # just dump everything that's in the schema
474 schema => $source_dbic_schema,
475 directory => '/home/me/app/fixtures' # output directory
476 });
2ef30e95 477
8a1df391 478In this case objects will be dumped to subdirectories in the specified
479directory. For example:
a5561f96 480
8a1df391 481 /home/me/app/fixtures/artist/1.fix
482 /home/me/app/fixtures/artist/3.fix
483 /home/me/app/fixtures/producer/5.fix
a5561f96 484
13ff7633 485schema and directory are required attributes. Also, one of config or all must
486be specified.
487
488Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
489If this form is used your HashRef should conform to the structure rules defined
490for the JSON representations.
a5561f96 491
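For example, a sketch of the HashRef form (mirroring the JSON structure shown
earlier):

    $fixtures->dump({
        config => {
            sets => [ { class => 'Artist', ids => ['1', '3'] } ],
        },
        schema    => $source_dbic_schema,
        directory => '/home/me/app/fixtures',
    });
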
0fc424b7 492=cut
493
494sub dump {
495 my $self = shift;
496
497 my ($params) = @_;
498 unless (ref $params eq 'HASH') {
499 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
500 }
501
2ef30e95 502 foreach my $param (qw/schema directory/) {
0fc424b7 503 unless ($params->{$param}) {
504 return DBIx::Class::Exception->throw($param . ' param not specified');
505 }
506 }
507
47a8ceb9 508 if($params->{excludes} && !$params->{all}) {
509 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
510 }
511
2ef30e95 512 my $schema = $params->{schema};
2ef30e95 513 my $config;
514 if ($params->{config}) {
f4fe4f1f 515 $config = ref $params->{config} eq 'HASH' ?
13ff7633 516 $params->{config} :
517 do {
518 #read config
519 my $config_file = $self->config_dir->file($params->{config});
520 $self->load_config_file($config_file);
521 };
2ef30e95 522 } elsif ($params->{all}) {
47a8ceb9 523 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
8a1df391 524 $config = {
525 might_have => { fetch => 0 },
526 has_many => { fetch => 0 },
527 belongs_to => { fetch => 0 },
47a8ceb9 528 sets => [
529 map {
530 { class => $_, quantity => 'all' };
531 } grep {
532 !$excludes{$_}
533 } $schema->sources],
8a1df391 534 };
2ef30e95 535 } else {
8a1df391 536 DBIx::Class::Exception->throw('must pass config or set all');
0fc424b7 537 }
538
539 my $output_dir = dir($params->{directory});
540 unless (-e $output_dir) {
d85d888e 541 $output_dir->mkpath ||
8a1df391 542      DBIx::Class::Exception->throw("unable to create output directory $output_dir");
0fc424b7 543 }
544
9f96b203 545 $self->msg("generating fixtures");
f251ab7e 546 my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
0fc424b7 547
6116de11 548 if (-e $tmp_output_dir) {
0fc424b7 549 $self->msg("- clearing existing $tmp_output_dir");
6116de11 550 $tmp_output_dir->rmtree;
0fc424b7 551 }
6116de11 552 $self->msg("- creating $tmp_output_dir");
553 $tmp_output_dir->mkpath;
0fc424b7 554
555 # write version file (for the potential benefit of populate)
8a1df391 556 $tmp_output_dir->file('_dumper_version')
557 ->openw
558 ->print($VERSION);
0fc424b7 559
560 $config->{rules} ||= {};
561 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
8a1df391 562
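  # index each rule under its canonical source_name so later lookups by source_name find it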
06b7a1cc 563 while ( my ($k,$v) = each %{ $config->{rules} } ) {
28acb622 564 if ( my $source = eval { $schema->source($k) } ) {
565 $config->{rules}{$source->source_name} = $v;
06b7a1cc 566 }
567 }
568
0fc424b7 569 foreach my $source (@sources) {
570 # apply rule to set if specified
571 my $rule = $config->{rules}->{$source->{class}};
572 $source = merge( $source, $rule ) if ($rule);
573
574 # fetch objects
2ef30e95 575 my $rs = $schema->resultset($source->{class});
c40935c5 576
577 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
0a54a6e8 578 # if value starts with \ assume it's meant to be passed as a scalar ref
579 # to dbic. ideally this would substitute deeply
580 $source->{cond} = {
581 map {
582 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
583 : $source->{cond}->{$_}
584 } keys %{$source->{cond}}
585 };
c40935c5 586 }
587
0a54a6e8 588 $rs = $rs->search($source->{cond}, { join => $source->{join} })
589 if $source->{cond};
590
0fc424b7 591 $self->msg("- dumping $source->{class}");
0a54a6e8 592
0fc424b7 593 my %source_options = ( set => { %{$config}, %{$source} } );
594 if ($source->{quantity}) {
0a54a6e8 595 $rs = $rs->search({}, { order_by => $source->{order_by} })
596 if $source->{order_by};
597
8a1df391 598 if ($source->{quantity} =~ /^\d+$/) {
599 $rs = $rs->search({}, { rows => $source->{quantity} });
600 } elsif ($source->{quantity} ne 'all') {
0a54a6e8 601 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
0fc424b7 602 }
603 }
8a1df391 604 elsif ($source->{ids} && @{$source->{ids}}) {
0fc424b7 605 my @ids = @{$source->{ids}};
8a1df391 606 my (@pks) = $rs->result_source->primary_columns;
 607      die "Can't dump multiple col-pks using 'ids' option" if @pks > 1;
608 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
0fc424b7 609 }
8a1df391 610 else {
0fc424b7 611 DBIx::Class::Exception->throw('must specify either quantity or ids');
612 }
613
8a1df391 614 $source_options{set_dir} = $tmp_output_dir;
615 $self->dump_rs($rs, \%source_options );
d3ef0865 616 }
617
da25ed7c 618 # clear existing output dir
619 foreach my $child ($output_dir->children) {
620 if ($child->is_dir) {
621 next if ($child eq $tmp_output_dir);
622 if (grep { $_ =~ /\.fix/ } $child->children) {
623 $child->rmtree;
624 }
625 } elsif ($child =~ /_dumper_version$/) {
626 $child->remove;
627 }
0fc424b7 628 }
629
630 $self->msg("- moving temp dir to $output_dir");
8a1df391 631 move($_, dir($output_dir, $_->relative($_->parent)->stringify))
632 for $tmp_output_dir->children;
633
0fc424b7 634  if (-e $tmp_output_dir) {
635 $self->msg("- clearing tmp dir $tmp_output_dir");
636 # delete existing fixture set
637 $tmp_output_dir->remove;
638 }
639
640 $self->msg("done");
641
642 return 1;
643}
644
8a1df391 645sub load_config_file {
646 my ($self, $config_file) = @_;
647 DBIx::Class::Exception->throw("config does not exist at $config_file")
648 unless -e $config_file;
649
650 my $config = Config::Any::JSON->load($config_file);
651
652 #process includes
653 if (my $incs = $config->{includes}) {
654 $self->msg($incs);
655 DBIx::Class::Exception->throw(
656 'includes params of config must be an array ref of hashrefs'
657 ) unless ref $incs eq 'ARRAY';
658
659 foreach my $include_config (@$incs) {
660 DBIx::Class::Exception->throw(
661 'includes params of config must be an array ref of hashrefs'
662 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
663
664 my $include_file = $self->config_dir->file($include_config->{file});
665
666 DBIx::Class::Exception->throw("config does not exist at $include_file")
667 unless -e $include_file;
668
669 my $include = Config::Any::JSON->load($include_file);
670 $self->msg($include);
671 $config = merge( $config, $include );
672 }
673 delete $config->{includes};
674 }
675
676 # validate config
677 return DBIx::Class::Exception->throw('config has no sets')
678 unless $config && $config->{sets} &&
679 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
680
681 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
682 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
683 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
684
685 return $config;
686}
687
688sub dump_rs {
689 my ($self, $rs, $params) = @_;
690
691 while (my $row = $rs->next) {
692 $self->dump_object($row, $params);
693 }
694}
695
0fc424b7 696sub dump_object {
8a1df391 697 my ($self, $object, $params) = @_;
0fc424b7 698 my $set = $params->{set};
699 die 'no dir passed to dump_object' unless $params->{set_dir};
700 die 'no object passed to dump_object' unless $object;
701
702 my @inherited_attrs = @{$self->_inherited_attributes};
703
8a1df391 704 my @pk_vals = map {
705 $object->get_column($_)
706 } $object->primary_columns;
707
708 my $key = join("\0", @pk_vals);
709
710 my $src = $object->result_source;
711 my $exists = $self->dumped_objects->{$src->name}{$key}++;
712
713
0fc424b7 714 # write dir and gen filename
8a1df391 715 my $source_dir = $params->{set_dir}->subdir(lc $src->from);
716 $source_dir->mkpath(0, 0777);
5f3da1e0 717
718 # strip dir separators from file name
0a54a6e8 719 my $file = $source_dir->file(
720 join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
721 );
8a1df391 722
0fc424b7 723
724 # write file
0fc424b7 725 unless ($exists) {
726 $self->msg('-- dumping ' . $file->stringify, 2);
727 my %ds = $object->get_columns;
728
729 # mess with dates if specified
0566a82d 730 if ($set->{datetime_relative}) {
8a1df391 731 my $formatter= $object->result_source->schema->storage->datetime_parser;
0566a82d 732 unless ($@ || !$formatter) {
733 my $dt;
734 if ($set->{datetime_relative} eq 'today') {
735 $dt = DateTime->today;
736 } else {
737 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
738 }
0fc424b7 739
0566a82d 740 while (my ($col, $value) = each %ds) {
741 my $col_info = $object->result_source->column_info($col);
0fc424b7 742
0566a82d 743 next unless $value
744 && $col_info->{_inflate_info}
745 && uc($col_info->{data_type}) eq 'DATETIME';
0fc424b7 746
0566a82d 747 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
748 }
749 } else {
b099fee9 750 warn "datetime_relative not supported for this db driver at the moment";
0fc424b7 751 }
752 }
753
754 # do the actual dumping
755 my $serialized = Dump(\%ds)->Out();
8a1df391 756 $file->openw->print($serialized);
0fc424b7 757 }
758
2ef30e95 759 # don't bother looking at rels unless we are actually planning to dump at least one type
0a54a6e8 760 my ($might_have, $belongs_to, $has_many) = map {
06b7a1cc 761 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
0a54a6e8 762 } qw/might_have belongs_to has_many/;
763
764 return unless $might_have
765 || $belongs_to
766 || $has_many
8a1df391 767 || $set->{fetch};
2ef30e95 768
0fc424b7 769 # dump rels of object
0fc424b7 770 unless ($exists) {
8a1df391 771 foreach my $name (sort $src->relationships) {
772 my $info = $src->relationship_info($name);
773 my $r_source = $src->related_source($name);
0a54a6e8 774 # if belongs_to or might_have with might_have param set or has_many with
775 # has_many param set then
8a1df391 776 if (
0a54a6e8 777 ( $info->{attrs}{accessor} eq 'single' &&
778 (!$info->{attrs}{join_type} || $might_have)
779 )
780 || $info->{attrs}{accessor} eq 'filter'
781 ||
782 ($info->{attrs}{accessor} eq 'multi' && $has_many)
8a1df391 783 ) {
0fc424b7 784 my $related_rs = $object->related_resultset($name);
785 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
786 # these parts of the rule only apply to has_many rels
787 if ($rule && $info->{attrs}{accessor} eq 'multi') {
0a54a6e8 788 $related_rs = $related_rs->search(
789 $rule->{cond},
790 { join => $rule->{join} }
791 ) if ($rule->{cond});
792
793 $related_rs = $related_rs->search(
794 {},
795 { rows => $rule->{quantity} }
796 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
797
798 $related_rs = $related_rs->search(
799 {},
800 { order_by => $rule->{order_by} }
801 ) if ($rule->{order_by});
802
0fc424b7 803 }
0a54a6e8 804 if ($set->{has_many}{quantity} &&
805 $set->{has_many}{quantity} =~ /^\d+$/) {
806 $related_rs = $related_rs->search(
807 {},
808 { rows => $set->{has_many}->{quantity} }
809 );
0fc424b7 810 }
0a54a6e8 811
0fc424b7 812 my %c_params = %{$params};
813 # inherit date param
0a54a6e8 814 my %mock_set = map {
815 $_ => $set->{$_}
816 } grep { $set->{$_} } @inherited_attrs;
817
0fc424b7 818 $c_params{set} = \%mock_set;
0a54a6e8 819 $c_params{set} = merge( $c_params{set}, $rule)
820 if $rule && $rule->{fetch};
821
8a1df391 822 $self->dump_rs($related_rs, \%c_params);
0fc424b7 823 }
824 }
825 }
826
827 return unless $set && $set->{fetch};
828 foreach my $fetch (@{$set->{fetch}}) {
829 # inherit date param
0a54a6e8 830 $fetch->{$_} = $set->{$_} foreach
831 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
0fc424b7 832 my $related_rs = $object->related_resultset($fetch->{rel});
833 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
8a1df391 834
0fc424b7 835 if ($rule) {
836 my $info = $object->result_source->relationship_info($fetch->{rel});
837 if ($info->{attrs}{accessor} eq 'multi') {
838 $fetch = merge( $fetch, $rule );
839 } elsif ($rule->{fetch}) {
840 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
841 }
842 }
8a1df391 843
0a54a6e8 844 die "relationship $fetch->{rel} does not exist for " . $src->source_name
8a1df391 845 unless ($related_rs);
846
0fc424b7 847 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
0a54a6e8 848 # if value starts with \ assume it's meant to be passed as a scalar ref
849 # to dbic. ideally this would substitute deeply
8a1df391 850 $fetch->{cond} = { map {
851 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
852 : $fetch->{cond}->{$_}
853 } keys %{$fetch->{cond}} };
0fc424b7 854 }
8a1df391 855
0a54a6e8 856 $related_rs = $related_rs->search(
857 $fetch->{cond},
858 { join => $fetch->{join} }
859 ) if $fetch->{cond};
860
861 $related_rs = $related_rs->search(
862 {},
863 { rows => $fetch->{quantity} }
864 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
865 $related_rs = $related_rs->search(
866 {},
867 { order_by => $fetch->{order_by} }
868 ) if $fetch->{order_by};
8a1df391 869
870 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
0fc424b7 871 }
872}
873
384c3f0c 874sub _generate_schema {
875 my $self = shift;
876 my $params = shift || {};
384c3f0c 877 require DBI;
878 $self->msg("\ncreating schema");
384c3f0c 879
c06f7b96 880 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
9a9a7832 881 eval "require $schema_class";
882 die $@ if $@;
883
4fb695f4 884 my $pre_schema;
885 my $connection_details = $params->{connection_details};
8a1df391 886
aa9f3cc7 887 $namespace_counter++;
8a1df391 888
889 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
aa9f3cc7 890 Class::C3::Componentised->inject_base( $namespace => $schema_class );
8a1df391 891
aa9f3cc7 892 $pre_schema = $namespace->connect(@{$connection_details});
893 unless( $pre_schema ) {
384c3f0c 894 return DBIx::Class::Exception->throw('connection details not valid');
895 }
aa9f3cc7 896 my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
f81264b2 897 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
4fb695f4 898 my $dbh = $pre_schema->storage->dbh;
384c3f0c 899
900 # clear existing db
901 $self->msg("- clearing DB of existing tables");
7f25d8f8 902 $pre_schema->storage->txn_do(sub {
903 $pre_schema->storage->with_deferred_fk_checks(sub {
904 foreach my $table (@tables) {
905 eval {
906 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
907 };
908 }
909 });
9586eb0c 910 });
384c3f0c 911
912 # import new ddl file to db
913 my $ddl_file = $params->{ddl};
914 $self->msg("- deploying schema using $ddl_file");
f81264b2 915 my $data = _read_sql($ddl_file);
916 foreach (@$data) {
917 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1ac1b0d7 918 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
384c3f0c 919 }
384c3f0c 920 $self->msg("- finished importing DDL into DB");
921
922 # load schema object from our new DB
b4c67f96 923 $namespace_counter++;
0a54a6e8 924 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
b4c67f96 925 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
926 my $schema = $namespace2->connect(@{$connection_details});
384c3f0c 927 return $schema;
928}
929
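# read a DDL file, drop SQL comment lines, and split it into individual statements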
f81264b2 930sub _read_sql {
931 my $ddl_file = shift;
932 my $fh;
933 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
934 my @data = split(/\n/, join('', <$fh>));
935 @data = grep(!/^--/, @data);
936 @data = split(/;/, join('', @data));
937 close($fh);
938 @data = grep { $_ && $_ !~ /^-- / } @data;
939 return \@data;
940}
a5561f96 941
d9b65413 942=head2 dump_all_config_sets
943
 944Works just like L</dump> but instead of specifying a single JSON config set
945located in L</config_dir> we dump each set in turn to the specified directory.
946
947The parameters are the same as for L</dump> except instead of a C<directory>
948parameter we have a C<directory_template> which is a coderef expected to return
949a scalar that is a root directory where we will do the actual dumping. This
 950coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
951example:
952
953 $fixture->dump_all_config_sets({
954 schema => $schema,
955 directory_template => sub {
956 my ($fixture, $params, $set) = @_;
957 return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
958 },
959 });
960
961=cut
962
963sub dump_all_config_sets {
964 my ($self, $params) = @_;
965 my @available_config_sets = $self->available_config_sets;
966 my $directory_template = delete $params->{directory_template} ||
 967    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
968
969 for my $set (@available_config_sets) {
 970    my %set_params = %{ $params };  # copy params so each set gets its own directory
 971    $set_params{directory} = $directory_template->($self, \%set_params, $set);
 972    $self->dump(\%set_params);
973 }
974}
975
a5561f96 976=head2 populate
977
978=over 4
979
 980=item Arguments: \%attrs
981
982=item Return Value: 1
983
984=back
985
8a1df391 986 $fixtures->populate( {
987 # directory to look for fixtures in, as specified to dump
988 directory => '/home/me/app/fixtures',
989
990 # DDL to deploy
991 ddl => '/home/me/app/sql/ddl.sql',
992
993 # database to clear, deploy and then populate
994 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
995
996 # DDL to deploy after populating records, ie. FK constraints
997 post_ddl => '/home/me/app/sql/post_ddl.sql',
998
999 # use CASCADE option when dropping tables
1000 cascade => 1,
1001
1002 # optional, set to 1 to run ddl but not populate
1003 no_populate => 0,
1004
65a80d4e 1005 # optional, set to 1 to run each fixture through ->create rather than have
1006 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
 1007    # that affects the value of column(s).
1008 use_create => 0,
1009
8a1df391 1010    # Don't try to clean the database, just populate over what's there. Requires
1011 # schema option. Use this if you want to handle removing old data yourself
1012 # no_deploy => 1
1013 # schema => $schema
1014 } );
a5561f96 1015
9e77162b 1016In this case the database app_dev will be cleared of all tables, then the
 1017specified DDL deployed to it, then finally all fixtures found in
 1018/home/me/app/fixtures will be added to it. populate will generate its own
 1019DBIx::Class schema from the DDL rather than being passed one to use. This is
 1020better as custom insert methods are avoided, which can get in the way. If you
 1021don't have a DDL, you can instead pass a connected C<schema> object along with
 1022the C<no_deploy> option (see below).
a5561f96 1023
9e77162b 1024If needed, you can specify a post_ddl attribute which is a DDL to be applied
1025after all the fixtures have been added to the database. A good use of this
1026option would be to add foreign key constraints since databases like Postgresql
1027cannot disable foreign key checks.
f81264b2 1028
9e77162b 1029If your tables have foreign key constraints you may want to use the cascade
 1030attribute which will make the drop table functionality cascade, i.e. 'DROP TABLE
1031$table CASCADE'.
f81264b2 1032
9e77162b 1033C<directory> is a required attribute.
1034
1035If you wish for DBIx::Class::Fixtures to clear the database for you pass in
 1036C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1037user and pass).
1038
1039If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1040attribute containing the connected schema you wish to operate on and set the
1041C<no_deploy> attribute.
a5561f96 1042
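A sketch of that form, reusing a schema you have already connected:

    $fixtures->populate({
        directory => '/home/me/app/fixtures',
        schema    => $schema,
        no_deploy => 1,
    });
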
1043=cut
1044
384c3f0c 1045sub populate {
1046 my $self = shift;
1047 my ($params) = @_;
0a54a6e8 1048 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1049 unless ref $params eq 'HASH';
1050
1051 DBIx::Class::Exception->throw('directory param not specified')
1052 unless $params->{directory};
384c3f0c 1053
9a9a7832 1054 my $fixture_dir = dir(delete $params->{directory});
0a54a6e8 1055 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1056 unless -d $fixture_dir;
384c3f0c 1057
1058 my $ddl_file;
9e77162b 1059 my $dbh;
1060 my $schema;
384c3f0c 1061 if ($params->{ddl} && $params->{connection_details}) {
9a9a7832 1062 $ddl_file = file(delete $params->{ddl});
384c3f0c 1063 unless (-e $ddl_file) {
1064 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1065 }
1066 unless (ref $params->{connection_details} eq 'ARRAY') {
1067 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1068 }
8a1df391 1069 $schema = $self->_generate_schema({
1070 ddl => $ddl_file,
1071 connection_details => delete $params->{connection_details},
1072 %{$params}
1073 });
9e77162b 1074 } elsif ($params->{schema} && $params->{no_deploy}) {
1075 $schema = $params->{schema};
384c3f0c 1076 } else {
0a54a6e8 1077 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
384c3f0c 1078 }
1079
3ad96388 1080
1081 return 1 if $params->{no_populate};
1082
4fb695f4 1083 $self->msg("\nimporting fixtures");
384c3f0c 1084 my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
384c3f0c 1085 my $version_file = file($fixture_dir, '_dumper_version');
0a54a6e8 1086# DBIx::Class::Exception->throw('no version file found');
1087# unless -e $version_file;
384c3f0c 1088
1089 if (-e $tmp_fixture_dir) {
1090 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
4fb695f4 1091 $tmp_fixture_dir->rmtree;
384c3f0c 1092 }
1093 $self->msg("- creating temp dir");
51794e1c 1094 $tmp_fixture_dir->mkpath();
0a54a6e8 1095 for ( map { $schema->source($_)->from } $schema->sources) {
1096 my $from_dir = $fixture_dir->subdir($_);
1097 next unless -e $from_dir;
1098 dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1099 }
9e77162b 1100
1101 unless (-d $tmp_fixture_dir) {
0a54a6e8 1102 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
9e77162b 1103 }
384c3f0c 1104
384c3f0c 1105 my $fixup_visitor;
0a54a6e8 1106 my $formatter = $schema->storage->datetime_parser;
0566a82d 1107 unless ($@ || !$formatter) {
1108 my %callbacks;
1109 if ($params->{datetime_relative_to}) {
1110 $callbacks{'DateTime::Duration'} = sub {
1111 $params->{datetime_relative_to}->clone->add_duration($_);
1112 };
1113 } else {
1114 $callbacks{'DateTime::Duration'} = sub {
1115 $formatter->format_datetime(DateTime->today->add_duration($_))
1116 };
1117 }
1118 $callbacks{object} ||= "visit_ref";
 1119    $fixup_visitor = Data::Visitor::Callback->new(%callbacks);
384c3f0c 1120 }
1ac1b0d7 1121
7f25d8f8 1122 $schema->storage->txn_do(sub {
1123 $schema->storage->with_deferred_fk_checks(sub {
1124 foreach my $source (sort $schema->sources) {
1125 $self->msg("- adding " . $source);
1126 my $rs = $schema->resultset($source);
1127 my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1128 next unless (-e $source_dir);
1129 my @rows;
1130 while (my $file = $source_dir->next) {
1131 next unless ($file =~ /\.fix$/);
1132 next if $file->is_dir;
1133 my $contents = $file->slurp;
1134 my $HASH1;
1135 eval($contents);
1136 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
65a80d4e 1137 if ( $params->{use_create} ) {
1138 $rs->create( $HASH1 );
1139 } else {
1140 push(@rows, $HASH1);
1141 }
7f25d8f8 1142 }
1143 $rs->populate(\@rows) if scalar(@rows);
1ac1b0d7 1144 }
7f25d8f8 1145 });
1ac1b0d7 1146 });
8a1df391 1147 $self->do_post_ddl( {
1148 schema=>$schema,
1149 post_ddl=>$params->{post_ddl}
1150 } ) if $params->{post_ddl};
f81264b2 1151
384c3f0c 1152 $self->msg("- fixtures imported");
1153 $self->msg("- cleaning up");
1154 $tmp_fixture_dir->rmtree;
b099fee9 1155 return 1;
384c3f0c 1156}
1157
6a05e381 1158sub do_post_ddl {
1159 my ($self, $params) = @_;
1160
1161 my $schema = $params->{schema};
1162 my $data = _read_sql($params->{post_ddl});
1163 foreach (@$data) {
1164 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1ac1b0d7 1165 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
6a05e381 1166 }
1167 $self->msg("- finished importing post-populate DDL into DB");
1168}
1169
0fc424b7 1170sub msg {
1171 my $self = shift;
1172 my $subject = shift || return;
9a9a7832 1173 my $level = shift || 1;
9a9a7832 1174 return unless $self->debug >= $level;
0fc424b7 1175 if (ref $subject) {
1176 print Dumper($subject);
1177 } else {
1178 print $subject . "\n";
1179 }
1180}
a5561f96 1181
1182=head1 AUTHOR
1183
1184 Luke Saunders <luke@shadowcatsystems.co.uk>
1185
3b4f6e76 1186 Initial development sponsored by and (c) Takkle, Inc. 2007
1187
a5561f96 1188=head1 CONTRIBUTORS
1189
1190 Ash Berlin <ash@shadowcatsystems.co.uk>
8a1df391 1191
a5561f96 1192 Matt S. Trout <mst@shadowcatsystems.co.uk>
8a1df391 1193
fc17c598 1194 Drew Taylor <taylor.andrew.j@gmail.com>
a5561f96 1195
9b7171c7 1196 Frank Switalski <fswitalski@gmail.com>
1197
3b4f6e76 1198=head1 LICENSE
1199
1200 This library is free software under the same license as perl itself
1201
a5561f96 1202=cut
1203
e5963c1b 12041;