e5963c1b 1package DBIx::Class::Fixtures;
2
3use strict;
4use warnings;
5
3eecaaa0 6use DBIx::Class 0.08100;
e5963c1b 7use DBIx::Class::Exception;
b099fee9 8use Class::Accessor::Grouped;
0fc424b7 9use Config::Any::JSON;
10use Data::Dump::Streamer;
4fb695f4 11use Data::Visitor::Callback;
0fc424b7 12use Hash::Merge qw( merge );
e5963c1b 13use Data::Dumper;
aa9f3cc7 14use Class::C3::Componentised;
c040a9b0 15use MIME::Base64;
924e1009 16use IO::All;
17use File::Temp qw/tempdir/;
e5963c1b 18
b099fee9 19use base qw(Class::Accessor::Grouped);
e5963c1b 20
aa9f3cc7 21our $namespace_counter = 0;
0566a82d 22
8a1df391 23__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
c040a9b0 24 _inherited_attributes debug schema_class dumped_objects config_attrs/);
e5963c1b 25
47397fda 26our $VERSION = '1.001037';
e0f67e5b 27
28$VERSION = eval $VERSION;
e5963c1b 29
30=head1 NAME
31
d9b65413 32DBIx::Class::Fixtures - Dump data and repopulate a database using rules
9f96b203 33
e5963c1b 34=head1 SYNOPSIS
35
8a1df391 36 use DBIx::Class::Fixtures;
e5963c1b 37
8a1df391 38 ...
e5963c1b 39
9f07224d 40 my $fixtures = DBIx::Class::Fixtures->new({
41 config_dir => '/home/me/app/fixture_configs'
8a1df391 42 });
e5963c1b 43
8a1df391 44 $fixtures->dump({
45 config => 'set_config.json',
46 schema => $source_dbic_schema,
47 directory => '/home/me/app/fixtures'
48 });
e5963c1b 49
8a1df391 50 $fixtures->populate({
51 directory => '/home/me/app/fixtures',
52 ddl => '/home/me/app/sql/ddl.sql',
53 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
54 post_ddl => '/home/me/app/sql/post_ddl.sql',
55 });
e5963c1b 56
57=head1 DESCRIPTION
58
Dump fixtures from a source database to the filesystem, then import them into
another database (with the same schema) at any time. Use them as a constant
dataset for running tests against, or for populating development databases when
it is impractical to use production clones. Describe your fixture set using
relations and conditions based on your DBIx::Class schema.
a5561f96 64
65=head1 DEFINE YOUR FIXTURE SET
66
8a1df391 67Fixture sets are currently defined in .json files which must reside in your
68config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
69describe which data to pull and dump from the source database.
a5561f96 70
71For example:
72
8a1df391 73 {
9b7171c7 74 "sets": [
8a1df391 75 {
9b7171c7 76 "class": "Artist",
77 "ids": ["1", "3"]
8a1df391 78 },
79 {
9b7171c7 80 "class": "Producer",
81 "ids": ["5"],
82 "fetch": [
8a1df391 83 {
9b7171c7 84 "rel": "artists",
85 "quantity": "2"
8a1df391 86 }
87 ]
88 }
9f07224d 89 ]
8a1df391 90 }
91
This will fetch artists with primary keys 1 and 3, the producer with primary
key 5, and two of producer 5's artists, where 'artists' is a has_many
DBIx::Class rel from Producer to Artist.
a5561f96 95
The top-level attributes are as follows:
97
a5561f96 98=head2 sets
99
8a1df391 100Sets must be an array of hashes, as in the example given above. Each set
101defines a set of objects to be included in the fixtures. For details on valid
102set attributes see L</SET ATTRIBUTES> below.
a5561f96 103
104=head2 rules
105
Rules place general conditions on classes. For example, if whenever an artist
is dumped you also want all of their cds dumped too, you could use a rule to
specify this:
109
110 {
9b7171c7 111 "sets": [
8a1df391 112 {
9b7171c7 113 "class": "Artist",
114 "ids": ["1", "3"]
9f07224d 115 },
8a1df391 116 {
9b7171c7 117 "class": "Producer",
118 "ids": ["5"],
119 "fetch": [
9f07224d 120 {
9b7171c7 121 "rel": "artists",
122 "quantity": "2"
8a1df391 123 }
124 ]
125 }
126 ],
9b7171c7 127 "rules": {
128 "Artist": {
129 "fetch": [ {
130 "rel": "cds",
131 "quantity": "all"
8a1df391 132 } ]
133 }
134 }
135 }
136
In this case all the cds of artists 1 and 3, and of all of producer 5's
artists, will be dumped as well. Note that 'cds' is a has_many DBIx::Class
relation from Artist to CD. This is equivalent to:
140
141 {
9b7171c7 142 "sets": [
a5561f96 143 {
9b7171c7 144 "class": "Artist",
145 "ids": ["1", "3"],
146 "fetch": [ {
147 "rel": "cds",
148 "quantity": "all"
8a1df391 149 } ]
9f07224d 150 },
8a1df391 151 {
9b7171c7 152 "class": "Producer",
153 "ids": ["5"],
9f07224d 154 "fetch": [ {
9b7171c7 155 "rel": "artists",
156 "quantity": "2",
157 "fetch": [ {
158 "rel": "cds",
159 "quantity": "all"
8a1df391 160 } ]
161 } ]
162 }
163 ]
164 }
a5561f96 165
Rules must be a hash keyed by class name. See L</RULE ATTRIBUTES> for the
attributes each rule can take.
169
96f2cd20 170=head2 includes
171
8a1df391 172To prevent repetition between configs you can include other configs. For
173example:
96f2cd20 174
8a1df391 175 {
9b7171c7 176 "sets": [ {
177 "class": "Producer",
178 "ids": ["5"]
8a1df391 179 } ],
9b7171c7 180 "includes": [
181 { "file": "base.json" }
8a1df391 182 ]
183 }
96f2cd20 184
8a1df391 185Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
186which is the name of another config file in the same directory. The original
187config is merged with its includes using L<Hash::Merge>.
96f2cd20 188
95566320 189=head2 datetime_relative
190
Only available for MySQL and PostgreSQL at the moment. Must be a value that
DateTime::Format::* can parse. For example:
95566320 193
8a1df391 194 {
9b7171c7 195 "sets": [ {
196 "class": "RecentItems",
197 "ids": ["9"]
8a1df391 198 } ],
9b7171c7 199 "datetime_relative": "2007-10-30 00:00:00"
8a1df391 200 }
95566320 201
This will work when dumping from a MySQL database and will cause any datetime
fields (where data_type => 'datetime' in the column definition of the schema
class) to be dumped as a DateTime::Duration object relative to the date
specified in the datetime_relative value. For example, if the RecentItem object
had a date field set to 2007-10-25, then when the fixture is imported the field
will be set to 5 days in the past relative to the current time.
95566320 208
a5561f96 209=head2 might_have
210
Specifies whether to automatically dump might_have relationships. Should be a
hash with one attribute, C<fetch>, which can be set to 1 or 0.
213
214 {
9b7171c7 215 "might_have": { "fetch": 1 },
216 "sets": [
8a1df391 217 {
9b7171c7 218 "class": "Artist",
219 "ids": ["1", "3"]
8a1df391 220 },
221 {
9b7171c7 222 "class": "Producer",
223 "ids": ["5"]
8a1df391 224 }
225 ]
226 }
227
Note: belongs_to rels are automatically dumped whether you like it or not; this
is to avoid FKs to nowhere when importing. General rules on has_many rels are
not accepted at this top level, but you can turn them on for individual sets -
see L</SET ATTRIBUTES>.
a5561f96 232
233=head1 SET ATTRIBUTES
234
235=head2 class
236
237Required attribute. Specifies the DBIx::Class object class you wish to dump.
238
239=head2 ids
240
Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
If an id is not in the source db then it just won't get dumped; no warnings, no
death.
a5561f96 244
245=head2 quantity
246
Must be either an integer or the string 'all'. Specifying an integer will
effectively set the 'rows' attribute on the resultset clause; specifying 'all'
will cause the rows attribute to be left off and all matching rows to be
dumped. There's no randomising here; it's just the first x rows.
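
For example, to dump just the first ten artists (the C<Artist> class follows
the earlier examples):

 {
   "sets": [ {
     "class": "Artist",
     "quantity": "10"
   } ]
 }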
a5561f96 251
252=head2 cond
253
8a1df391 254A hash specifying the conditions dumped objects must match. Essentially this is
255a JSON representation of a DBIx::Class search clause. For example:
a5561f96 256
 {
   "sets": [{
     "class": "Artist",
     "quantity": "all",
     "cond": { "name": "Dave" }
   }]
 }
a5561f96 264
This will dump all artists whose name is 'Dave'. Essentially
$artist_rs->search({ name => 'Dave' })->all.
a5561f96 267
268Sometimes in a search clause it's useful to use scalar refs to do things like:
269
8a1df391 270 $artist_rs->search({ no1_singles => \'> no1_albums' })
a5561f96 271
272This could be specified in the cond hash like so:
273
 {
   "sets": [ {
     "class": "Artist",
     "quantity": "all",
     "cond": { "no1_singles": "\> no1_albums" }
   } ]
 }
a5561f96 281
So if the value starts with a backslash, the value is made into a scalar ref
before being passed to search.
a5561f96 284
285=head2 join
286
287An array of relationships to be used in the cond clause.
288
 {
   "sets": [ {
     "class": "Artist",
     "quantity": "all",
     "cond": { "cds.position": { ">": 4 } },
     "join": ["cds"]
   } ]
 }
a5561f96 297
This will fetch all artists who have cds with position greater than 4.
299
300=head2 fetch
301
302Must be an array of hashes. Specifies which rels to also dump. For example:
303
8a1df391 304 {
9b7171c7 305 "sets": [ {
306 "class": "Artist",
307 "ids": ["1", "3"],
308 "fetch": [ {
309 "rel": "cds",
310 "quantity": "3",
311 "cond": { "position": "2" }
8a1df391 312 } ]
313 } ]
314 }
a5561f96 315
This will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
317
Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
'join'. 'rel' is the name of the DBIx::Class rel to follow; the rest are the
same as in the set attributes. 'quantity' is necessary for has_many
relationships, but not for belongs_to or might_have relationships.
a5561f96 322
323=head2 has_many
324
8a1df391 325Specifies whether to fetch has_many rels for this set. Must be a hash
9f07224d 326containing keys fetch and quantity.
a5561f96 327
8a1df391 328Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
329integer.
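
For example, to also dump up to ten rows of each has_many rel of the set
(a sketch reusing the C<Artist> class from the earlier examples):

 {
   "sets": [ {
     "class": "Artist",
     "ids": ["1", "3"],
     "has_many": { "fetch": 1, "quantity": "10" }
   } ]
 }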
a5561f96 330
Be careful here: dumping has_many rels can lead to a lot of data being dumped.
332
a5561f96 333=head2 might_have
334
8a1df391 335As with has_many but for might_have relationships. Quantity doesn't do anything
336in this case.
a5561f96 337
8a1df391 338This value will be inherited by all fetches in this set. This is not true for
339the has_many attribute.
a5561f96 340
c040a9b0 341=head2 external
342
In some cases your database information might be keys to values in some sort of
external storage. The classic example is using L<DBIx::Class::InflateColumn::FS>
to store blob information on the filesystem. In this case you may want the
ability to back up your external storage in the same way as your database data.
The L</external> attribute lets you specify a handler for this type of issue.
For example:
348
349 {
350 "sets": [{
351 "class": "Photo",
352 "quantity": "all",
353 "external": {
354 "file": {
355 "class": "File",
356 "args": {"path":"__ATTR(photo_dir)__"}
357 }
358 }
359 }]
360 }
361
This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
where the path to a file is specified by the C<file> field of the C<Photo> source.
We use the uninflated value of the field, so your handler needs to handle backup
and restore completely. For the common case we provide
L<DBIx::Class::Fixtures::External::File>, and you can create your own custom
handlers by placing a '+' at the start of the class name:
367
368 "class": "+MyApp::Schema::SomeExternalStorage",
369
Although, if possible, I'd love to get patches to add some of the other common
types (I imagine storage in MogileFS, Redis, etc. or even Amazon might be
popular).
372
373See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
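
A custom handler only needs to provide C<backup> and C<restore> class methods
matching the way this module calls them. Here is a minimal sketch; the package
name, the C<path> argument and the use of L<IO::All> are illustrative
assumptions rather than part of the real interface:

 package MyApp::Schema::SomeExternalStorage;
 use strict;
 use warnings;
 use IO::All;

 # called at dump time; the returned content is base64-encoded into the fixture
 sub backup {
     my ($class, $key, $args) = @_;
     return io->catfile($args->{path}, $key)->slurp;
 }

 # called at populate time with the decoded content
 sub restore {
     my ($class, $key, $content, $args) = @_;
     io->catfile($args->{path}, $key)->print($content);
 }

 1;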
374
a5561f96 375=head1 RULE ATTRIBUTES
376
377=head2 cond
378
379Same as with L</SET ATTRIBUTES>
380
381=head2 fetch
382
383Same as with L</SET ATTRIBUTES>
384
385=head2 join
386
387Same as with L</SET ATTRIBUTES>
388
389=head2 has_many
390
391Same as with L</SET ATTRIBUTES>
392
393=head2 might_have
394
395Same as with L</SET ATTRIBUTES>
e5963c1b 396
c040a9b0 397=head1 RULE SUBSTITUTIONS
398
399You can provide the following substitution patterns for your rule values. An
400example of this might be:
401
 {
   "sets": [{
     "class": "Photo",
     "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
   }]
 }
408
409=head2 ENV
410
411Provide a value from %ENV
412
413=head2 ATTR
414
415Provide a value from L</config_attrs>
416
417=head2 catfile
418
419Create the path to a file from a list
420
72c8a36d 421=head2 catdir
c040a9b0 422
423Create the path to a directory from a list
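
For example, a substitution can appear anywhere a plain value does in a config
(the C<path> argument here follows the L</external> example above):

 "args": { "path": "__catdir(var,photos)__" }

Before the set is processed this expands to the joined directory path (e.g.
C<var/photos> on Unix-like systems).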
424
0fc424b7 425=head1 METHODS
426
427=head2 new
e5963c1b 428
a5561f96 429=over 4
430
431=item Arguments: \%$attrs
432
433=item Return Value: $fixture_object
434
435=back
436
Returns a new DBIx::Class::Fixtures object. %attrs can have the following
438parameters:
439
440=over
441
9f07224d 442=item config_dir:
8a1df391 443
Required. Must contain a valid path to the directory in which your .json
configs reside.
446
9f07224d 447=item debug:
8a1df391 448
Determines whether to be verbose.
450
9f07224d 451=item ignore_sql_errors:
1ac1b0d7 452
Ignore errors on import of DDL etc.
1ac1b0d7 454
c040a9b0 455=item config_attrs
456
457A hash of information you can use to do replacements inside your configuration
458sets. For example, if your set looks like:
459
 {
   "sets": [ {
     "class": "Artist",
     "ids": ["1", "3"],
     "fetch": [ {
       "rel": "cds",
       "quantity": "__ATTR(quantity)__"
     } ]
   } ]
 }
470
 my $fixtures = DBIx::Class::Fixtures->new( {
   config_dir   => '/home/me/app/fixture_configs',
   config_attrs => {
     quantity => 100,
   },
 });
477
You may wish to do this if you want to let whoever runs the dumps have a bit
more control.
480
8a1df391 481=back
a5561f96 482
8a1df391 483 my $fixtures = DBIx::Class::Fixtures->new( {
484 config_dir => '/home/me/app/fixture_configs'
485 } );
a5561f96 486
0fc424b7 487=cut
e5963c1b 488
489sub new {
490 my $class = shift;
491
492 my ($params) = @_;
493 unless (ref $params eq 'HASH') {
494 return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
495 }
496
497 unless ($params->{config_dir}) {
498 return DBIx::Class::Exception->throw('config_dir param not specified');
499 }
500
924e1009 501 my $config_dir = io->dir($params->{config_dir});
e5963c1b 502 unless (-e $params->{config_dir}) {
503 return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
504 }
505
506 my $self = {
9f07224d 507 config_dir => $config_dir,
14e0a204 508 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
9f07224d 509 debug => $params->{debug} || 0,
510 ignore_sql_errors => $params->{ignore_sql_errors},
511 dumped_objects => {},
512 use_create => $params->{use_create} || 0,
513 use_find_or_create => $params->{use_find_or_create} || 0,
514 config_attrs => $params->{config_attrs} || {},
e5963c1b 515 };
516
517 bless $self, $class;
518
519 return $self;
520}
521
d9b65413 522=head2 available_config_sets
523
Returns a list of all the config sets found in the L</config_dir>. These will
be a list of the JSON-based files containing dump rules.
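
For example (the file names shown are illustrative):

 my @sets = $fixtures->available_config_sets;
 # e.g. ('artists.json', 'producers.json')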
526
527=cut
528
529my @config_sets;
530sub available_config_sets {
531 @config_sets = scalar(@config_sets) ? @config_sets : map {
924e1009 532 $_->filename;
9f07224d 533 } grep {
924e1009 534 -f "$_" && $_=~/json$/;
9f07224d 535 } shift->config_dir->all;
d9b65413 536}
537
0fc424b7 538=head2 dump
539
a5561f96 540=over 4
541
542=item Arguments: \%$attrs
543
544=item Return Value: 1
545
546=back
547
8a1df391 548 $fixtures->dump({
549 config => 'set_config.json', # config file to use. must be in the config
550 # directory specified in the constructor
551 schema => $source_dbic_schema,
552 directory => '/home/me/app/fixtures' # output directory
553 });
a5561f96 554
8a1df391 555or
2ef30e95 556
8a1df391 557 $fixtures->dump({
558 all => 1, # just dump everything that's in the schema
559 schema => $source_dbic_schema,
861877be 560 directory => '/home/me/app/fixtures', # output directory
561 #excludes => [ qw/Foo MyView/ ], # optionally exclude certain sources
8a1df391 562 });
2ef30e95 563
8a1df391 564In this case objects will be dumped to subdirectories in the specified
565directory. For example:
a5561f96 566
8a1df391 567 /home/me/app/fixtures/artist/1.fix
568 /home/me/app/fixtures/artist/3.fix
569 /home/me/app/fixtures/producer/5.fix
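
Each .fix file contains the row's column values serialized with
L<Data::Dump::Streamer>, roughly like this (the column names are illustrative):

 $HASH1 = {
   artistid => 1,
   name     => 'Artist 1',
 };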
a5561f96 570
C<schema> and C<directory> are required attributes. Also, one of C<config> or
C<all> must be specified.
573
861877be 574The optional parameter C<excludes> takes an array ref of source names and can be
627108df 575used to exclude those sources when dumping the whole schema. This is useful if
861877be 576you have views in there, since those do not need fixtures and will currently result
577in an error when they are created and then used with C<populate>.
578
13ff7633 579Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
580If this form is used your HashRef should conform to the structure rules defined
581for the JSON representations.
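
For example, a minimal inline config (a sketch reusing the Artist source from
the earlier examples):

 $fixtures->dump({
   config => {
     sets => [ { class => 'Artist', ids => ['1', '3'] } ],
   },
   schema    => $source_dbic_schema,
   directory => '/home/me/app/fixtures',
 });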
a5561f96 582
0fc424b7 583=cut
584
585sub dump {
586 my $self = shift;
587
588 my ($params) = @_;
589 unless (ref $params eq 'HASH') {
590 return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
591 }
592
2ef30e95 593 foreach my $param (qw/schema directory/) {
0fc424b7 594 unless ($params->{$param}) {
595 return DBIx::Class::Exception->throw($param . ' param not specified');
596 }
597 }
598
47a8ceb9 599 if($params->{excludes} && !$params->{all}) {
600 return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
601 }
602
2ef30e95 603 my $schema = $params->{schema};
2ef30e95 604 my $config;
605 if ($params->{config}) {
9f07224d 606 $config = ref $params->{config} eq 'HASH' ?
607 $params->{config} :
13ff7633 608 do {
609 #read config
924e1009 610 my $config_file = io->catfile($self->config_dir, $params->{config});
b2c7b63d 611 $self->load_config_file("$config_file");
13ff7633 612 };
2ef30e95 613 } elsif ($params->{all}) {
47a8ceb9 614 my %excludes = map {$_=>1} @{$params->{excludes}||[]};
9f07224d 615 $config = {
8a1df391 616 might_have => { fetch => 0 },
617 has_many => { fetch => 0 },
618 belongs_to => { fetch => 0 },
47a8ceb9 619 sets => [
620 map {
621 { class => $_, quantity => 'all' };
622 } grep {
623 !$excludes{$_}
624 } $schema->sources],
8a1df391 625 };
2ef30e95 626 } else {
8a1df391 627 DBIx::Class::Exception->throw('must pass config or set all');
0fc424b7 628 }
629
924e1009 630 my $output_dir = io->dir($params->{directory});
631 unless (-e "$output_dir") {
d85d888e 632 $output_dir->mkpath ||
8a1df391 633 DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
0fc424b7 634 }
635
9f96b203 636 $self->msg("generating fixtures");
b2c7b63d 637 my $tmp_output_dir = io->dir(tempdir);
0fc424b7 638
924e1009 639 if (-e "$tmp_output_dir") {
0fc424b7 640 $self->msg("- clearing existing $tmp_output_dir");
6116de11 641 $tmp_output_dir->rmtree;
0fc424b7 642 }
6116de11 643 $self->msg("- creating $tmp_output_dir");
644 $tmp_output_dir->mkpath;
0fc424b7 645
646 # write version file (for the potential benefit of populate)
924e1009 647 $tmp_output_dir->file('_dumper_version')->print($VERSION);
0fc424b7 648
c040a9b0 649 # write our current config set
924e1009 650 $tmp_output_dir->file('_config_set')->print( Dumper $config );
c040a9b0 651
0fc424b7 652 $config->{rules} ||= {};
653 my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
8a1df391 654
06b7a1cc 655 while ( my ($k,$v) = each %{ $config->{rules} } ) {
28acb622 656 if ( my $source = eval { $schema->source($k) } ) {
657 $config->{rules}{$source->source_name} = $v;
06b7a1cc 658 }
659 }
660
0fc424b7 661 foreach my $source (@sources) {
662 # apply rule to set if specified
663 my $rule = $config->{rules}->{$source->{class}};
664 $source = merge( $source, $rule ) if ($rule);
665
666 # fetch objects
2ef30e95 667 my $rs = $schema->resultset($source->{class});
c40935c5 668
669 if ($source->{cond} and ref $source->{cond} eq 'HASH') {
0a54a6e8 670 # if value starts with \ assume it's meant to be passed as a scalar ref
671 # to dbic. ideally this would substitute deeply
9f07224d 672 $source->{cond} = {
673 map {
674 $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
675 : $source->{cond}->{$_}
676 } keys %{$source->{cond}}
0a54a6e8 677 };
c40935c5 678 }
679
9f07224d 680 $rs = $rs->search($source->{cond}, { join => $source->{join} })
0a54a6e8 681 if $source->{cond};
682
0fc424b7 683 $self->msg("- dumping $source->{class}");
0a54a6e8 684
0fc424b7 685 my %source_options = ( set => { %{$config}, %{$source} } );
686 if ($source->{quantity}) {
9f07224d 687 $rs = $rs->search({}, { order_by => $source->{order_by} })
0a54a6e8 688 if $source->{order_by};
689
8a1df391 690 if ($source->{quantity} =~ /^\d+$/) {
691 $rs = $rs->search({}, { rows => $source->{quantity} });
692 } elsif ($source->{quantity} ne 'all') {
0a54a6e8 693 DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
0fc424b7 694 }
695 }
8a1df391 696 elsif ($source->{ids} && @{$source->{ids}}) {
0fc424b7 697 my @ids = @{$source->{ids}};
8a1df391 698 my (@pks) = $rs->result_source->primary_columns;
699 die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
700 $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
0fc424b7 701 }
8a1df391 702 else {
0fc424b7 703 DBIx::Class::Exception->throw('must specify either quantity or ids');
704 }
705
8a1df391 706 $source_options{set_dir} = $tmp_output_dir;
707 $self->dump_rs($rs, \%source_options );
d3ef0865 708 }
709
da25ed7c 710 # clear existing output dir
924e1009 711 foreach my $child ($output_dir->all) {
da25ed7c 712 if ($child->is_dir) {
924e1009 713 next if ("$child" eq "$tmp_output_dir");
714 if (grep { $_ =~ /\.fix/ } $child->all) {
da25ed7c 715 $child->rmtree;
716 }
717 } elsif ($child =~ /_dumper_version$/) {
924e1009 718 $child->unlink;
da25ed7c 719 }
0fc424b7 720 }
721
722 $self->msg("- moving temp dir to $output_dir");
924e1009 723 $tmp_output_dir->copy("$output_dir");
8a1df391 724
924e1009 725 if (-e "$output_dir") {
0fc424b7 726 $self->msg("- clearing tmp dir $tmp_output_dir");
727 # delete existing fixture set
924e1009 728 $tmp_output_dir->rmtree;
0fc424b7 729 }
730
731 $self->msg("done");
732
733 return 1;
734}
735
8a1df391 736sub load_config_file {
737 my ($self, $config_file) = @_;
738 DBIx::Class::Exception->throw("config does not exist at $config_file")
924e1009 739 unless -e "$config_file";
8a1df391 740
741 my $config = Config::Any::JSON->load($config_file);
742
743 #process includes
744 if (my $incs = $config->{includes}) {
745 $self->msg($incs);
746 DBIx::Class::Exception->throw(
747 'includes params of config must be an array ref of hashrefs'
748 ) unless ref $incs eq 'ARRAY';
9f07224d 749
8a1df391 750 foreach my $include_config (@$incs) {
751 DBIx::Class::Exception->throw(
752 'includes params of config must be an array ref of hashrefs'
753 ) unless (ref $include_config eq 'HASH') && $include_config->{file};
9f07224d 754
8a1df391 755 my $include_file = $self->config_dir->file($include_config->{file});
756
757 DBIx::Class::Exception->throw("config does not exist at $include_file")
924e1009 758 unless -e "$include_file";
9f07224d 759
8a1df391 760 my $include = Config::Any::JSON->load($include_file);
761 $self->msg($include);
762 $config = merge( $config, $include );
763 }
764 delete $config->{includes};
765 }
9f07224d 766
8a1df391 767 # validate config
768 return DBIx::Class::Exception->throw('config has no sets')
9f07224d 769 unless $config && $config->{sets} &&
8a1df391 770 ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
771
772 $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
773 $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
774 $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
775
776 return $config;
777}
778
779sub dump_rs {
780 my ($self, $rs, $params) = @_;
781
782 while (my $row = $rs->next) {
783 $self->dump_object($row, $params);
784 }
785}
9f07224d 786
0fc424b7 787sub dump_object {
9f07224d 788 my ($self, $object, $params) = @_;
0fc424b7 789 my $set = $params->{set};
c040a9b0 790
791 my $v = Data::Visitor::Callback->new(
792 plain_value => sub {
793 my ($visitor, $data) = @_;
794 my $subs = {
795 ENV => sub {
796 my ( $self, $v ) = @_;
797 if (! defined($ENV{$v})) {
798 return "";
799 } else {
800 return $ENV{ $v };
801 }
802 },
803 ATTR => sub {
804 my ($self, $v) = @_;
805 if(my $attr = $self->config_attrs->{$v}) {
806 return $attr;
807 } else {
808 return "";
809 }
810 },
811 catfile => sub {
812 my ($self, @args) = @_;
b2c7b63d 813 "".io->catfile(@args);
c040a9b0 814 },
815 catdir => sub {
816 my ($self, @args) = @_;
b2c7b63d 817 "".io->catdir(@args);
c040a9b0 818 },
819 };
820
9f07224d 821 my $subsre = join( '|', keys %$subs );
c040a9b0 822 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
823
824 return $_;
825 }
826 );
9f07224d 827
c040a9b0 828 $v->visit( $set );
829
0fc424b7 830 die 'no dir passed to dump_object' unless $params->{set_dir};
831 die 'no object passed to dump_object' unless $object;
832
833 my @inherited_attrs = @{$self->_inherited_attributes};
834
8a1df391 835 my @pk_vals = map {
9f07224d 836 $object->get_column($_)
8a1df391 837 } $object->primary_columns;
838
839 my $key = join("\0", @pk_vals);
840
841 my $src = $object->result_source;
842 my $exists = $self->dumped_objects->{$src->name}{$key}++;
843
844
0fc424b7 845 # write dir and gen filename
924e1009 846 my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
8a1df391 847 $source_dir->mkpath(0, 0777);
5f3da1e0 848
7a8790e2 849 # Convert characters not allowed on windows
850 my $file = io->catfile("$source_dir",
851 join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
0a54a6e8 852 );
8a1df391 853
0fc424b7 854 # write file
0fc424b7 855 unless ($exists) {
924e1009 856 $self->msg('-- dumping ' . "$file", 2);
8b97485a 857
858 # get_columns will return virtual columns; we just want stored columns.
859 # columns_info keys seems to be the actual storage column names, so we'll
860 # use that.
861 my $col_info = $src->columns_info;
862 my @column_names = keys %$col_info;
863 my %columns = $object->get_columns;
864 my %ds; @ds{@column_names} = @columns{@column_names};
0fc424b7 865
c040a9b0 866 if($set->{external}) {
867 foreach my $field (keys %{$set->{external}}) {
868 my $key = $ds{$field};
869 my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
870 my $args = $set->{external}->{$field}->{args};
871
872 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
873 eval "use $class";
874
875 $ds{external}->{$field} =
876 encode_base64( $class
b3e8abba 877 ->backup($key => $args),'');
c040a9b0 878 }
879 }
880
0fc424b7 881 # mess with dates if specified
0566a82d 882 if ($set->{datetime_relative}) {
8a1df391 883 my $formatter= $object->result_source->schema->storage->datetime_parser;
0566a82d 884 unless ($@ || !$formatter) {
885 my $dt;
886 if ($set->{datetime_relative} eq 'today') {
887 $dt = DateTime->today;
888 } else {
889 $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
890 }
0fc424b7 891
0566a82d 892 while (my ($col, $value) = each %ds) {
893 my $col_info = $object->result_source->column_info($col);
0fc424b7 894
0566a82d 895 next unless $value
896 && $col_info->{_inflate_info}
017d2ab4 897 && (
898 (uc($col_info->{data_type}) eq 'DATETIME')
899 or (uc($col_info->{data_type}) eq 'DATE')
900 or (uc($col_info->{data_type}) eq 'TIME')
901 or (uc($col_info->{data_type}) eq 'TIMESTAMP')
902 or (uc($col_info->{data_type}) eq 'INTERVAL')
903 );
0fc424b7 904
0566a82d 905 $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
906 }
907 } else {
b099fee9 908 warn "datetime_relative not supported for this db driver at the moment";
0fc424b7 909 }
910 }
911
912 # do the actual dumping
913 my $serialized = Dump(\%ds)->Out();
593b3c23 914
924e1009 915 $file->print($serialized);
0fc424b7 916 }
917
2ef30e95 918 # don't bother looking at rels unless we are actually planning to dump at least one type
0a54a6e8 919 my ($might_have, $belongs_to, $has_many) = map {
06b7a1cc 920 $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
0a54a6e8 921 } qw/might_have belongs_to has_many/;
922
923 return unless $might_have
924 || $belongs_to
925 || $has_many
8a1df391 926 || $set->{fetch};
2ef30e95 927
0fc424b7 928 # dump rels of object
0fc424b7 929 unless ($exists) {
8a1df391 930 foreach my $name (sort $src->relationships) {
931 my $info = $src->relationship_info($name);
932 my $r_source = $src->related_source($name);
0a54a6e8 933 # if belongs_to or might_have with might_have param set or has_many with
934 # has_many param set then
8a1df391 935 if (
9f07224d 936 ( $info->{attrs}{accessor} eq 'single' &&
937 (!$info->{attrs}{join_type} || $might_have)
0a54a6e8 938 )
9f07224d 939 || $info->{attrs}{accessor} eq 'filter'
940 ||
0a54a6e8 941 ($info->{attrs}{accessor} eq 'multi' && $has_many)
8a1df391 942 ) {
9f07224d 943 my $related_rs = $object->related_resultset($name);
0fc424b7 944 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
945 # these parts of the rule only apply to has_many rels
9f07224d 946 if ($rule && $info->{attrs}{accessor} eq 'multi') {
0a54a6e8 947 $related_rs = $related_rs->search(
9f07224d 948 $rule->{cond},
0a54a6e8 949 { join => $rule->{join} }
950 ) if ($rule->{cond});
951
952 $related_rs = $related_rs->search(
953 {},
954 { rows => $rule->{quantity} }
955 ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
956
957 $related_rs = $related_rs->search(
9f07224d 958 {},
0a54a6e8 959 { order_by => $rule->{order_by} }
9f07224d 960 ) if ($rule->{order_by});
0a54a6e8 961
0fc424b7 962 }
9f07224d 963 if ($set->{has_many}{quantity} &&
0a54a6e8 964 $set->{has_many}{quantity} =~ /^\d+$/) {
965 $related_rs = $related_rs->search(
9f07224d 966 {},
0a54a6e8 967 { rows => $set->{has_many}->{quantity} }
968 );
0fc424b7 969 }
0a54a6e8 970
0fc424b7 971 my %c_params = %{$params};
972 # inherit date param
9f07224d 973 my %mock_set = map {
974 $_ => $set->{$_}
0a54a6e8 975 } grep { $set->{$_} } @inherited_attrs;
976
0fc424b7 977 $c_params{set} = \%mock_set;
0a54a6e8 978 $c_params{set} = merge( $c_params{set}, $rule)
979 if $rule && $rule->{fetch};
980
8a1df391 981 $self->dump_rs($related_rs, \%c_params);
9f07224d 982 }
0fc424b7 983 }
984 }
9f07224d 985
0fc424b7 986 return unless $set && $set->{fetch};
987 foreach my $fetch (@{$set->{fetch}}) {
988 # inherit date param
9f07224d 989 $fetch->{$_} = $set->{$_} foreach
0a54a6e8 990 grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
0fc424b7 991 my $related_rs = $object->related_resultset($fetch->{rel});
992 my $rule = $set->{rules}->{$related_rs->result_source->source_name};
8a1df391 993
0fc424b7 994 if ($rule) {
995 my $info = $object->result_source->relationship_info($fetch->{rel});
996 if ($info->{attrs}{accessor} eq 'multi') {
997 $fetch = merge( $fetch, $rule );
998 } elsif ($rule->{fetch}) {
999 $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
1000 }
9f07224d 1001 }
8a1df391 1002
9f07224d 1003 die "relationship $fetch->{rel} does not exist for " . $src->source_name
8a1df391 1004 unless ($related_rs);
1005
0fc424b7 1006 if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
0a54a6e8 1007 # if value starts with \ assume it's meant to be passed as a scalar ref
1008 # to dbic. ideally this would substitute deeply
9f07224d 1009 $fetch->{cond} = { map {
1010 $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1011 : $fetch->{cond}->{$_}
8a1df391 1012 } keys %{$fetch->{cond}} };
0fc424b7 1013 }
8a1df391 1014
0a54a6e8 1015 $related_rs = $related_rs->search(
9f07224d 1016 $fetch->{cond},
0a54a6e8 1017 { join => $fetch->{join} }
1018 ) if $fetch->{cond};
1019
1020 $related_rs = $related_rs->search(
1021 {},
1022 { rows => $fetch->{quantity} }
1023 ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1024 $related_rs = $related_rs->search(
9f07224d 1025 {},
0a54a6e8 1026 { order_by => $fetch->{order_by} }
1027 ) if $fetch->{order_by};
8a1df391 1028
1029 $self->dump_rs($related_rs, { %{$params}, set => $fetch });
0fc424b7 1030 }
1031}
1032
384c3f0c 1033sub _generate_schema {
1034 my $self = shift;
1035 my $params = shift || {};
384c3f0c 1036 require DBI;
1037 $self->msg("\ncreating schema");
384c3f0c 1038
c06f7b96 1039 my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
9a9a7832 1040 eval "require $schema_class";
1041 die $@ if $@;
1042
4fb695f4 1043 my $pre_schema;
1044 my $connection_details = $params->{connection_details};
8a1df391 1045
aa9f3cc7 1046 $namespace_counter++;
8a1df391 1047
1048 my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
aa9f3cc7 1049 Class::C3::Componentised->inject_base( $namespace => $schema_class );
8a1df391 1050
aa9f3cc7 1051 $pre_schema = $namespace->connect(@{$connection_details});
1052 unless( $pre_schema ) {
384c3f0c 1053 return DBIx::Class::Exception->throw('connection details not valid');
1054 }
dcdf675f 1055 my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
f81264b2 1056 $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
4fb695f4 1057 my $dbh = $pre_schema->storage->dbh;
384c3f0c 1058
1059 # clear existing db
1060 $self->msg("- clearing DB of existing tables");
7f25d8f8 1061 $pre_schema->storage->txn_do(sub {
1062 $pre_schema->storage->with_deferred_fk_checks(sub {
1063 foreach my $table (@tables) {
9f07224d 1064 eval {
1065 $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
7f25d8f8 1066 };
1067 }
1068 });
9586eb0c 1069 });
384c3f0c 1070
1071 # import new ddl file to db
1072 my $ddl_file = $params->{ddl};
1073 $self->msg("- deploying schema using $ddl_file");
f81264b2 1074 my $data = _read_sql($ddl_file);
1075 foreach (@$data) {
1076 eval { $dbh->do($_) or warn "SQL was:\n $_"};
1ac1b0d7 1077 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
384c3f0c 1078 }
384c3f0c 1079 $self->msg("- finished importing DDL into DB");
1080
1081 # load schema object from our new DB
b4c67f96 1082 $namespace_counter++;
0a54a6e8 1083 my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
b4c67f96 1084 Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1085 my $schema = $namespace2->connect(@{$connection_details});
384c3f0c 1086 return $schema;
1087}
1088
f81264b2 1089sub _read_sql {
1090 my $ddl_file = shift;
1091 my $fh;
1092 open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1093 my @data = split(/\n/, join('', <$fh>));
1094 @data = grep(!/^--/, @data);
1095 @data = split(/;/, join('', @data));
1096 close($fh);
1097 @data = grep { $_ && $_ !~ /^-- / } @data;
1098 return \@data;
1099}
a5561f96 1100
5cc47846 1101=head2 dump_config_sets
d9b65413 1102
1103Works just like L</dump> but instead of specifying a single json config set
5cc47846 1104located in L</config_dir> we dump each set named in the C<configs> parameter.
d9b65413 1105
1106The parameters are the same as for L</dump> except instead of a C<directory>
1107parameter we have a C<directory_template> which is a coderef expected to return
1108a scalar that is a root directory where we will do the actual dumping. This
1109coderef get three arguments: C<$self>, C<$params> and C<$set_name>. For
1110example:
1111
 $fixture->dump_config_sets({
1113 schema => $schema,
5cc47846 1114 configs => [qw/one.json other.json/],
d9b65413 1115 directory_template => sub {
1116 my ($fixture, $params, $set) = @_;
924e1009 1117 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
d9b65413 1118 },
1119 });
1120
1121=cut
1122
5cc47846 1123sub dump_config_sets {
d9b65413 1124 my ($self, $params) = @_;
5cc47846 1125 my $available_config_sets = delete $params->{configs};
d9b65413 1126 my $directory_template = delete $params->{directory_template} ||
    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
1128
5cc47846 1129 for my $set (@$available_config_sets) {
5cc47846 1130 my $localparams = $params;
1131 $localparams->{directory} = $directory_template->($self, $localparams, $set);
1132 $localparams->{config} = $set;
1133 $self->dump($localparams);
745efc60 1134 $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
d9b65413 1135 }
1136}
1137
5cc47846 1138=head2 dump_all_config_sets
1139
Works just like L</dump> but instead of specifying a single JSON config set
located in L</config_dir> we dump every config set found in L</config_dir> in turn.
1149
1150The parameters are the same as for L</dump> except instead of a C<directory>
1151parameter we have a C<directory_template> which is a coderef expected to return
1152a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1154example:
1155
1156 $fixture->dump_all_config_sets({
1157 schema => $schema,
1158 directory_template => sub {
1159 my ($fixture, $params, $set) = @_;
924e1009 1160 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
5cc47846 1161 },
1162 });
1163
1164=cut
1165
1166sub dump_all_config_sets {
1167 my ($self, $params) = @_;
1168 $self->dump_config_sets({
1169 %$params,
1170 configs=>[$self->available_config_sets],
1171 });
1172}
1173
a5561f96 1174=head2 populate
1175
1176=over 4
1177
1178=item Arguments: \%$attrs
1179
1180=item Return Value: 1
1181
1182=back
1183
8a1df391 1184 $fixtures->populate( {
1185 # directory to look for fixtures in, as specified to dump
9f07224d 1186 directory => '/home/me/app/fixtures',
8a1df391 1187
1188 # DDL to deploy
9f07224d 1189 ddl => '/home/me/app/sql/ddl.sql',
8a1df391 1190
1191 # database to clear, deploy and then populate
9f07224d 1192 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
8a1df391 1193
1194 # DDL to deploy after populating records, ie. FK constraints
1195 post_ddl => '/home/me/app/sql/post_ddl.sql',
1196
1197 # use CASCADE option when dropping tables
1198 cascade => 1,
1199
9f07224d 1200 # optional, set to 1 to run ddl but not populate
8a1df391 1201 no_populate => 0,
1202
    # optional, set to 1 to run each fixture through ->create rather than have
    # each $rs populated using $rs->populate. Useful if you have overridden new()
    # logic that affects the value of column(s).
1206 use_create => 0,
1207
1208 # optional, same as use_create except with find_or_create.
1209 # Useful if you are populating a persistent data store.
1210 use_find_or_create => 0,
65a80d4e 1211
    # Don't try to clean the database, just populate over what's there. Requires
    # the schema option. Use this if you want to handle removing old data yourself.
1214 # no_deploy => 1
1215 # schema => $schema
1216 } );
a5561f96 1217
In this case the database app_dev will be cleared of all tables, then the
specified DDL deployed to it, then finally all fixtures found in
/home/me/app/fixtures will be added to it. populate will generate its own
DBIx::Class schema from the DDL rather than being passed one to use. This is
better as custom insert methods are avoided, which can get in the way. In some
cases you might not have a DDL, and so this method will eventually allow a
$schema object to be passed instead.
a5561f96 1225
If needed, you can specify a post_ddl attribute which is a DDL to be applied
after all the fixtures have been added to the database. A good use of this
option would be to add foreign key constraints, since databases like PostgreSQL
cannot disable foreign key checks.
f81264b2 1230
If your tables have foreign key constraints you may want to use the cascade
attribute, which will make the drop table functionality cascade, i.e. 'DROP
TABLE $table CASCADE'.
f81264b2 1234
9f07224d 1235C<directory> is a required attribute.
9e77162b 1236
1237If you wish for DBIx::Class::Fixtures to clear the database for you pass in
1238C<dll> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1239user and pass).
1240
1241If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1242attribute containing the connected schema you wish to operate on and set the
1243C<no_deploy> attribute.
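
For example (a sketch; C<$schema> is an already connected schema object):

 $fixtures->populate({
   directory => '/home/me/app/fixtures',
   schema    => $schema,
   no_deploy => 1,
 });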
a5561f96 1244
1245=cut
1246
384c3f0c 1247sub populate {
1248 my $self = shift;
1249 my ($params) = @_;
0a54a6e8 1250 DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1251 unless ref $params eq 'HASH';
1252
1253 DBIx::Class::Exception->throw('directory param not specified')
1254 unless $params->{directory};
384c3f0c 1255
924e1009 1256 my $fixture_dir = io->dir(delete $params->{directory});
0a54a6e8 1257 DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
924e1009 1258 unless -d "$fixture_dir";
384c3f0c 1259
1260 my $ddl_file;
9e77162b 1261 my $dbh;
1262 my $schema;
384c3f0c 1263 if ($params->{ddl} && $params->{connection_details}) {
924e1009 1264 $ddl_file = io->file(delete $params->{ddl});
1265 unless (-e "$ddl_file") {
384c3f0c 1266 return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1267 }
1268 unless (ref $params->{connection_details} eq 'ARRAY') {
1269 return DBIx::Class::Exception->throw('connection details must be an arrayref');
1270 }
9f07224d 1271 $schema = $self->_generate_schema({
b2c7b63d 1272 ddl => "$ddl_file",
8a1df391 1273 connection_details => delete $params->{connection_details},
1274 %{$params}
1275 });
9e77162b 1276 } elsif ($params->{schema} && $params->{no_deploy}) {
1277 $schema = $params->{schema};
384c3f0c 1278 } else {
0a54a6e8 1279 DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
384c3f0c 1280 }
1281
3ad96388 1282
9f07224d 1283 return 1 if $params->{no_populate};
1284
4fb695f4 1285 $self->msg("\nimporting fixtures");
924e1009 1286 my $tmp_fixture_dir = io->dir(tempdir());
924e1009 1287 my $config_set_path = io->file($fixture_dir, '_config_set');
1288 my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
c040a9b0 1289
1290 my $v = Data::Visitor::Callback->new(
1291 plain_value => sub {
1292 my ($visitor, $data) = @_;
1293 my $subs = {
1294 ENV => sub {
1295 my ( $self, $v ) = @_;
1296 if (! defined($ENV{$v})) {
1297 return "";
1298 } else {
1299 return $ENV{ $v };
1300 }
1301 },
1302 ATTR => sub {
1303 my ($self, $v) = @_;
1304 if(my $attr = $self->config_attrs->{$v}) {
1305 return $attr;
1306 } else {
1307 return "";
1308 }
1309 },
1310 catfile => sub {
1311 my ($self, @args) = @_;
924e1009 1312 io->catfile(@args);
c040a9b0 1313 },
1314 catdir => sub {
1315 my ($self, @args) = @_;
924e1009 1316 io->catdir(@args);
c040a9b0 1317 },
1318 };
1319
9f07224d 1320 my $subsre = join( '|', keys %$subs );
c040a9b0 1321 $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1322
1323 return $_;
1324 }
1325 );
9f07224d 1326
c040a9b0 1327 $v->visit( $config_set );
1328
1329
1330 my %sets_by_src;
1331 if($config_set) {
1332 %sets_by_src = map { delete($_->{class}) => $_ }
1333 @{$config_set->{sets}}
1334 }
1335
924e1009 1336 if (-e "$tmp_fixture_dir") {
384c3f0c 1337 $self->msg("- deleting existing temp directory $tmp_fixture_dir");
4fb695f4 1338 $tmp_fixture_dir->rmtree;
384c3f0c 1339 }
1340 $self->msg("- creating temp dir");
51794e1c 1341 $tmp_fixture_dir->mkpath();
dcdf675f 1342 for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
924e1009 1343 my $from_dir = io->catdir($fixture_dir, $_);
1344 next unless -e "$from_dir";
1345 $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
0a54a6e8 1346 }
9e77162b 1347
924e1009 1348 unless (-d "$tmp_fixture_dir") {
0a54a6e8 1349 DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
9e77162b 1350 }
384c3f0c 1351
384c3f0c 1352 my $fixup_visitor;
0a54a6e8 1353 my $formatter = $schema->storage->datetime_parser;
0566a82d 1354 unless ($@ || !$formatter) {
1355 my %callbacks;
1356 if ($params->{datetime_relative_to}) {
1357 $callbacks{'DateTime::Duration'} = sub {
1358 $params->{datetime_relative_to}->clone->add_duration($_);
1359 };
1360 } else {
1361 $callbacks{'DateTime::Duration'} = sub {
1362 $formatter->format_datetime(DateTime->today->add_duration($_))
1363 };
1364 }
9f07224d 1365 $callbacks{object} ||= "visit_ref";
0566a82d 1366 $fixup_visitor = new Data::Visitor::Callback(%callbacks);
384c3f0c 1367 }
1ac1b0d7 1368
caafa766 1369 my @sorted_source_names = $self->_get_sorted_sources( $schema );
7f25d8f8 1370 $schema->storage->txn_do(sub {
1371 $schema->storage->with_deferred_fk_checks(sub {
534c9101 1372 foreach my $source (@sorted_source_names) {
7f25d8f8 1373 $self->msg("- adding " . $source);
1374 my $rs = $schema->resultset($source);
924e1009 1375 my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
1376 next unless (-e "$source_dir");
7f25d8f8 1377 my @rows;
1378 while (my $file = $source_dir->next) {
1379 next unless ($file =~ /\.fix$/);
1380 next if $file->is_dir;
1381 my $contents = $file->slurp;
1382 my $HASH1;
1383 eval($contents);
1384 $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
c040a9b0 1385 if(my $external = delete $HASH1->{external}) {
1386 my @fields = keys %{$sets_by_src{$source}->{external}};
1387 foreach my $field(@fields) {
1388 my $key = $HASH1->{$field};
1389 my $content = decode_base64 ($external->{$field});
1390 my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1391 my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1392 $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1393 eval "use $class";
1394 $class->restore($key, $content, $args);
1395 }
1396 }
65a80d4e 1397 if ( $params->{use_create} ) {
1398 $rs->create( $HASH1 );
30421f98 1399 } elsif( $params->{use_find_or_create} ) {
1400 $rs->find_or_create( $HASH1 );
65a80d4e 1401 } else {
1402 push(@rows, $HASH1);
1403 }
7f25d8f8 1404 }
1405 $rs->populate(\@rows) if scalar(@rows);
75d9325a 1406
1407 ## Now we need to do some db specific cleanup
1408 ## this probably belongs in a more isolated space. Right now this is
1409 ## to just handle postgresql SERIAL types that use Sequences
5487ad1b 1410 ## Will completely ignore sequences in Oracle due to having to drop
1411 ## and recreate them
75d9325a 1412
1413 my $table = $rs->result_source->name;
1414 for my $column(my @columns = $rs->result_source->columns) {
1415 my $info = $rs->result_source->column_info($column);
1416 if(my $sequence = $info->{sequence}) {
1417 $self->msg("- updating sequence $sequence");
1418 $rs->result_source->storage->dbh_do(sub {
1419 my ($storage, $dbh, @cols) = @_;
5487ad1b 1420 if ( $dbh->{Driver}->{Name} eq "Oracle" ) {
1421 $self->msg("- Cannot change sequence values in Oracle");
1422 } else {
1423 $self->msg(
1424 my $sql = sprintf("SELECT setval(?, (SELECT max(%s) FROM %s));",$dbh->quote_identifier($column),$dbh->quote_identifier($table))
1425 );
1426 my $sth = $dbh->prepare($sql);
1427 $sth->bind_param(1,$sequence);
1428
1429 my $rv = $sth->execute or die $sth->errstr;
1430 $self->msg("- $sql");
1431 }
75d9325a 1432 });
1433 }
1434 }
1435
1ac1b0d7 1436 }
7f25d8f8 1437 });
1ac1b0d7 1438 });
8a1df391 1439 $self->do_post_ddl( {
75d9325a 1440 schema=>$schema,
8a1df391 1441 post_ddl=>$params->{post_ddl}
1442 } ) if $params->{post_ddl};
f81264b2 1443
384c3f0c 1444 $self->msg("- fixtures imported");
1445 $self->msg("- cleaning up");
1446 $tmp_fixture_dir->rmtree;
b099fee9 1447 return 1;
384c3f0c 1448}
1449
a5a045e1 1450# the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
1451sub _get_sorted_sources {
1452 my ( $self, $dbicschema ) = @_;
1453
1454
1455 my %table_monikers = map { $_ => 1 } $dbicschema->sources;
1456
1457 my %tables;
1458 foreach my $moniker (sort keys %table_monikers) {
1459 my $source = $dbicschema->source($moniker);
1460
1461 my $table_name = $source->name;
1462 my @primary = $source->primary_columns;
1463 my @rels = $source->relationships();
1464
1465 my %created_FK_rels;
1466 foreach my $rel (sort @rels) {
1467 my $rel_info = $source->relationship_info($rel);
1468
1469 # Ignore any rel cond that isn't a straight hash
1470 next unless ref $rel_info->{cond} eq 'HASH';
1471
1472 my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
1473
1474 # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
1475 my $fk_constraint;
1476 if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
1477 $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
1478 } elsif ( $rel_info->{attrs}{accessor}
1479 && $rel_info->{attrs}{accessor} eq 'multi' ) {
1480 $fk_constraint = 0;
1481 } else {
1482 $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
1483 }
1484
1485 # Dont add a relation if its not constraining
1486 next unless $fk_constraint;
1487
1488 my $rel_table = $source->related_source($rel)->source_name;
1489 # Make sure we don't create the same relation twice
1490 my $key_test = join("\x00", sort @keys);
1491 next if $created_FK_rels{$rel_table}->{$key_test};
1492
1493 if (scalar(@keys)) {
1494 $created_FK_rels{$rel_table}->{$key_test} = 1;
1495
1496 # calculate dependencies: do not consider deferrable constraints and
1497 # self-references for dependency calculations
1498 if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
1499 $tables{$moniker}{$rel_table}++;
1500 }
1501 }
1502 }
1503 $tables{$moniker} = {} unless exists $tables{$moniker};
1504 }
1505
1506 # resolve entire dep tree
1507 my $dependencies = {
1508 map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
1509 };
1510
1511 # return the sorted result
1512 return sort {
1513 keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
1514 ||
1515 $a cmp $b
1516 } (keys %tables);
1517}
1518
1519sub _resolve_deps {
1520 my ( $question, $answers, $seen ) = @_;
1521 my $ret = {};
1522 $seen ||= {};
1523
1524 my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
1525 $seen{$question} = 1;
1526
1527 for my $dep (keys %{ $answers->{$question} }) {
1528 return {} if $seen->{$dep};
1529 my $subdeps = _resolve_deps( $dep, $answers, \%seen );
a5a045e1 1530 $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
1531 ++$ret->{$dep};
1532 }
1533 return $ret;
1534}
1535
6a05e381 1536sub do_post_ddl {
1537 my ($self, $params) = @_;
1538
1539 my $schema = $params->{schema};
1540 my $data = _read_sql($params->{post_ddl});
1541 foreach (@$data) {
1542 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1ac1b0d7 1543 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
6a05e381 1544 }
1545 $self->msg("- finished importing post-populate DDL into DB");
1546}
1547
0fc424b7 1548sub msg {
1549 my $self = shift;
1550 my $subject = shift || return;
9a9a7832 1551 my $level = shift || 1;
9a9a7832 1552 return unless $self->debug >= $level;
0fc424b7 1553 if (ref $subject) {
1554 print Dumper($subject);
1555 } else {
1556 print $subject . "\n";
1557 }
1558}
a5561f96 1559
dcdf675f 1560# Helper method for ensuring that the name used for a given source
1561# is always the same (This is used to name the fixture directories
1562# for example)
1563
1564sub _name_for_source {
1565 my ($self, $source) = @_;
1566
1567 return ref $source->name ? $source->source_name : $source->name;
1568}
1569
a5561f96 1570=head1 AUTHOR
1571
1572 Luke Saunders <luke@shadowcatsystems.co.uk>
1573
3b4f6e76 1574 Initial development sponsored by and (c) Takkle, Inc. 2007
1575
a5561f96 1576=head1 CONTRIBUTORS
1577
1578 Ash Berlin <ash@shadowcatsystems.co.uk>
8a1df391 1579
a5561f96 1580 Matt S. Trout <mst@shadowcatsystems.co.uk>
8a1df391 1581
bff96109 1582 John Napiorkowski <jjnapiork@cpan.org>
1583
fc17c598 1584 Drew Taylor <taylor.andrew.j@gmail.com>
a5561f96 1585
9b7171c7 1586 Frank Switalski <fswitalski@gmail.com>
1587
bb6d61a7 1588 Chris Akins <chris.hexx@gmail.com>
1589
c69f5953 1590 Tom Bloor <t.bloor@shadowcat.co.uk>
1591
1592 Samuel Kaufman <skaufman@cpan.org>
1593
3b4f6e76 1594=head1 LICENSE
1595
1596 This library is free software under the same license as perl itself
1597
a5561f96 1598=cut
1599
627108df 16001;