package DBIx::Class::Fixtures;

use strict;
use warnings;

use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
use Path::Class qw(dir file);
use File::Spec::Functions 'catfile', 'catdir';
use Config::Any::JSON;
use Data::Dump::Streamer;
use Data::Visitor::Callback;
use File::Path;
use File::Copy::Recursive qw/dircopy/;
use File::Copy qw/move/;
use Hash::Merge qw( merge );
use Data::Dumper;
use Class::C3::Componentised;
use MIME::Base64;

use base qw(Class::Accessor::Grouped);

our $namespace_counter = 0;

__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
    _inherited_attributes debug schema_class dumped_objects config_attrs/);

=head1 VERSION

Version 1.001015

=cut

our $VERSION = '1.001015';

=head1 NAME

DBIx::Class::Fixtures - Dump data and repopulate a database using rules

=head1 SYNOPSIS

 use DBIx::Class::Fixtures;

 ...

 my $fixtures = DBIx::Class::Fixtures->new({
   config_dir => '/home/me/app/fixture_configs'
 });

 $fixtures->dump({
   config => 'set_config.json',
   schema => $source_dbic_schema,
   directory => '/home/me/app/fixtures'
 });

 $fixtures->populate({
   directory => '/home/me/app/fixtures',
   ddl => '/home/me/app/sql/ddl.sql',
   connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
   post_ddl => '/home/me/app/sql/post_ddl.sql',
 });

=head1 DESCRIPTION

Dump fixtures from a source database to the filesystem, then import them into
another database (with the same schema) at any time. Use them as a constant
dataset for running tests against, or for populating development databases
when it is impractical to use production clones. Fixture sets are described
using relations and conditions based on your DBIx::Class schema.

=head1 DEFINE YOUR FIXTURE SET

Fixture sets are currently defined in .json files which must reside in your
config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
describe which data to pull and dump from the source database.

For example:

 {
   "sets": [
     {
       "class": "Artist",
       "ids": ["1", "3"]
     },
     {
       "class": "Producer",
       "ids": ["5"],
       "fetch": [
         {
           "rel": "artists",
           "quantity": "2"
         }
       ]
     }
   ]
 }

This will fetch artists with primary keys 1 and 3, the producer with primary
key 5, and two of producer 5's artists, where 'artists' is a has_many
DBIx::Class rel from Producer to Artist.

The top level attributes are as follows:

=head2 sets

Sets must be an array of hashes, as in the example given above. Each set
defines a set of objects to be included in the fixtures. For details on valid
set attributes see L</SET ATTRIBUTES> below.

=head2 rules

Rules place general conditions on classes. For example, if whenever an artist
is dumped you also want all of their cds dumped too, you can use a rule to
specify this:

 {
   "sets": [
     {
       "class": "Artist",
       "ids": ["1", "3"]
     },
     {
       "class": "Producer",
       "ids": ["5"],
       "fetch": [
         {
           "rel": "artists",
           "quantity": "2"
         }
       ]
     }
   ],
   "rules": {
     "Artist": {
       "fetch": [ {
         "rel": "cds",
         "quantity": "all"
       } ]
     }
   }
 }

In this case all the cds of artists 1 and 3, and of all of producer 5's
artists, will be dumped as well. Note that 'cds' is a has_many DBIx::Class
relation from Artist to CD. This is equivalent to:

 {
   "sets": [
     {
       "class": "Artist",
       "ids": ["1", "3"],
       "fetch": [ {
         "rel": "cds",
         "quantity": "all"
       } ]
     },
     {
       "class": "Producer",
       "ids": ["5"],
       "fetch": [ {
         "rel": "artists",
         "quantity": "2",
         "fetch": [ {
           "rel": "cds",
           "quantity": "all"
         } ]
       } ]
     }
   ]
 }

Rules must be a hash keyed by class name.

See L</RULE ATTRIBUTES> below for the valid rule attributes.

=head2 includes

To prevent repetition between configs you can include other configs. For
example:

 {
   "sets": [ {
     "class": "Producer",
     "ids": ["5"]
   } ],
   "includes": [
     { "file": "base.json" }
   ]
 }

Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
which is the name of another config file in the same directory. The original
config is merged with its includes using L<Hash::Merge>.

=head2 datetime_relative

Only available for MySQL and PostgreSQL at the moment. Must be a value that
DateTime::Format::* can parse. For example:

 {
   "sets": [ {
     "class": "RecentItems",
     "ids": ["9"]
   } ],
   "datetime_relative": "2007-10-30 00:00:00"
 }

This will work when dumping from a MySQL database and will cause any datetime
fields (where data_type => 'datetime' in the column def of the schema class) to
be dumped as a DateTime::Duration object relative to the date specified in the
datetime_relative value. For example, if the RecentItems object had a date field
set to 2007-10-25, then when the fixture is imported the field will be set to 5
days in the past relative to the current time.

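When populating, you can re-anchor these durations to a date of your choosing
by passing a C<datetime_relative_to> parameter (a DateTime object) to
L</populate>. A brief sketch, assuming the L<DateTime> module is available:

 use DateTime;

 $fixtures->populate({
   directory            => '/home/me/app/fixtures',
   ddl                  => '/home/me/app/sql/ddl.sql',
   connection_details   => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
   datetime_relative_to => DateTime->new(year => 2007, month => 10, day => 30),
 });
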
=head2 might_have

Specifies whether to automatically dump might_have relationships. Should be a
hash with one attribute, C<fetch>. Set fetch to 1 or 0.

 {
   "might_have": { "fetch": 1 },
   "sets": [
     {
       "class": "Artist",
       "ids": ["1", "3"]
     },
     {
       "class": "Producer",
       "ids": ["5"]
     }
   ]
 }

Note: belongs_to rels are automatically dumped whether you like it or not;
this is to avoid FKs to nowhere when importing. General rules on has_many rels
are not accepted at this top level, but you can turn them on for individual
sets - see L</SET ATTRIBUTES>.

=head1 SET ATTRIBUTES

=head2 class

Required attribute. Specifies the DBIx::Class object class you wish to dump.

=head2 ids

Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
If an id is not in the source db then it simply won't get dumped; no warnings,
no death.

=head2 quantity

Must be either an integer or the string 'all'. Specifying an integer will
effectively set the 'rows' attribute on the resultset clause; specifying 'all'
will cause the rows attribute to be left off and all matching rows to be
dumped. There's no randomising here, it's just the first x rows.

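For example, to dump only the first ten artists (the class name here is just
the one used throughout these examples):

 {
   "sets": [ {
     "class": "Artist",
     "quantity": "10"
   } ]
 }
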
=head2 cond

A hash specifying the conditions dumped objects must match. Essentially this is
a JSON representation of a DBIx::Class search clause. For example:

 {
   "sets": [{
     "class": "Artist",
     "quantity": "all",
     "cond": { "name": "Dave" }
   }]
 }

This will dump all artists whose name is 'Dave'. Essentially
$artist_rs->search({ name => 'Dave' })->all.

Sometimes in a search clause it's useful to use scalar refs to do things like:

 $artist_rs->search({ no1_singles => \'> no1_albums' })

This could be specified in the cond hash like so:

 {
   "sets": [ {
     "class": "Artist",
     "quantity": "all",
     "cond": { "no1_singles": "\\> no1_albums" }
   } ]
 }

So if the value starts with a backslash the value is made a scalar ref before
being passed to search.

=head2 join

An array of relationships to be used in the cond clause.

 {
   "sets": [ {
     "class": "Artist",
     "quantity": "all",
     "cond": { "cds.position": { ">": 4 } },
     "join": ["cds"]
   } ]
 }

Fetch all artists who have cds with position greater than 4.

=head2 fetch

Must be an array of hashes. Specifies which rels to also dump. For example:

 {
   "sets": [ {
     "class": "Artist",
     "ids": ["1", "3"],
     "fetch": [ {
       "rel": "cds",
       "quantity": "3",
       "cond": { "position": "2" }
     } ]
   } ]
 }

Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.

Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
'join'. rel is the name of the DBIx::Class rel to follow, and the rest are the
same as in the set attributes. quantity is necessary for has_many
relationships, but not for belongs_to or might_have relationships.

=head2 has_many

Specifies whether to fetch has_many rels for this set. Must be a hash
containing keys fetch and quantity.

Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
integer.

Be careful here: dumping has_many rels can lead to a lot of data being dumped.

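For example, a sketch (using the same example schema) that dumps artists 1 and
3 along with up to ten rows of each of their has_many rels:

 {
   "sets": [ {
     "class": "Artist",
     "ids": ["1", "3"],
     "has_many": { "fetch": 1, "quantity": "10" }
   } ]
 }
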
=head2 might_have

As with has_many but for might_have relationships. Quantity doesn't do anything
in this case.

This value will be inherited by all fetches in this set. This is not true for
the has_many attribute.

=head2 external

In some cases your database information might be keys to values in some sort of
external storage. The classic example is using L<DBIx::Class::InflateColumn::FS>
to store blob information on the filesystem. In this case you may want the
ability to back up your external storage in the same way as your database data.
The L</external> attribute lets you specify a handler for this type of issue.
For example:

 {
   "sets": [{
     "class": "Photo",
     "quantity": "all",
     "external": {
       "file": {
         "class": "File",
         "args": {"path":"__ATTR(photo_dir)__"}
       }
     }
   }]
 }

This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
where the path to a file is specified by the C<file> field of the C<Photo>
source. We use the uninflated value of the field, so you need to completely
handle backup and restore. For the common case we provide
L<DBIx::Class::Fixtures::External::File>, and you can create your own custom
handlers by prefixing the class name with a '+':

 "class": "+MyApp::Schema::SomeExternalStorage",

Although if possible I'd love to get patches to add some of the other common
types (I imagine storage in MogileFS, Redis, etc. or even Amazon might be
popular).

See L<DBIx::Class::Fixtures::External::File> for the external handler interface.

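For reference, a minimal sketch of what such a custom handler needs to provide.
The package name and storage details here are purely illustrative; only the
C<backup>/C<restore> class-method interface is what this module calls:

 package MyApp::Schema::SomeExternalStorage;
 use strict;
 use warnings;

 # Called while dumping; return the raw content for $key as a string
 # (it is base64 encoded and stored alongside the fixture for you).
 sub backup {
   my ($class, $key, $args) = @_;
   ...
 }

 # Called while populating; write $content back to wherever $key points.
 sub restore {
   my ($class, $key, $content, $args) = @_;
   ...
 }

 1;
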
=head1 RULE ATTRIBUTES

=head2 cond

Same as with L</SET ATTRIBUTES>

=head2 fetch

Same as with L</SET ATTRIBUTES>

=head2 join

Same as with L</SET ATTRIBUTES>

=head2 has_many

Same as with L</SET ATTRIBUTES>

=head2 might_have

Same as with L</SET ATTRIBUTES>

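For example, a rule that combines a couple of these attributes (using the
example schema from above):

 {
   "sets": [ { "class": "Artist", "quantity": "all" } ],
   "rules": {
     "Artist": {
       "cond": { "name": "Dave" },
       "fetch": [ { "rel": "cds", "quantity": "all" } ]
     }
   }
 }

This dumps every artist named 'Dave' along with all of their cds.
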
=head1 RULE SUBSTITUTIONS

You can provide the following substitution patterns for your rule values. An
example of this might be:

 {
   "sets": [{
     "class": "Photo",
     "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
   }]
 }

=head2 ENV

Provide a value from %ENV

=head2 ATTR

Provide a value from L</config_attrs>

=head2 catfile

Create the path to a file from a list

=head2 catdir

Create the path to a directory from a list

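For example, a sketch of using C<catdir> to build a platform-correct path (the
set and path pieces here are purely illustrative):

 {
   "sets": [ {
     "class": "Photo",
     "quantity": "all",
     "external": {
       "file": {
         "class": "File",
         "args": { "path": "__catdir(var,storage,photos)__" }
       }
     }
   } ]
 }
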
=head1 METHODS

=head2 new

=over 4

=item Arguments: \%$attrs

=item Return Value: $fixture_object

=back

Returns a new DBIx::Class::Fixtures object. %attrs can have the following
parameters:

=over

=item config_dir:

Required. Must contain a valid path to the directory in which your .json
configs reside.

=item debug:

Determines whether to be verbose.

=item ignore_sql_errors:

Ignore errors on import of DDL etc.

=item config_attrs

A hash of information you can use to do replacements inside your configuration
sets. For example, if your set looks like:

 {
   "sets": [ {
     "class": "Artist",
     "ids": ["1", "3"],
     "fetch": [ {
       "rel": "cds",
       "quantity": "__ATTR(quantity)__"
     } ]
   } ]
 }

 my $fixtures = DBIx::Class::Fixtures->new( {
   config_dir => '/home/me/app/fixture_configs',
   config_attrs => {
     quantity => 100,
   },
 });

You may wish to do this if you want to let whoever runs the dumps have a bit
more control over how they are created.

=back

 my $fixtures = DBIx::Class::Fixtures->new( {
   config_dir => '/home/me/app/fixture_configs'
 } );

=cut

sub new {
  my $class = shift;

  my ($params) = @_;
  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
  }

  unless ($params->{config_dir}) {
    return DBIx::Class::Exception->throw('config_dir param not specified');
  }

  my $config_dir = dir($params->{config_dir});
  unless (-e $params->{config_dir}) {
    return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
  }

  my $self = {
    config_dir => $config_dir,
    _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
    debug => $params->{debug} || 0,
    ignore_sql_errors => $params->{ignore_sql_errors},
    dumped_objects => {},
    use_create => $params->{use_create} || 0,
    config_attrs => $params->{config_attrs} || {},
  };

  bless $self, $class;

  return $self;
}

=head2 available_config_sets

Returns a list of all the config sets found in the L</config_dir>. These will
be a list of the JSON-based files containing dump rules.

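For example:

 my @sets = $fixtures->available_config_sets;
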
=cut

my @config_sets;
sub available_config_sets {
  @config_sets = scalar(@config_sets) ? @config_sets : map {
    $_->basename;
  } grep {
    -f $_ && $_=~/json$/;
  } dir((shift)->config_dir)->children;
}

=head2 dump

=over 4

=item Arguments: \%$attrs

=item Return Value: 1

=back

 $fixtures->dump({
   config => 'set_config.json', # config file to use. must be in the config
                                # directory specified in the constructor
   schema => $source_dbic_schema,
   directory => '/home/me/app/fixtures' # output directory
 });

or

 $fixtures->dump({
   all => 1, # just dump everything that's in the schema
   schema => $source_dbic_schema,
   directory => '/home/me/app/fixtures' # output directory
 });

In this case objects will be dumped to subdirectories in the specified
directory. For example:

 /home/me/app/fixtures/artist/1.fix
 /home/me/app/fixtures/artist/3.fix
 /home/me/app/fixtures/producer/5.fix

C<schema> and C<directory> are required attributes. Also, one of C<config> or
C<all> must be specified.

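When dumping with C<all> you can also pass an C<excludes> arrayref of source
names to leave out of the dump (it is only valid together with C<all>). For
example, with an illustrative source name:

 $fixtures->dump({
   all => 1,
   excludes => ['AuditLog'],
   schema => $source_dbic_schema,
   directory => '/home/me/app/fixtures'
 });
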
Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
If this form is used your HashRef should conform to the structure rules defined
for the JSON representations.

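For example, a minimal sketch of an inline config equivalent to the first JSON
set above. Note that when you pass a HashRef the defaults normally added while
loading a config file, such as C<belongs_to =E<gt> { fetch =E<gt> 1 }>, are not
applied for you, so spell out the ones you rely on:

 $fixtures->dump({
   schema    => $source_dbic_schema,
   directory => '/home/me/app/fixtures',
   config    => {
     belongs_to => { fetch => 1 },
     sets => [
       { class => 'Artist', ids => ['1', '3'] },
     ],
   },
 });
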
=cut

sub dump {
  my $self = shift;

  my ($params) = @_;
  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
  }

  foreach my $param (qw/schema directory/) {
    unless ($params->{$param}) {
      return DBIx::Class::Exception->throw($param . ' param not specified');
    }
  }

  if($params->{excludes} && !$params->{all}) {
    return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
  }

  my $schema = $params->{schema};
  my $config;
  if ($params->{config}) {
    $config = ref $params->{config} eq 'HASH' ?
      $params->{config} :
      do {
        #read config
        my $config_file = $self->config_dir->file($params->{config});
        $self->load_config_file($config_file);
      };
  } elsif ($params->{all}) {
    my %excludes = map {$_=>1} @{$params->{excludes}||[]};
    $config = {
      might_have => { fetch => 0 },
      has_many => { fetch => 0 },
      belongs_to => { fetch => 0 },
      sets => [
        map {
          { class => $_, quantity => 'all' };
        } grep {
          !$excludes{$_}
        } $schema->sources],
    };
  } else {
    DBIx::Class::Exception->throw('must pass config or set all');
  }

  my $output_dir = dir($params->{directory});
  unless (-e $output_dir) {
    $output_dir->mkpath ||
      DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
  }

  $self->msg("generating fixtures");
  my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);

  if (-e $tmp_output_dir) {
    $self->msg("- clearing existing $tmp_output_dir");
    $tmp_output_dir->rmtree;
  }
  $self->msg("- creating $tmp_output_dir");
  $tmp_output_dir->mkpath;

  # write version file (for the potential benefit of populate)
  $tmp_output_dir->file('_dumper_version')
                 ->openw
                 ->print($VERSION);

  # write our current config set
  $tmp_output_dir->file('_config_set')
                 ->openw
                 ->print( Dumper $config );

  $config->{rules} ||= {};
  my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};

  while ( my ($k,$v) = each %{ $config->{rules} } ) {
    if ( my $source = eval { $schema->source($k) } ) {
      $config->{rules}{$source->source_name} = $v;
    }
  }

  foreach my $source (@sources) {
    # apply rule to set if specified
    my $rule = $config->{rules}->{$source->{class}};
    $source = merge( $source, $rule ) if ($rule);

    # fetch objects
    my $rs = $schema->resultset($source->{class});

    if ($source->{cond} and ref $source->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      $source->{cond} = {
        map {
          $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
                                                   : $source->{cond}->{$_}
        } keys %{$source->{cond}}
      };
    }

    $rs = $rs->search($source->{cond}, { join => $source->{join} })
      if $source->{cond};

    $self->msg("- dumping $source->{class}");

    my %source_options = ( set => { %{$config}, %{$source} } );
    if ($source->{quantity}) {
      $rs = $rs->search({}, { order_by => $source->{order_by} })
        if $source->{order_by};

      if ($source->{quantity} =~ /^\d+$/) {
        $rs = $rs->search({}, { rows => $source->{quantity} });
      } elsif ($source->{quantity} ne 'all') {
        DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
      }
    }
    elsif ($source->{ids} && @{$source->{ids}}) {
      my @ids = @{$source->{ids}};
      my (@pks) = $rs->result_source->primary_columns;
      die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
      $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
    }
    else {
      DBIx::Class::Exception->throw('must specify either quantity or ids');
    }

    $source_options{set_dir} = $tmp_output_dir;
    $self->dump_rs($rs, \%source_options );
  }

  # clear existing output dir
  foreach my $child ($output_dir->children) {
    if ($child->is_dir) {
      next if ($child eq $tmp_output_dir);
      if (grep { $_ =~ /\.fix/ } $child->children) {
        $child->rmtree;
      }
    } elsif ($child =~ /_dumper_version$/) {
      $child->remove;
    }
  }

  $self->msg("- moving temp dir to $output_dir");
  move($_, dir($output_dir, $_->relative($_->parent)->stringify))
    for $tmp_output_dir->children;

  if (-e $output_dir) {
    $self->msg("- clearing tmp dir $tmp_output_dir");
    # delete existing fixture set
    $tmp_output_dir->remove;
  }

  $self->msg("done");

  return 1;
}

sub load_config_file {
  my ($self, $config_file) = @_;
  DBIx::Class::Exception->throw("config does not exist at $config_file")
    unless -e $config_file;

  my $config = Config::Any::JSON->load($config_file);

  #process includes
  if (my $incs = $config->{includes}) {
    $self->msg($incs);
    DBIx::Class::Exception->throw(
      'includes params of config must be an array ref of hashrefs'
    ) unless ref $incs eq 'ARRAY';

    foreach my $include_config (@$incs) {
      DBIx::Class::Exception->throw(
        'includes params of config must be an array ref of hashrefs'
      ) unless (ref $include_config eq 'HASH') && $include_config->{file};

      my $include_file = $self->config_dir->file($include_config->{file});

      DBIx::Class::Exception->throw("config does not exist at $include_file")
        unless -e $include_file;

      my $include = Config::Any::JSON->load($include_file);
      $self->msg($include);
      $config = merge( $config, $include );
    }
    delete $config->{includes};
  }

  # validate config
  return DBIx::Class::Exception->throw('config has no sets')
    unless $config && $config->{sets} &&
           ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};

  $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
  $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
  $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};

  return $config;
}

sub dump_rs {
  my ($self, $rs, $params) = @_;

  while (my $row = $rs->next) {
    $self->dump_object($row, $params);
  }
}

sub dump_object {
  my ($self, $object, $params) = @_;
  my $set = $params->{set};

  my $v = Data::Visitor::Callback->new(
    plain_value => sub {
      my ($visitor, $data) = @_;
      my $subs = {
        ENV => sub {
          my ( $self, $v ) = @_;
          if (! defined($ENV{$v})) {
            return "";
          } else {
            return $ENV{ $v };
          }
        },
        ATTR => sub {
          my ($self, $v) = @_;
          if(my $attr = $self->config_attrs->{$v}) {
            return $attr;
          } else {
            return "";
          }
        },
        catfile => sub {
          my ($self, @args) = @_;
          catfile(@args);
        },
        catdir => sub {
          my ($self, @args) = @_;
          catdir(@args);
        },
      };

      my $subsre = join( '|', keys %$subs );
      $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

      return $_;
    }
  );

  $v->visit( $set );

  die 'no dir passed to dump_object' unless $params->{set_dir};
  die 'no object passed to dump_object' unless $object;

  my @inherited_attrs = @{$self->_inherited_attributes};

  my @pk_vals = map {
    $object->get_column($_)
  } $object->primary_columns;

  my $key = join("\0", @pk_vals);

  my $src = $object->result_source;
  my $exists = $self->dumped_objects->{$src->name}{$key}++;

  # write dir and gen filename
  my $source_dir = $params->{set_dir}->subdir(lc $src->from);
  $source_dir->mkpath(0, 0777);

  # strip dir separators from file name
  my $file = $source_dir->file(
    join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
  );

  # write file
  unless ($exists) {
    $self->msg('-- dumping ' . $file->stringify, 2);
    my %ds = $object->get_columns;

    if($set->{external}) {
      foreach my $field (keys %{$set->{external}}) {
        my $key = $ds{$field};
        my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
        my $args = $set->{external}->{$field}->{args};

        $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
        eval "use $class";

        $ds{external}->{$field} =
          encode_base64( $class
            ->backup($key => $args));
      }
    }

    # mess with dates if specified
    if ($set->{datetime_relative}) {
      my $formatter = $object->result_source->schema->storage->datetime_parser;
      unless ($@ || !$formatter) {
        my $dt;
        if ($set->{datetime_relative} eq 'today') {
          $dt = DateTime->today;
        } else {
          $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
        }

        while (my ($col, $value) = each %ds) {
          my $col_info = $object->result_source->column_info($col);

          next unless $value
            && $col_info->{_inflate_info}
            && (
                (uc($col_info->{data_type}) eq 'DATETIME')
                  or (uc($col_info->{data_type}) eq 'DATE')
                  or (uc($col_info->{data_type}) eq 'TIME')
                  or (uc($col_info->{data_type}) eq 'TIMESTAMP')
                  or (uc($col_info->{data_type}) eq 'INTERVAL')
               );

          $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
        }
      } else {
        warn "datetime_relative not supported for this db driver at the moment";
      }
    }

    # do the actual dumping
    my $serialized = Dump(\%ds)->Out();
    $file->openw->print($serialized);
  }

  # don't bother looking at rels unless we are actually planning to dump at least one type
  my ($might_have, $belongs_to, $has_many) = map {
    $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
  } qw/might_have belongs_to has_many/;

  return unless $might_have
             || $belongs_to
             || $has_many
             || $set->{fetch};

  # dump rels of object
  unless ($exists) {
    foreach my $name (sort $src->relationships) {
      my $info = $src->relationship_info($name);
      my $r_source = $src->related_source($name);
      # if belongs_to or might_have with might_have param set or has_many with
      # has_many param set then
      if (
        ( $info->{attrs}{accessor} eq 'single' &&
          (!$info->{attrs}{join_type} || $might_have)
        )
        || $info->{attrs}{accessor} eq 'filter'
        ||
        ($info->{attrs}{accessor} eq 'multi' && $has_many)
      ) {
        my $related_rs = $object->related_resultset($name);
        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
        # these parts of the rule only apply to has_many rels
        if ($rule && $info->{attrs}{accessor} eq 'multi') {
          $related_rs = $related_rs->search(
            $rule->{cond},
            { join => $rule->{join} }
          ) if ($rule->{cond});

          $related_rs = $related_rs->search(
            {},
            { rows => $rule->{quantity} }
          ) if ($rule->{quantity} && $rule->{quantity} ne 'all');

          $related_rs = $related_rs->search(
            {},
            { order_by => $rule->{order_by} }
          ) if ($rule->{order_by});

        }
        if ($set->{has_many}{quantity} &&
            $set->{has_many}{quantity} =~ /^\d+$/) {
          $related_rs = $related_rs->search(
            {},
            { rows => $set->{has_many}->{quantity} }
          );
        }

        my %c_params = %{$params};
        # inherit date param
        my %mock_set = map {
          $_ => $set->{$_}
        } grep { $set->{$_} } @inherited_attrs;

        $c_params{set} = \%mock_set;
        $c_params{set} = merge( $c_params{set}, $rule)
          if $rule && $rule->{fetch};

        $self->dump_rs($related_rs, \%c_params);
      }
    }
  }

  return unless $set && $set->{fetch};
  foreach my $fetch (@{$set->{fetch}}) {
    # inherit date param
    $fetch->{$_} = $set->{$_} foreach
      grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
    my $related_rs = $object->related_resultset($fetch->{rel});
    my $rule = $set->{rules}->{$related_rs->result_source->source_name};

    if ($rule) {
      my $info = $object->result_source->relationship_info($fetch->{rel});
      if ($info->{attrs}{accessor} eq 'multi') {
        $fetch = merge( $fetch, $rule );
      } elsif ($rule->{fetch}) {
        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
      }
    }

    die "relationship $fetch->{rel} does not exist for " . $src->source_name
      unless ($related_rs);

    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      $fetch->{cond} = { map {
        $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
                                                : $fetch->{cond}->{$_}
      } keys %{$fetch->{cond}} };
    }

    $related_rs = $related_rs->search(
      $fetch->{cond},
      { join => $fetch->{join} }
    ) if $fetch->{cond};

    $related_rs = $related_rs->search(
      {},
      { rows => $fetch->{quantity} }
    ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
    $related_rs = $related_rs->search(
      {},
      { order_by => $fetch->{order_by} }
    ) if $fetch->{order_by};

    $self->dump_rs($related_rs, { %{$params}, set => $fetch });
  }
}

sub _generate_schema {
  my $self = shift;
  my $params = shift || {};
  require DBI;
  $self->msg("\ncreating schema");

  my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
  eval "require $schema_class";
  die $@ if $@;

  my $pre_schema;
  my $connection_details = $params->{connection_details};

  $namespace_counter++;

  my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace => $schema_class );

  $pre_schema = $namespace->connect(@{$connection_details});
  unless( $pre_schema ) {
    return DBIx::Class::Exception->throw('connection details not valid');
  }
  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
  $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
  my $dbh = $pre_schema->storage->dbh;

  # clear existing db
  $self->msg("- clearing DB of existing tables");
  $pre_schema->storage->txn_do(sub {
    $pre_schema->storage->with_deferred_fk_checks(sub {
      foreach my $table (@tables) {
        eval {
          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
        };
      }
    });
  });

  # import new ddl file to db
  my $ddl_file = $params->{ddl};
  $self->msg("- deploying schema using $ddl_file");
  my $data = _read_sql($ddl_file);
  foreach (@$data) {
    eval { $dbh->do($_) or warn "SQL was:\n $_"};
    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
  }
  $self->msg("- finished importing DDL into DB");

  # load schema object from our new DB
  $namespace_counter++;
  my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
  my $schema = $namespace2->connect(@{$connection_details});
  return $schema;
}

sub _read_sql {
  my $ddl_file = shift;
  my $fh;
  open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
  my @data = split(/\n/, join('', <$fh>));
  @data = grep(!/^--/, @data);
  @data = split(/;/, join('', @data));
  close($fh);
  @data = grep { $_ && $_ !~ /^-- / } @data;
  return \@data;
}

=head2 dump_config_sets

Works just like L</dump> but instead of specifying a single json config set
located in L</config_dir> we dump each set named in the C<configs> parameter.

The parameters are the same as for L</dump> except instead of a C<directory>
parameter we have a C<directory_template> which is a coderef expected to return
a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
example:

 $fixture->dump_config_sets({
   schema => $schema,
   configs => [qw/one.json other.json/],
   directory_template => sub {
     my ($fixture, $params, $set) = @_;
     return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
   },
 });

=cut

sub dump_config_sets {
  my ($self, $params) = @_;
  my $available_config_sets = delete $params->{configs};
  my $directory_template = delete $params->{directory_template} ||
    DBIx::Class::Exception->throw("'directory_template' is a required parameter");

  for my $set (@$available_config_sets) {
    my $localparams = $params;
    $localparams->{directory} = $directory_template->($self, $localparams, $set);
    $localparams->{config} = $set;
    $self->dump($localparams);
    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
  }
}

=head2 dump_all_config_sets

Works just like L</dump> but instead of specifying a single json config set
located in L</config_dir> we dump each set in turn to the specified directory.

The parameters are the same as for L</dump> except instead of a C<directory>
parameter we have a C<directory_template> which is a coderef expected to return
a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
example:

 $fixture->dump_all_config_sets({
   schema => $schema,
   directory_template => sub {
     my ($fixture, $params, $set) = @_;
     return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
   },
 });

=cut

sub dump_all_config_sets {
  my ($self, $params) = @_;
  $self->dump_config_sets({
    %$params,
    configs=>[$self->available_config_sets],
  });
}

=head2 populate

=over 4

=item Arguments: \%$attrs

=item Return Value: 1

=back

 $fixtures->populate( {
   # directory to look for fixtures in, as specified to dump
   directory => '/home/me/app/fixtures',

   # DDL to deploy
   ddl => '/home/me/app/sql/ddl.sql',

   # database to clear, deploy and then populate
   connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],

   # DDL to deploy after populating records, ie. FK constraints
   post_ddl => '/home/me/app/sql/post_ddl.sql',

   # use CASCADE option when dropping tables
   cascade => 1,

   # optional, set to 1 to run ddl but not populate
   no_populate => 0,

   # optional, set to 1 to run each fixture through ->create rather than have
   # each $rs populated using $rs->populate. Useful if you have overridden new()
   # logic that affects the value of column(s).
   use_create => 0,

   # Don't try to clean the database, just populate over what's there. Requires
   # schema option. Use this if you want to handle removing old data yourself
   # no_deploy => 1
   # schema => $schema
 } );

In this case the database app_dev will be cleared of all tables, then the
specified DDL deployed to it, then finally all fixtures found in
/home/me/app/fixtures will be added to it. populate will generate its own
DBIx::Class schema from the DDL rather than being passed one to use. This is
better as custom insert methods are avoided, which can get in the way. In some
cases you might not have a DDL, and so this method will eventually allow a
$schema object to be passed instead.

If needed, you can specify a post_ddl attribute which is a DDL to be applied
after all the fixtures have been added to the database. A good use of this
option would be to add foreign key constraints since databases like PostgreSQL
cannot disable foreign key checks.

If your tables have foreign key constraints you may want to use the cascade
attribute which will make the drop table functionality cascade, ie 'DROP TABLE
$table CASCADE'.

C<directory> is a required attribute.

If you wish for DBIx::Class::Fixtures to clear the database for you pass in
C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
user and pass).

If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
attribute containing the connected schema you wish to operate on and set the
C<no_deploy> attribute.

=cut

sub populate {
  my $self = shift;
  my ($params) = @_;
  DBIx::Class::Exception->throw('first arg to populate must be hash ref')
    unless ref $params eq 'HASH';

  DBIx::Class::Exception->throw('directory param not specified')
    unless $params->{directory};

  my $fixture_dir = dir(delete $params->{directory});
  DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
    unless -d $fixture_dir;

  my $ddl_file;
  my $dbh;
  my $schema;
  if ($params->{ddl} && $params->{connection_details}) {
    $ddl_file = file(delete $params->{ddl});
    unless (-e $ddl_file) {
      return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
    }
    unless (ref $params->{connection_details} eq 'ARRAY') {
      return DBIx::Class::Exception->throw('connection details must be an arrayref');
    }
    $schema = $self->_generate_schema({
      ddl => $ddl_file,
      connection_details => delete $params->{connection_details},
      %{$params}
    });
  } elsif ($params->{schema} && $params->{no_deploy}) {
    $schema = $params->{schema};
  } else {
    DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
  }

  return 1 if $params->{no_populate};

  $self->msg("\nimporting fixtures");
  my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
  my $version_file = file($fixture_dir, '_dumper_version');
  my $config_set_path = file($fixture_dir, '_config_set');
  my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';

  my $v = Data::Visitor::Callback->new(
    plain_value => sub {
      my ($visitor, $data) = @_;
      my $subs = {
        ENV => sub {
          my ( $self, $v ) = @_;
          if (! defined($ENV{$v})) {
            return "";
          } else {
            return $ENV{ $v };
          }
        },
        ATTR => sub {
          my ($self, $v) = @_;
          if(my $attr = $self->config_attrs->{$v}) {
            return $attr;
          } else {
            return "";
          }
        },
        catfile => sub {
          my ($self, @args) = @_;
          catfile(@args);
        },
        catdir => sub {
          my ($self, @args) = @_;
          catdir(@args);
        },
      };

      my $subsre = join( '|', keys %$subs );
      $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

      return $_;
    }
  );

  $v->visit( $config_set );

  my %sets_by_src;
  if($config_set) {
    %sets_by_src = map { delete($_->{class}) => $_ }
      @{$config_set->{sets}}
  }

#  DBIx::Class::Exception->throw('no version file found');
#    unless -e $version_file;

  if (-e $tmp_fixture_dir) {
    $self->msg("- deleting existing temp directory $tmp_fixture_dir");
    $tmp_fixture_dir->rmtree;
  }
  $self->msg("- creating temp dir");
  $tmp_fixture_dir->mkpath();
  for ( map { $schema->source($_)->from } $schema->sources) {
    my $from_dir = $fixture_dir->subdir($_);
    next unless -e $from_dir;
    dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
  }

  unless (-d $tmp_fixture_dir) {
    DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
  }

  my $fixup_visitor;
  my $formatter = $schema->storage->datetime_parser;
  unless ($@ || !$formatter) {
    my %callbacks;
    if ($params->{datetime_relative_to}) {
      $callbacks{'DateTime::Duration'} = sub {
        $params->{datetime_relative_to}->clone->add_duration($_);
      };
    } else {
      $callbacks{'DateTime::Duration'} = sub {
        $formatter->format_datetime(DateTime->today->add_duration($_))
      };
    }
    $callbacks{object} ||= "visit_ref";
    $fixup_visitor = new Data::Visitor::Callback(%callbacks);
  }

  $schema->storage->txn_do(sub {
    $schema->storage->with_deferred_fk_checks(sub {
      foreach my $source (sort $schema->sources) {
        $self->msg("- adding " . $source);
        my $rs = $schema->resultset($source);
        my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
        next unless (-e $source_dir);
        my @rows;
        while (my $file = $source_dir->next) {
          next unless ($file =~ /\.fix$/);
          next if $file->is_dir;
          my $contents = $file->slurp;
          my $HASH1;
          eval($contents);
          $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
          if(my $external = delete $HASH1->{external}) {
            my @fields = keys %{$sets_by_src{$source}->{external}};
            foreach my $field(@fields) {
              my $key = $HASH1->{$field};
              my $content = decode_base64 ($external->{$field});
              my $args = $sets_by_src{$source}->{external}->{$field}->{args};
              my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
              $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
              eval "use $class";
              $class->restore($key, $content, $args);
            }
          }
          if ( $params->{use_create} ) {
            $rs->create( $HASH1 );
          } else {
            push(@rows, $HASH1);
          }
        }
        $rs->populate(\@rows) if scalar(@rows);

        ## Now we need to do some db specific cleanup
        ## this probably belongs in a more isolated space. Right now this is
        ## to just handle postgresql SERIAL types that use Sequences

        my $table = $rs->result_source->name;
        for my $column(my @columns = $rs->result_source->columns) {
          my $info = $rs->result_source->column_info($column);
          if(my $sequence = $info->{sequence}) {
            $self->msg("- updating sequence $sequence");
            $rs->result_source->storage->dbh_do(sub {
              my ($storage, $dbh, @cols) = @_;
              $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
              my $sth = $dbh->prepare($sql);
              my $rv = $sth->execute or die $sth->errstr;
              $self->msg("- $sql");
            });
          }
        }

      }
    });
  });
  $self->do_post_ddl( {
    schema=>$schema,
    post_ddl=>$params->{post_ddl}
  } ) if $params->{post_ddl};

  $self->msg("- fixtures imported");
  $self->msg("- cleaning up");
  $tmp_fixture_dir->rmtree;
  return 1;
}

sub do_post_ddl {
  my ($self, $params) = @_;

  my $schema = $params->{schema};
  my $data = _read_sql($params->{post_ddl});
  foreach (@$data) {
    eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
  }
  $self->msg("- finished importing post-populate DDL into DB");
}

sub msg {
  my $self = shift;
  my $subject = shift || return;
  my $level = shift || 1;
  return unless $self->debug >= $level;
  if (ref $subject) {
    print Dumper($subject);
  } else {
    print $subject . "\n";
  }
}

=head1 AUTHOR

  Luke Saunders <luke@shadowcatsystems.co.uk>

  Initial development sponsored by and (c) Takkle, Inc. 2007

=head1 CONTRIBUTORS

  Ash Berlin <ash@shadowcatsystems.co.uk>

  Matt S. Trout <mst@shadowcatsystems.co.uk>

  Drew Taylor <taylor.andrew.j@gmail.com>

  Frank Switalski <fswitalski@gmail.com>

=head1 LICENSE

  This library is free software under the same license as perl itself

=cut

1;