re-thought the logic, basically stripped the SQLT logic of anything not needed
[dbsrgits/DBIx-Class-Fixtures.git] / lib / DBIx / Class / Fixtures.pmc
CommitLineData
15c3f13e 1package DBIx::Class::Fixtures;
2
3use strict;
4use warnings;
5
6use DBIx::Class 0.08100;
7use DBIx::Class::Exception;
8use Class::Accessor::Grouped;
9use Config::Any::JSON;
10use Data::Dump::Streamer;
11use Data::Visitor::Callback;
12use Hash::Merge qw( merge );
13use Data::Dumper;
14use Class::C3::Componentised;
15use MIME::Base64;
16use IO::All;
17use File::Temp qw/tempdir/;
18
19use base qw(Class::Accessor::Grouped);
20
21our $namespace_counter = 0;
22
23__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
24 _inherited_attributes debug schema_class dumped_objects config_attrs/);
25
26our $VERSION = '1.001028';
27
28$VERSION = eval $VERSION;
29
30=head1 NAME
31
32DBIx::Class::Fixtures - Dump data and repopulate a database using rules
33
34=head1 SYNOPSIS
35
36 use DBIx::Class::Fixtures;
37
38 ...
39
40 my $fixtures = DBIx::Class::Fixtures->new({
41 config_dir => '/home/me/app/fixture_configs'
42 });
43
44 $fixtures->dump({
45 config => 'set_config.json',
46 schema => $source_dbic_schema,
47 directory => '/home/me/app/fixtures'
48 });
49
50 $fixtures->populate({
51 directory => '/home/me/app/fixtures',
52 ddl => '/home/me/app/sql/ddl.sql',
53 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
54 post_ddl => '/home/me/app/sql/post_ddl.sql',
55 });
56
57=head1 DESCRIPTION
58
59Dump fixtures from source database to filesystem then import to another
60database (with same schema) at any time. Use as a constant dataset for running
61tests against or for populating development databases when impractical to use
62production clones. Describe fixture set using relations and conditions based on
63your DBIx::Class schema.
64
65=head1 DEFINE YOUR FIXTURE SET
66
67Fixture sets are currently defined in .json files which must reside in your
68config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
69describe which data to pull and dump from the source database.
70
71For example:
72
73 {
74 "sets": [
75 {
76 "class": "Artist",
77 "ids": ["1", "3"]
78 },
79 {
80 "class": "Producer",
81 "ids": ["5"],
82 "fetch": [
83 {
84 "rel": "artists",
85 "quantity": "2"
86 }
87 ]
88 }
89 ]
90 }
91
92This will fetch artists with primary keys 1 and 3, the producer with primary
93key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
94rel from Producer to Artist.
95
96The top level attributes are as follows:
97
98=head2 sets
99
100Sets must be an array of hashes, as in the example given above. Each set
101defines a set of objects to be included in the fixtures. For details on valid
102set attributes see L</SET ATTRIBUTES> below.
103
104=head2 rules
105
106Rules place general conditions on classes. For example if whenever an artist
107was dumped you also wanted all of their cds dumped too, then you could use a
108rule to specify this. For example:
109
110 {
111 "sets": [
112 {
113 "class": "Artist",
114 "ids": ["1", "3"]
115 },
116 {
117 "class": "Producer",
118 "ids": ["5"],
119 "fetch": [
120 {
121 "rel": "artists",
122 "quantity": "2"
123 }
124 ]
125 }
126 ],
127 "rules": {
128 "Artist": {
129 "fetch": [ {
130 "rel": "cds",
131 "quantity": "all"
132 } ]
133 }
134 }
135 }
136
137In this case all the cds of artists 1, 3 and all producer 5's artists will be
138dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
139to CD. This is equivalent to:
140
141 {
142 "sets": [
143 {
144 "class": "Artist",
145 "ids": ["1", "3"],
146 "fetch": [ {
147 "rel": "cds",
148 "quantity": "all"
149 } ]
150 },
151 {
152 "class": "Producer",
153 "ids": ["5"],
154 "fetch": [ {
155 "rel": "artists",
156 "quantity": "2",
157 "fetch": [ {
158 "rel": "cds",
159 "quantity": "all"
160 } ]
161 } ]
162 }
163 ]
164 }
165
166rules must be a hash keyed by class name.
167
168L</RULE ATTRIBUTES>
169
170=head2 includes
171
172To prevent repetition between configs you can include other configs. For
173example:
174
175 {
176 "sets": [ {
177 "class": "Producer",
178 "ids": ["5"]
179 } ],
180 "includes": [
181 { "file": "base.json" }
182 ]
183 }
184
185Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
186which is the name of another config file in the same directory. The original
187config is merged with its includes using L<Hash::Merge>.
188
189=head2 datetime_relative
190
191Only available for MySQL and PostgreSQL at the moment, must be a value that
192DateTime::Format::* can parse. For example:
193
194 {
195 "sets": [ {
196 "class": "RecentItems",
197 "ids": ["9"]
198 } ],
199 "datetime_relative": "2007-10-30 00:00:00"
200 }
201
202This will work when dumping from a MySQL database and will cause any datetime
203fields (where datatype => 'datetime' in the column def of the schema class) to
204be dumped as a DateTime::Duration object relative to the date specified in the
205datetime_relative value. For example if the RecentItem object had a date field
206set to 2007-10-25, then when the fixture is imported the field will be set to 5
207days in the past relative to the current time.
208
209=head2 might_have
210
211Specifies whether to automatically dump might_have relationships. Should be a
212hash with one attribute - fetch. Set fetch to 1 or 0.
213
214 {
215 "might_have": { "fetch": 1 },
216 "sets": [
217 {
218 "class": "Artist",
219 "ids": ["1", "3"]
220 },
221 {
222 "class": "Producer",
223 "ids": ["5"]
224 }
225 ]
226 }
227
228Note: belongs_to rels are automatically dumped whether you like it or not, this
229is to avoid FKs to nowhere when importing. General rules on has_many rels are
230not accepted at this top level, but you can turn them on for individual sets -
231see L</SET ATTRIBUTES>.
232
233=head1 SET ATTRIBUTES
234
235=head2 class
236
237Required attribute. Specifies the DBIx::Class object class you wish to dump.
238
239=head2 ids
240
241Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
242If the id is not in the source db then it just won't get dumped, no warnings or
243death.
244
245=head2 quantity
246
247Must be either an integer or the string 'all'. Specifying an integer will
248effectively set the 'rows' attribute on the resultset clause, specifying 'all'
249will cause the rows attribute to be left off and for all matching rows to be
250dumped. There's no randomising here, it's just the first x rows.
251
252=head2 cond
253
254A hash specifying the conditions dumped objects must match. Essentially this is
255a JSON representation of a DBIx::Class search clause. For example:
256
257 {
258 "sets": [{
259 "class": "Artist",
260        "quantity": "all",
261 "cond": { "name": "Dave" }
262 }]
263 }
264
265This will dump all artists whose name is 'Dave'. Essentially
266$artist_rs->search({ name => 'Dave' })->all.
267
268Sometimes in a search clause it's useful to use scalar refs to do things like:
269
270 $artist_rs->search({ no1_singles => \'> no1_albums' })
271
272This could be specified in the cond hash like so:
273
274 {
275 "sets": [ {
276 "class": "Artist",
277            "quantity": "all",
278 "cond": { "no1_singles": "\> no1_albums" }
279 } ]
280 }
281
282So if the value starts with a backslash the value is made a scalar ref before
283being passed to search.
284
285=head2 join
286
287An array of relationships to be used in the cond clause.
288
289 {
290 "sets": [ {
291 "class": "Artist",
292            "quantity": "all",
293 "cond": { "cds.position": { ">": 4 } },
294 "join": ["cds"]
295 } ]
296 }
297
298Fetch all artists who have cds with position greater than 4.
299
300=head2 fetch
301
302Must be an array of hashes. Specifies which rels to also dump. For example:
303
304 {
305 "sets": [ {
306 "class": "Artist",
307 "ids": ["1", "3"],
308 "fetch": [ {
309 "rel": "cds",
310 "quantity": "3",
311 "cond": { "position": "2" }
312 } ]
313 } ]
314 }
315
316Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
317
318Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
319'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same
320as in the set attributes. quantity is necessary for has_many relationships, but
321not if using for belongs_to or might_have relationships.
322
323=head2 has_many
324
325Specifies whether to fetch has_many rels for this set. Must be a hash
326containing keys fetch and quantity.
327
328Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
329integer.
330
331Be careful here, dumping has_many rels can lead to a lot of data being dumped.
332
333=head2 might_have
334
335As with has_many but for might_have relationships. Quantity doesn't do anything
336in this case.
337
338This value will be inherited by all fetches in this set. This is not true for
339the has_many attribute.
340
341=head2 external
342
343In some cases your database information might be keys to values in some sort of
344external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
345to store blob information on the filesystem. In this case you may wish to
346back up your external storage in the same way as your database data. The L</external>
347attribute lets you specify a handler for this type of issue. For example:
348
349 {
350 "sets": [{
351 "class": "Photo",
352 "quantity": "all",
353 "external": {
354 "file": {
355 "class": "File",
356 "args": {"path":"__ATTR(photo_dir)__"}
357 }
358 }
359 }]
360 }
361
362This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
363where the path to a file is specified by the C<file> field of the C<Photo> source.
364We use the uninflated value of the field so you need to completely handle backup
365and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>
366and you can create your own custom handlers by placing a '+' in the namespace:
367
368 "class": "+MyApp::Schema::SomeExternalStorage",
369
370Although if possible I'd love to get patches to add some of the other common
371types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
372
373See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
374
375=head1 RULE ATTRIBUTES
376
377=head2 cond
378
379Same as with L</SET ATTRIBUTES>
380
381=head2 fetch
382
383Same as with L</SET ATTRIBUTES>
384
385=head2 join
386
387Same as with L</SET ATTRIBUTES>
388
389=head2 has_many
390
391Same as with L</SET ATTRIBUTES>
392
393=head2 might_have
394
395Same as with L</SET ATTRIBUTES>
396
397=head1 RULE SUBSTITUTIONS
398
399You can provide the following substitution patterns for your rule values. An
400example of this might be:
401
402 {
403 "sets": [{
404 "class": "Photo",
405        "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
406 }]
407 }
408
409=head2 ENV
410
411Provide a value from %ENV
412
413=head2 ATTR
414
415Provide a value from L</config_attrs>
416
417=head2 catfile
418
419Create the path to a file from a list
420
421=head2 catdir
422
423Create the path to a directory from a list
424
425=head1 METHODS
426
427=head2 new
428
429=over 4
430
431=item Arguments: \%$attrs
432
433=item Return Value: $fixture_object
434
435=back
436
437Returns a new DBIx::Class::Fixture object. %attrs can have the following
438parameters:
439
440=over
441
442=item config_dir:
443
444required. must contain a valid path to the directory in which your .json
445configs reside.
446
447=item debug:
448
449determines whether to be verbose
450
451=item ignore_sql_errors:
452
453ignore errors on import of DDL etc
454
455=item config_attrs
456
457A hash of information you can use to do replacements inside your configuration
458sets. For example, if your set looks like:
459
460 {
461 "sets": [ {
462 "class": "Artist",
463 "ids": ["1", "3"],
464 "fetch": [ {
465 "rel": "cds",
466            "quantity": "__ATTR(quantity)__"
467 } ]
468 } ]
469 }
470
471 my $fixtures = DBIx::Class::Fixtures->new( {
472     config_dir => '/home/me/app/fixture_configs',
473 config_attrs => {
474 quantity => 100,
475 },
476 });
477
478You may wish to do this if you want to let whoever runs the dumps have a bit
479more control
480
481=back
482
483 my $fixtures = DBIx::Class::Fixtures->new( {
484 config_dir => '/home/me/app/fixture_configs'
485 } );
486
487=cut
488
sub new {
  my ($class, $params) = @_;

  # The constructor takes a single hashref of options; anything else is an
  # immediate error.
  DBIx::Class::Exception->throw(
    'first arg to DBIx::Class::Fixtures->new() must be hash ref'
  ) unless ref $params eq 'HASH';

  # config_dir is the only mandatory option and must point at an existing
  # directory containing the .json fixture set definitions.
  DBIx::Class::Exception->throw('config_dir param not specified')
    unless $params->{config_dir};

  my $config_dir = io->dir($params->{config_dir});
  DBIx::Class::Exception->throw("config_dir directory doesn't exist")
    unless -e $params->{config_dir};

  my %self = (
    config_dir            => $config_dir,
    _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
    debug                 => $params->{debug} || 0,
    ignore_sql_errors     => $params->{ignore_sql_errors},
    dumped_objects        => {},
    use_create            => $params->{use_create} || 0,
    use_find_or_create    => $params->{use_find_or_create} || 0,
    config_attrs          => $params->{config_attrs} || {},
  );

  return bless \%self, $class;
}
521
522=head2 available_config_sets
523
524Returns a list of all the config sets found in the L</config_dir>. These will
525be a list of the json based files containing dump rules.
526
527=cut
528
# File-scoped cache: the directory is scanned at most once per process.
my @config_sets;
sub available_config_sets {
  my $self = shift;

  # Populate the cache on first call; subsequent calls return the memoized
  # list of .json config file names found in config_dir.
  unless (scalar @config_sets) {
    @config_sets =
      map  { $_->filename }
      grep { -f "$_" && $_ =~ /json$/ }
      $self->config_dir->all;
  }

  return @config_sets;
}
537
538=head2 dump
539
540=over 4
541
542=item Arguments: \%$attrs
543
544=item Return Value: 1
545
546=back
547
548 $fixtures->dump({
549 config => 'set_config.json', # config file to use. must be in the config
550 # directory specified in the constructor
551 schema => $source_dbic_schema,
552 directory => '/home/me/app/fixtures' # output directory
553 });
554
555or
556
557 $fixtures->dump({
558 all => 1, # just dump everything that's in the schema
559 schema => $source_dbic_schema,
560 directory => '/home/me/app/fixtures' # output directory
561 });
562
563In this case objects will be dumped to subdirectories in the specified
564directory. For example:
565
566 /home/me/app/fixtures/artist/1.fix
567 /home/me/app/fixtures/artist/3.fix
568 /home/me/app/fixtures/producer/5.fix
569
570schema and directory are required attributes. also, one of config or all must
571be specified.
572
573Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
574If this form is used your HashRef should conform to the structure rules defined
575for the JSON representations.
576
577=cut
578
# Dump the fixture set described by $params->{config} (or the whole schema
# when $params->{all} is set) from $params->{schema} into
# $params->{directory}.  Work happens in a temp dir which is copied over the
# output dir at the end.  Returns 1 on success; throws on invalid params.
sub dump {
  my $self = shift;

  my ($params) = @_;
  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
  }

  # schema and directory are mandatory for every dump mode.
  foreach my $param (qw/schema directory/) {
    unless ($params->{$param}) {
      return DBIx::Class::Exception->throw($param . ' param not specified');
    }
  }

  if($params->{excludes} && !$params->{all}) {
    return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
  }

  my $schema = $params->{schema};
  my $config;
  if ($params->{config}) {
    # config may be given inline as a hashref, or as the name of a json
    # file relative to config_dir.
    $config = ref $params->{config} eq 'HASH' ?
      $params->{config} :
      do {
        #read config
        my $config_file = io->catfile($self->config_dir, $params->{config});
        $self->load_config_file("$config_file");
      };
  } elsif ($params->{all}) {
    # Synthesise a config covering every source in the schema (minus any
    # excludes), dumping all rows and following no relationships.
    my %excludes = map {$_=>1} @{$params->{excludes}||[]};
    $config = {
      might_have => { fetch => 0 },
      has_many => { fetch => 0 },
      belongs_to => { fetch => 0 },
      sets => [
        map {
          { class => $_, quantity => 'all' };
        } grep {
          !$excludes{$_}
        } $schema->sources],
    };
  } else {
    DBIx::Class::Exception->throw('must pass config or set all');
  }

  my $output_dir = io->dir($params->{directory});
  unless (-e "$output_dir") {
    $output_dir->mkpath ||
    DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
  }

  $self->msg("generating fixtures");
  # Dump into a scratch dir first so a failed run never clobbers an
  # existing fixture set.
  my $tmp_output_dir = io->dir(tempdir);

  if (-e "$tmp_output_dir") {
    $self->msg("- clearing existing $tmp_output_dir");
    $tmp_output_dir->rmtree;
  }
  $self->msg("- creating $tmp_output_dir");
  $tmp_output_dir->mkpath;

  # write version file (for the potential benefit of populate)
  $tmp_output_dir->file('_dumper_version')->print($VERSION);

  # write our current config set
  $tmp_output_dir->file('_config_set')->print( Dumper $config );

  $config->{rules} ||= {};
  my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};

  # Re-key rules by the schema's canonical source_name so lookups below
  # match however the user spelled the class in the config.
  while ( my ($k,$v) = each %{ $config->{rules} } ) {
    if ( my $source = eval { $schema->source($k) } ) {
      $config->{rules}{$source->source_name} = $v;
    }
  }

  foreach my $source (@sources) {
    # apply rule to set if specified
    my $rule = $config->{rules}->{$source->{class}};
    $source = merge( $source, $rule ) if ($rule);

    # fetch objects
    my $rs = $schema->resultset($source->{class});

    if ($source->{cond} and ref $source->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      $source->{cond} = {
        map {
          $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
                                                   : $source->{cond}->{$_}
        } keys %{$source->{cond}}
      };
    }

    $rs = $rs->search($source->{cond}, { join => $source->{join} })
      if $source->{cond};

    $self->msg("- dumping $source->{class}");

    # Per-source dump options: the merged top-level config plus this set.
    my %source_options = ( set => { %{$config}, %{$source} } );
    if ($source->{quantity}) {
      $rs = $rs->search({}, { order_by => $source->{order_by} })
        if $source->{order_by};

      if ($source->{quantity} =~ /^\d+$/) {
        $rs = $rs->search({}, { rows => $source->{quantity} });
      } elsif ($source->{quantity} ne 'all') {
        DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
      }
    }
    elsif ($source->{ids} && @{$source->{ids}}) {
      # ids mode only supports single-column primary keys.
      my @ids = @{$source->{ids}};
      my (@pks) = $rs->result_source->primary_columns;
      die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
      $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
    }
    else {
      DBIx::Class::Exception->throw('must specify either quantity or ids');
    }

    $source_options{set_dir} = $tmp_output_dir;
    $self->dump_rs($rs, \%source_options );
  }

  # clear existing output dir
  foreach my $child ($output_dir->all) {
    if ($child->is_dir) {
      next if ("$child" eq "$tmp_output_dir");
      # only remove subdirs that actually contain fixtures
      if (grep { $_ =~ /\.fix/ } $child->all) {
        $child->rmtree;
      }
    } elsif ($child =~ /_dumper_version$/) {
      $child->unlink;
    }
  }

  $self->msg("- moving temp dir to $output_dir");
  $tmp_output_dir->copy("$output_dir");

  if (-e "$output_dir") {
    $self->msg("- clearing tmp dir $tmp_output_dir");
    # delete existing fixture set
    $tmp_output_dir->rmtree;
  }

  $self->msg("done");

  return 1;
}
729
# Load a json config file, fold in any 'includes', validate that it
# defines at least one set, and apply default relationship policies.
# Returns the config hashref; throws on any validation failure.
sub load_config_file {
  my ($self, $config_file) = @_;

  DBIx::Class::Exception->throw("config does not exist at $config_file")
    unless -e "$config_file";

  my $config = Config::Any::JSON->load($config_file);

  # Merge any included config files into this one before validating.
  if (my $includes = $config->{includes}) {
    $self->msg($includes);
    DBIx::Class::Exception->throw(
      'includes params of config must be an array ref of hashrefs'
    ) unless ref $includes eq 'ARRAY';

    for my $inc (@$includes) {
      DBIx::Class::Exception->throw(
        'includes params of config must be an array ref of hashrefs'
      ) unless (ref $inc eq 'HASH') && $inc->{file};

      my $include_file = $self->config_dir->file($inc->{file});

      DBIx::Class::Exception->throw("config does not exist at $include_file")
        unless -e "$include_file";

      my $include = Config::Any::JSON->load($include_file);
      $self->msg($include);
      $config = merge( $config, $include );
    }
    delete $config->{includes};
  }

  # A usable config must carry a non-empty arrayref of sets.
  return DBIx::Class::Exception->throw('config has no sets')
    unless $config
        && $config->{sets}
        && ref $config->{sets} eq 'ARRAY'
        && scalar @{$config->{sets}};

  # Default relationship-following policies: belongs_to is on (to avoid
  # dangling FKs on import), the rest are off.
  $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
  $config->{has_many}   = { fetch => 0 } unless exists $config->{has_many};
  $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};

  return $config;
}
772
# Dump every row of a resultset using the same dump options.
sub dump_rs {
  my ($self, $resultset, $params) = @_;

  while ( defined( my $row = $resultset->next ) ) {
    $self->dump_object( $row, $params );
  }
}
780
# Serialise one row object to a .fix file under $params->{set_dir}, then
# recursively dump related rows according to the set's might_have /
# belongs_to / has_many policies, rules and explicit fetch clauses.
sub dump_object {
  my ($self, $object, $params) = @_;
  my $set = $params->{set};

  # Expand __ENV(..)__, __ATTR(..)__, __catfile(..)__ and __catdir(..)__
  # substitution tokens in every plain value of the set definition.
  my $v = Data::Visitor::Callback->new(
    plain_value => sub {
      my ($visitor, $data) = @_;
      my $subs = {
       ENV => sub {
          my ( $self, $v ) = @_;
          if (! defined($ENV{$v})) {
            return "";
          } else {
            return $ENV{ $v };
          }
        },
        ATTR => sub {
          my ($self, $v) = @_;
          if(my $attr = $self->config_attrs->{$v}) {
            return $attr;
          } else {
            return "";
          }
        },
        catfile => sub {
          my ($self, @args) = @_;
          "".io->catfile(@args);
        },
        catdir => sub {
          my ($self, @args) = @_;
          "".io->catdir(@args);
        },
      };

      my $subsre = join( '|', keys %$subs );
      $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

      return $_;
    }
  );

  $v->visit( $set );

  die 'no dir passed to dump_object' unless $params->{set_dir};
  die 'no object passed to dump_object' unless $object;

  my @inherited_attrs = @{$self->_inherited_attributes};

  my @pk_vals = map {
    $object->get_column($_)
  } $object->primary_columns;

  # NUL-joined pk values uniquely identify this row per source for dedup.
  my $key = join("\0", @pk_vals);

  my $src = $object->result_source;
  # Post-increment: $exists is true if this row was dumped before, and the
  # row is marked as dumped either way.
  my $exists = $self->dumped_objects->{$src->name}{$key}++;


  # write dir and gen filename
  my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
  $source_dir->mkpath(0, 0777);

  # Convert characters not allowed on windows
  my $file = io->catfile("$source_dir",
      join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
  );

  # write file
  unless ($exists) {
    $self->msg('-- dumping ' . "$file", 2);
    my %ds = $object->get_columns;

    # Back up any externally stored data (e.g. InflateColumn::FS blobs)
    # via the configured handler class, base64-encoded into the fixture.
    if($set->{external}) {
      foreach my $field (keys %{$set->{external}}) {
        my $key = $ds{$field};
        my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
        my $args = $set->{external}->{$field}->{args};

        # A leading '+' means a fully-qualified handler class name.
        $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
        eval "use $class";

        $ds{external}->{$field} =
          encode_base64( $class
           ->backup($key => $args),'');
      }
    }

    # mess with dates if specified
    if ($set->{datetime_relative}) {
      my $formatter= $object->result_source->schema->storage->datetime_parser;
      # NOTE(review): $@ here is whatever the last eval (the external-class
      # 'use' above, possibly earlier) left behind — confirm this guard is
      # intentional rather than a stale-error leak.
      unless ($@ || !$formatter) {
        my $dt;
        if ($set->{datetime_relative} eq 'today') {
          $dt = DateTime->today;
        } else {
          $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
        }

        # Rewrite inflatable date/time columns as durations relative to $dt
        # so populate can reconstruct them relative to "now".
        while (my ($col, $value) = each %ds) {
          my $col_info = $object->result_source->column_info($col);

          next unless $value
            && $col_info->{_inflate_info}
            && (
              (uc($col_info->{data_type}) eq 'DATETIME')
                or (uc($col_info->{data_type}) eq 'DATE')
                or (uc($col_info->{data_type}) eq 'TIME')
                or (uc($col_info->{data_type}) eq 'TIMESTAMP')
                or (uc($col_info->{data_type}) eq 'INTERVAL')
            );

          $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
        }
      } else {
        warn "datetime_relative not supported for this db driver at the moment";
      }
    }

    # do the actual dumping
    my $serialized = Dump(\%ds)->Out();
    $file->print($serialized);
  }

  # don't bother looking at rels unless we are actually planning to dump at least one type
  my ($might_have, $belongs_to, $has_many) = map {
    $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
  } qw/might_have belongs_to has_many/;

  return unless $might_have
             || $belongs_to
             || $has_many
             || $set->{fetch};

  # dump rels of object
  unless ($exists) {
    foreach my $name (sort $src->relationships) {
      my $info = $src->relationship_info($name);
      my $r_source = $src->related_source($name);
      # if belongs_to or might_have with might_have param set or has_many with
      # has_many param set then
      if (
            ( $info->{attrs}{accessor} eq 'single' &&
              (!$info->{attrs}{join_type} || $might_have)
            )
         || $info->{attrs}{accessor} eq 'filter'
         ||
            ($info->{attrs}{accessor} eq 'multi' && $has_many)
      ) {
        my $related_rs = $object->related_resultset($name);
        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
        # these parts of the rule only apply to has_many rels
        if ($rule && $info->{attrs}{accessor} eq 'multi') {
          $related_rs = $related_rs->search(
            $rule->{cond},
            { join => $rule->{join} }
          ) if ($rule->{cond});

          $related_rs = $related_rs->search(
            {},
            { rows => $rule->{quantity} }
          ) if ($rule->{quantity} && $rule->{quantity} ne 'all');

          $related_rs = $related_rs->search(
            {},
            { order_by => $rule->{order_by} }
          ) if ($rule->{order_by});

        }
        # top-level has_many quantity cap applies to every multi rel
        if ($set->{has_many}{quantity} &&
            $set->{has_many}{quantity} =~ /^\d+$/) {
          $related_rs = $related_rs->search(
            {},
            { rows => $set->{has_many}->{quantity} }
          );
        }

        my %c_params = %{$params};
        # inherit date param
        my %mock_set = map {
          $_ => $set->{$_}
        } grep { $set->{$_} } @inherited_attrs;

        $c_params{set} = \%mock_set;
        $c_params{set} = merge( $c_params{set}, $rule)
          if $rule && $rule->{fetch};

        $self->dump_rs($related_rs, \%c_params);
      }
    }
  }

  # Explicit fetch clauses are followed even for already-dumped objects.
  return unless $set && $set->{fetch};
  foreach my $fetch (@{$set->{fetch}}) {
    # inherit date param
    $fetch->{$_} = $set->{$_} foreach
      grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
    my $related_rs = $object->related_resultset($fetch->{rel});
    my $rule = $set->{rules}->{$related_rs->result_source->source_name};

    if ($rule) {
      my $info = $object->result_source->relationship_info($fetch->{rel});
      if ($info->{attrs}{accessor} eq 'multi') {
        $fetch = merge( $fetch, $rule );
      } elsif ($rule->{fetch}) {
        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
      }
    }

    die "relationship $fetch->{rel} does not exist for " . $src->source_name
      unless ($related_rs);

    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      $fetch->{cond} = { map {
          $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
                                                  : $fetch->{cond}->{$_}
      } keys %{$fetch->{cond}} };
    }

    $related_rs = $related_rs->search(
      $fetch->{cond},
      { join => $fetch->{join} }
    ) if $fetch->{cond};

    $related_rs = $related_rs->search(
      {},
      { rows => $fetch->{quantity} }
    ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
    $related_rs = $related_rs->search(
      {},
      { order_by => $fetch->{order_by} }
    ) if $fetch->{order_by};

    $self->dump_rs($related_rs, { %{$params}, set => $fetch });
  }
}
1018
# Connect to the target database, drop every table the schema knows about,
# replay the DDL file given in $params->{ddl}, and return a freshly
# connected schema object against the rebuilt database.
sub _generate_schema {
  my $self = shift;
  my $params = shift || {};
  require DBI;
  $self->msg("\ncreating schema");

  my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
  eval "require $schema_class";
  die $@ if $@;

  my $pre_schema;
  my $connection_details = $params->{connection_details};

  # Each generated schema lives in its own numbered namespace so repeated
  # calls within one process never collide.
  $namespace_counter++;

  my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace => $schema_class );

  $pre_schema = $namespace->connect(@{$connection_details});
  unless( $pre_schema ) {
    return DBIx::Class::Exception->throw('connection details not valid');
  }
  my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
  $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
  my $dbh = $pre_schema->storage->dbh;

  # clear existing db
  $self->msg("- clearing DB of existing tables");
  $pre_schema->storage->txn_do(sub {
    $pre_schema->storage->with_deferred_fk_checks(sub {
      foreach my $table (@tables) {
        # drop failures (e.g. table absent) are deliberately ignored
        eval {
          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
        };
      }
    });
  });

  # import new ddl file to db
  my $ddl_file = $params->{ddl};
  $self->msg("- deploying schema using $ddl_file");
  my $data = _read_sql($ddl_file);
  foreach (@$data) {
    eval { $dbh->do($_) or warn "SQL was:\n $_"};
    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
  }
  $self->msg("- finished importing DDL into DB");

  # load schema object from our new DB
  $namespace_counter++;
  my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
  my $schema = $namespace2->connect(@{$connection_details});
  return $schema;
}
1074
# Read a DDL file and split it into individual SQL statements.
# Full-line '--' comments are dropped, remaining lines are concatenated
# (without newlines — callers rely on this) and split on ';'.
# Returns an arrayref of statement strings; dies if the file can't be opened.
sub _read_sql {
  my $ddl_file = shift;

  # Three-arg open with a lexical filehandle; the old two-arg form
  # ("<$ddl_file") could misparse mode characters embedded in the filename.
  open my $fh, '<', $ddl_file
    or die ("Can't open DDL file, $ddl_file ($!)");
  my @data = split(/\n/, join('', <$fh>));
  close($fh);

  @data = grep(!/^--/, @data);          # strip full-line comments
  @data = split(/;/, join('', @data));  # one element per statement
  @data = grep { $_ && $_ !~ /^-- / } @data;
  return \@data;
}
1086
1087=head2 dump_config_sets
1088
1089Works just like L</dump> but instead of specifying a single json config set
1090located in L</config_dir> we dump each set named in the C<configs> parameter.
1091
1092The parameters are the same as for L</dump> except instead of a C<directory>
1093parameter we have a C<directory_template> which is a coderef expected to return
1094a scalar that is a root directory where we will do the actual dumping. This
1095coderef get three arguments: C<$self>, C<$params> and C<$set_name>. For
1096example:
1097
1098 $fixture->dump_all_config_sets({
1099 schema => $schema,
1100 configs => [qw/one.json other.json/],
1101 directory_template => sub {
1102 my ($fixture, $params, $set) = @_;
1103 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
1104 },
1105 });
1106
1107=cut
1108
# Dump each config set named in $params->{configs}, computing a target
# directory for each via the required $params->{directory_template} coderef
# (called with $self, the per-set params, and the set name).
sub dump_config_sets {
  my ($self, $params) = @_;
  my $available_config_sets = delete $params->{configs};
  my $directory_template = delete $params->{directory_template} ||
    DBIx::Class::Exception->throw("'directory_template is required parameter");

  for my $set (@$available_config_sets) {
    # Shallow-copy the shared params for each set.  Previously the same
    # hashref was reused ($localparams = $params), so the 'directory' and
    # 'config' keys written here leaked into later iterations and back to
    # the caller.
    my $localparams = { %$params };
    $localparams->{directory} = $directory_template->($self, $localparams, $set);
    $localparams->{config} = $set;
    $self->dump($localparams);
    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
  }
}
1123
1124=head2 dump_all_config_sets
1125
1126 my %local_params = %$params;
1127 my $local_self = bless { %$self }, ref($self);
1128 $local_params{directory} = $directory_template->($self, \%local_params, $set);
1129 $local_params{config} = $set;
1130 $self->dump(\%local_params);
1131
1132
1133Works just like L</dump> but instead of specifying a single json config set
1134located in L</config_dir> we dump each set in turn to the specified directory.
1135
1136The parameters are the same as for L</dump> except instead of a C<directory>
1137parameter we have a C<directory_template> which is a coderef expected to return
1138a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1140example:
1141
1142 $fixture->dump_all_config_sets({
1143 schema => $schema,
1144 directory_template => sub {
1145 my ($fixture, $params, $set) = @_;
1146 return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
1147 },
1148 });
1149
1150=cut
1151
# Dump every config set found in config_dir.  Thin wrapper that discovers
# the available sets and delegates the real work to dump_config_sets().
sub dump_all_config_sets {
  my ($self, $params) = @_;

  my %args = %$params;
  $args{configs} = [ $self->available_config_sets ];

  return $self->dump_config_sets(\%args);
}
1159
1160=head2 populate
1161
1162=over 4
1163
1164=item Arguments: \%$attrs
1165
1166=item Return Value: 1
1167
1168=back
1169
1170 $fixtures->populate( {
1171 # directory to look for fixtures in, as specified to dump
1172 directory => '/home/me/app/fixtures',
1173
1174 # DDL to deploy
1175 ddl => '/home/me/app/sql/ddl.sql',
1176
1177 # database to clear, deploy and then populate
1178 connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1179
1180 # DDL to deploy after populating records, ie. FK constraints
1181 post_ddl => '/home/me/app/sql/post_ddl.sql',
1182
1183 # use CASCADE option when dropping tables
1184 cascade => 1,
1185
1186 # optional, set to 1 to run ddl but not populate
1187 no_populate => 0,
1188
1189 # optional, set to 1 to run each fixture through ->create rather than have
1190 # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1191 # that effects the value of column(s).
1192 use_create => 0,
1193
1194 # optional, same as use_create except with find_or_create.
1195 # Useful if you are populating a persistent data store.
1196 use_find_or_create => 0,
1197
1198 # Dont try to clean the database, just populate over whats there. Requires
1199 # schema option. Use this if you want to handle removing old data yourself
1200 # no_deploy => 1
1201 # schema => $schema
1202 } );
1203
1204In this case the database app_dev will be cleared of all tables, then the
1205specified DDL deployed to it, then finally all fixtures found in
1206/home/me/app/fixtures will be added to it. populate will generate its own
1207DBIx::Class schema from the DDL rather than being passed one to use. This is
better as custom insert methods are avoided which can get in the way. In
1209some cases you might not have a DDL, and so this method will eventually allow a
1210$schema object to be passed instead.
1211
1212If needed, you can specify a post_ddl attribute which is a DDL to be applied
1213after all the fixtures have been added to the database. A good use of this
1214option would be to add foreign key constraints since databases like Postgresql
1215cannot disable foreign key checks.
1216
1217If your tables have foreign key constraints you may want to use the cascade
1218attribute which will make the drop table functionality cascade, ie 'DROP TABLE
1219$table CASCADE'.
1220
1221C<directory> is a required attribute.
1222
1223If you wish for DBIx::Class::Fixtures to clear the database for you pass in
C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
1225user and pass).
1226
1227If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1228attribute containing the connected schema you wish to operate on and set the
1229C<no_deploy> attribute.
1230
1231=cut
1232
# Deploy (optionally) and repopulate a database from a dumped fixture set.
# See the =head2 populate POD above for the full parameter list.  Returns 1
# on success; throws DBIx::Class::Exception on bad arguments.
sub populate {
  my $self = shift;
  my ($params) = @_;

  # --- argument validation ------------------------------------------------
  DBIx::Class::Exception->throw('first arg to populate must be hash ref')
    unless ref $params eq 'HASH';

  DBIx::Class::Exception->throw('directory param not specified')
    unless $params->{directory};

  my $fixture_dir = io->dir(delete $params->{directory});
  DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
    unless -d "$fixture_dir";

  # --- obtain a schema: either deploy the supplied DDL into a fresh
  # connection, or reuse a pre-connected schema when no_deploy is set ------
  my $ddl_file;
  my $dbh;    # NOTE(review): declared but never used in this sub
  my $schema;
  if ($params->{ddl} && $params->{connection_details}) {
    $ddl_file = io->file(delete $params->{ddl});
    unless (-e "$ddl_file") {
      return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
    }
    unless (ref $params->{connection_details} eq 'ARRAY') {
      return DBIx::Class::Exception->throw('connection details must be an arrayref');
    }
    # _generate_schema drops existing tables and deploys the DDL, giving us
    # a plain generated schema free of any custom insert logic.
    $schema = $self->_generate_schema({
      ddl => "$ddl_file",
      connection_details => delete $params->{connection_details},
      %{$params}
    });
  } elsif ($params->{schema} && $params->{no_deploy}) {
    $schema = $params->{schema};
  } else {
    DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
  }


  # ddl-only mode: deployment (if any) already happened above
  return 1 if $params->{no_populate};

  $self->msg("\nimporting fixtures");
  my $tmp_fixture_dir = io->dir(tempdir());
  # _config_set is the Data::Dump::Streamer-serialized copy of the config
  # used at dump time; eval'ing it sets $VAR1 to the config structure.
  my $config_set_path = io->file($fixture_dir, '_config_set');
  my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';

  # Substitution pass over the config: expands __ENV(...)__, __ATTR(...)__,
  # __catfile(...)__ and __catdir(...)__ placeholders in plain values.
  my $v = Data::Visitor::Callback->new(
    plain_value => sub {
      my ($visitor, $data) = @_;
      my $subs = {
       ENV => sub {
          my ( $self, $v ) = @_;
          # missing environment variables expand to the empty string
          if (! defined($ENV{$v})) {
            return "";
          } else {
            return $ENV{ $v };
          }
        },
        ATTR => sub {
          my ($self, $v) = @_;
          # looks up values previously stored via config_attrs
          if(my $attr = $self->config_attrs->{$v}) {
            return $attr;
          } else {
            return "";
          }
        },
        catfile => sub {
          my ($self, @args) = @_;
          io->catfile(@args);
        },
        catdir => sub {
          my ($self, @args) = @_;
          io->catdir(@args);
        },
      };

      # __NAME__ or __NAME(arg1,arg2)__ -> $subs->{NAME}->($self, args...)
      my $subsre = join( '|', keys %$subs );
      $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

      return $_;
    }
  );

  $v->visit( $config_set );


  # Index the config's per-source set definitions by result class so the
  # external-data restore step below can find its field descriptions.
  my %sets_by_src;
  if($config_set) {
    %sets_by_src = map { delete($_->{class}) => $_ }
      @{$config_set->{sets}}
  }

  # --- copy fixtures into a private temp dir, one subdir per source ------
  if (-e "$tmp_fixture_dir") {
    $self->msg("- deleting existing temp directory $tmp_fixture_dir");
    $tmp_fixture_dir->rmtree;
  }
  $self->msg("- creating temp dir");
  $tmp_fixture_dir->mkpath();
  for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
    my $from_dir = io->catdir($fixture_dir, $_);
    next unless -e "$from_dir";
    $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
  }

  unless (-d "$tmp_fixture_dir") {
    DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
  }

  # --- optional visitor that turns dumped DateTime::Duration values back
  # into absolute datetimes (relative to datetime_relative_to or today) ---
  my $fixup_visitor;
  my $formatter = $schema->storage->datetime_parser;
  # NOTE(review): $@ is inspected here but no eval precedes it in this sub,
  # so this tests stale error state — presumably left over from an earlier
  # eval-wrapped datetime_parser call; confirm before relying on it.
  unless ($@ || !$formatter) {
    my %callbacks;
    if ($params->{datetime_relative_to}) {
      $callbacks{'DateTime::Duration'} = sub {
        $params->{datetime_relative_to}->clone->add_duration($_);
      };
    } else {
      $callbacks{'DateTime::Duration'} = sub {
        $formatter->format_datetime(DateTime->today->add_duration($_))
      };
    }
    $callbacks{object} ||= "visit_ref";
    $fixup_visitor = new Data::Visitor::Callback(%callbacks);
  }

  # --- load all fixtures inside one transaction, FKs deferred ------------
  $schema->storage->txn_do(sub {
    $schema->storage->with_deferred_fk_checks(sub {
      # sources sorted parents-first so FK targets exist before dependents
      my @sorted_source_names = $self->_get_sorted_sources( $schema );
      foreach my $source (@sorted_source_names) {
        $self->msg("- adding " . $source);
        my $rs = $schema->resultset($source);
        my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
        next unless (-e "$source_dir");
        my @rows;
        while (my $file = $source_dir->next) {
          next unless ($file =~ /\.fix$/);
          next if $file->is_dir;
          my $contents = $file->slurp;
          # each .fix file is Data::Dump::Streamer output assigning $HASH1
          my $HASH1;
          eval($contents);
          $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
          # restore externally-stored (base64-encoded) field data through
          # the configured DBIx::Class::Fixtures::External::* class
          if(my $external = delete $HASH1->{external}) {
            my @fields = keys %{$sets_by_src{$source}->{external}};
            foreach my $field(@fields) {
              my $key = $HASH1->{$field};
              my $content = decode_base64 ($external->{$field});
              my $args = $sets_by_src{$source}->{external}->{$field}->{args};
              # a leading '+' means the class name is already fully qualified
              my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
              $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
              eval "use $class";
              $class->restore($key, $content, $args);
            }
          }
          if ( $params->{use_create} ) {
            $rs->create( $HASH1 );
          } elsif( $params->{use_find_or_create} ) {
            $rs->find_or_create( $HASH1 );
          } else {
            # default path: collect rows for one bulk populate() per source
            push(@rows, $HASH1);
          }
        }
        $rs->populate(\@rows) if scalar(@rows);

        ## Now we need to do some db specific cleanup
        ## this probably belongs in a more isolated space. Right now this is
        ## to just handle postgresql SERIAL types that use Sequences

        my $table = $rs->result_source->name;
        for my $column(my @columns = $rs->result_source->columns) {
          my $info = $rs->result_source->column_info($column);
          if(my $sequence = $info->{sequence}) {
            # bump the sequence past the highest imported PK value
            $self->msg("- updating sequence $sequence");
            $rs->result_source->storage->dbh_do(sub {
              my ($storage, $dbh, @cols) = @_;
              $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
              my $sth = $dbh->prepare($sql);
              my $rv = $sth->execute or die $sth->errstr;
              $self->msg("- $sql");
            });
          }
        }

      }
    });
  });

  # apply any post-population DDL (e.g. FK constraints for Postgres)
  $self->do_post_ddl( {
    schema=>$schema,
    post_ddl=>$params->{post_ddl}
  } ) if $params->{post_ddl};

  $self->msg("- fixtures imported");
  $self->msg("- cleaning up");
  $tmp_fixture_dir->rmtree;
  return 1;
}
1425
89f126ea 1426# the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
1427sub _get_sorted_sources {
1428 my ( $self, $dbicschema ) = @_;
1429
1430
1431 my %table_monikers = map { $_ => 1 } $dbicschema->sources;
1432
1433 my %tables;
1434 foreach my $moniker (sort keys %table_monikers) {
1435 my $source = $dbicschema->source($moniker);
1436
1437 my $table_name = $source->name;
1438 my @primary = $source->primary_columns;
1439 my @rels = $source->relationships();
1440
1441 my %created_FK_rels;
1442 foreach my $rel (sort @rels) {
1443 my $rel_info = $source->relationship_info($rel);
1444
1445 # Ignore any rel cond that isn't a straight hash
1446 next unless ref $rel_info->{cond} eq 'HASH';
1447
1448 my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
1449
1450 # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
1451 my $fk_constraint;
1452 if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
1453 $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
1454 } elsif ( $rel_info->{attrs}{accessor}
1455 && $rel_info->{attrs}{accessor} eq 'multi' ) {
1456 $fk_constraint = 0;
1457 } else {
1458 $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
1459 }
1460
1461 # Dont add a relation if its not constraining
1462 next unless $fk_constraint;
1463
1464 my $rel_table = $source->related_source($rel)->source_name;
1465 # Make sure we don't create the same relation twice
1466 my $key_test = join("\x00", sort @keys);
1467 next if $created_FK_rels{$rel_table}->{$key_test};
1468
1469 if (scalar(@keys)) {
1470 $created_FK_rels{$rel_table}->{$key_test} = 1;
1471
1472 # calculate dependencies: do not consider deferrable constraints and
1473 # self-references for dependency calculations
1474 if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
1475 $tables{$moniker}{$rel_table}++;
1476 }
1477 }
1478 }
1479 $tables{$moniker} = {} unless exists $tables{$moniker};
1480 }
1481
1482 # resolve entire dep tree
1483 my $dependencies = {
1484 map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
1485 };
1486
1487 # return the sorted result
1488 return sort {
1489 keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
1490 ||
1491 $a cmp $b
1492 } (keys %tables);
1493}
1494
# Recursively resolve the transitive dependencies of $question within
# %$answers (a map of source => { direct-dependency => count }).  Returns
# a hashref mapping every transitive dependency to the number of paths
# that reach it.  $seen carries the set of nodes on the current path and
# is used to break cycles (a revisited node contributes no dependencies).
#
# Fix: removed the leftover debugging statement
#   ::Dwarn $subdeps if $dep eq 'downloads';
# which called main::Dwarn -- undefined unless Devel::Dwarn happens to be
# loaded -- and so crashed dependency sorting for any schema containing a
# source named 'downloads'.
sub _resolve_deps {
  my ( $question, $answers, $seen ) = @_;
  my $ret = {};
  $seen ||= {};

  # copy the path-so-far and mark the current node as visited
  my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
  $seen{$question} = 1;

  for my $dep (keys %{ $answers->{$question} }) {
    # cycle guard: this dependency is already on the path above us
    return {} if $seen->{$dep};
    my $subdeps = _resolve_deps( $dep, $answers, \%seen );
    $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
    ++$ret->{$dep};
  }
  return $ret;
}
1512
15c3f13e 1513sub do_post_ddl {
1514 my ($self, $params) = @_;
1515
1516 my $schema = $params->{schema};
1517 my $data = _read_sql($params->{post_ddl});
1518 foreach (@$data) {
1519 eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1520 if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1521 }
1522 $self->msg("- finished importing post-populate DDL into DB");
1523}
1524
# Emit a debug message when the object's debug level is at least $level
# (default 1).  References are pretty-printed via Data::Dumper; plain
# scalars are printed with a trailing newline.  Returns early (empty)
# when there is nothing to say or the debug level is too low.
sub msg {
  my ($self, $subject, $level) = @_;

  return unless $subject;
  $level ||= 1;
  return unless $self->debug >= $level;

  ref $subject
    ? print Dumper($subject)
    : print "$subject\n";
}
1536
1537# Helper method for ensuring that the name used for a given source
1538# is always the same (This is used to name the fixture directories
1539# for example)
1540
# Canonical, stable name for a result source (used e.g. to name fixture
# directories): the table name, unless ->name is a reference (as for
# views defined by a scalar-ref of SQL), in which case the moniker
# (source_name) is used instead.
sub _name_for_source {
    my ($self, $source) = @_;

    my $table_name = $source->name;
    return ref($table_name) ? $source->source_name : $table_name;
}
1546
1547=head1 AUTHOR
1548
1549 Luke Saunders <luke@shadowcatsystems.co.uk>
1550
1551 Initial development sponsored by and (c) Takkle, Inc. 2007
1552
1553=head1 CONTRIBUTORS
1554
1555 Ash Berlin <ash@shadowcatsystems.co.uk>
1556
1557 Matt S. Trout <mst@shadowcatsystems.co.uk>
1558
1559 Drew Taylor <taylor.andrew.j@gmail.com>
1560
1561 Frank Switalski <fswitalski@gmail.com>
1562
1563 Chris Akins <chris.hexx@gmail.com>
1564
1565=head1 LICENSE
1566
1567 This library is free software under the same license as perl itself
1568
1569=cut
1570
15711;