Commit | Line | Data |
e5963c1b |
1 | package DBIx::Class::Fixtures; |
2 | |
3 | use strict; |
4 | use warnings; |
5 | |
6 | use DBIx::Class::Exception; |
b099fee9 |
7 | use Class::Accessor::Grouped; |
e5963c1b |
8 | use Path::Class qw(dir file); |
6116de11 |
9 | use File::Slurp; |
0fc424b7 |
10 | use Config::Any::JSON; |
11 | use Data::Dump::Streamer; |
4fb695f4 |
12 | use Data::Visitor::Callback; |
0fc424b7 |
13 | use File::Path; |
4fb695f4 |
14 | use File::Copy::Recursive qw/dircopy/; |
6116de11 |
15 | use File::Copy qw/move/; |
0fc424b7 |
16 | use Hash::Merge qw( merge ); |
e5963c1b |
17 | use Data::Dumper; |
aa9f3cc7 |
18 | use Class::C3::Componentised; |
e5963c1b |
19 | |
b099fee9 |
20 | use base qw(Class::Accessor::Grouped); |
e5963c1b |
21 | |
aa9f3cc7 |
22 | our $namespace_counter = 0; |
0566a82d |
23 | |
b099fee9 |
24 | __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir _inherited_attributes debug schema_class/); |
e5963c1b |
25 | |
26 | =head1 VERSION |
27 | |
1f671acf |
28 | Version 1.001000 |
e5963c1b |
29 | |
30 | =cut |
31 | |
1f671acf |
32 | our $VERSION = '1.001000'; |
e5963c1b |
33 | |
34 | =head1 NAME |
35 | |
9f96b203 |
36 | DBIx::Class::Fixtures |
37 | |
e5963c1b |
38 | =head1 SYNOPSIS |
39 | |
40 | use DBIx::Class::Fixtures; |
41 | |
42 | ... |
43 | |
44 | my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' }); |
45 | |
46 | $fixtures->dump({ |
47 | config => 'set_config.json', |
48 | schema => $source_dbic_schema, |
49 | directory => '/home/me/app/fixtures' |
50 | }); |
51 | |
52 | $fixtures->populate({ |
53 | directory => '/home/me/app/fixtures', |
54 | ddl => '/home/me/app/sql/ddl.sql', |
f81264b2 |
55 | connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], |
56 | post_ddl => '/home/me/app/sql/post_ddl.sql', |
e5963c1b |
57 | }); |
58 | |
59 | =head1 DESCRIPTION |
60 | |
b099fee9 |
61 | Dump fixtures from source database to filesystem then import to another database (with same schema) |
62 | at any time. Use as a constant dataset for running tests against or for populating development databases |
63 | when impractical to use production clones. Describe fixture set using relations and conditions based |
64 | on your DBIx::Class schema. |
a5561f96 |
65 | |
66 | =head1 DEFINE YOUR FIXTURE SET |
67 | |
b099fee9 |
68 | Fixture sets are currently defined in .json files which must reside in your config_dir |
69 | (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They describe which data to pull and dump |
70 | from the source database. |
a5561f96 |
71 | |
72 | For example: |
73 | |
74 | { |
75 | sets: [{ |
76 | class: 'Artist', |
77 | ids: ['1', '3'] |
78 | }, { |
79 | class: 'Producer', |
80 | ids: ['5'], |
81 | fetch: [{ |
82 | rel: 'artists', |
83 | quantity: '2' |
84 | }] |
85 | }] |
86 | } |
e5963c1b |
87 | |
b099fee9 |
88 | This will fetch artists with primary keys 1 and 3, the producer with primary key 5 and two of producer 5's |
89 | artists where 'artists' is a has_many DBIx::Class rel from Producer to Artist. |
a5561f96 |
90 | |
95566320 |
91 | The top level attributes are as follows: |
92 | |
a5561f96 |
93 | =head2 sets |
94 | |
b099fee9 |
95 | Sets must be an array of hashes, as in the example given above. Each set defines a set of objects to be |
96 | included in the fixtures. For details on valid set attributes see L</SET ATTRIBUTES> below. |
a5561f96 |
97 | |
98 | =head2 rules |
99 | |
b099fee9 |
100 | Rules place general conditions on classes. For example if whenever an artist was dumped you also wanted all |
101 | of their cds dumped too, then you could use a rule to specify this. For example: |
a5561f96 |
102 | |
103 | { |
104 | sets: [{ |
105 | class: 'Artist', |
106 | ids: ['1', '3'] |
107 | }, { |
108 | class: 'Producer', |
109 | ids: ['5'], |
110 | fetch: [{ |
111 | rel: 'artists', |
112 | quantity: '2' |
113 | }] |
114 | }], |
115 | rules: { |
116 | Artist: { |
117 | fetch: [{ |
118 | rel: 'cds', |
119 | quantity: 'all' |
120 | }] |
121 | } |
122 | } |
123 | } |
6116de11 |
124 | |
b099fee9 |
125 | In this case all the cds of artists 1, 3 and all producer 5's artists will be dumped as well. Note that 'cds' is a |
126 | has_many DBIx::Class relation from Artist to CD. This is equivalent to: |
a5561f96 |
127 | |
128 | { |
129 | sets: [{ |
130 | class: 'Artist', |
131 | ids: ['1', '3'], |
132 | fetch: [{ |
133 | rel: 'cds', |
134 | quantity: 'all' |
135 | }] |
136 | }, { |
137 | class: 'Producer', |
138 | ids: ['5'], |
139 | fetch: [{ |
140 | rel: 'artists', |
141 | quantity: '2', |
142 | fetch: [{ |
143 | rel: 'cds', |
144 | quantity: 'all' |
145 | }] |
146 | }] |
147 | }] |
148 | } |
149 | |
150 | rules must be a hash keyed by class name. |
151 | |
95566320 |
152 | L</RULE ATTRIBUTES> |
153 | |
96f2cd20 |
154 | =head2 includes |
155 | |
156 | To prevent repetition between configs you can include other configs. For example: |
157 | |
158 | { |
159 | sets: [{ |
160 | class: 'Producer', |
161 | ids: ['5'] |
162 | }], |
163 | includes: [{ |
164 | file: 'base.json' |
165 | }] |
166 | } |
167 | |
168 | Includes must be an arrayref of hashrefs where the hashrefs have key 'file' which is the name of another config |
169 | file in the same directory. The original config is merged with its includes using Hash::Merge. |
170 | |
95566320 |
171 | =head2 datetime_relative |
172 | |
b099fee9 |
173 | Only available for MySQL and PostgreSQL at the moment, must be a value that DateTime::Format::* |
174 | can parse. For example: |
95566320 |
175 | |
176 | { |
177 | sets: [{ |
178 | class: 'RecentItems', |
179 | ids: ['9'] |
180 | }], |
181 | datetime_relative : "2007-10-30 00:00:00" |
182 | } |
183 | |
b099fee9 |
184 | This will work when dumping from a MySQL database and will cause any datetime fields (where datatype => 'datetime' |
185 | in the column def of the schema class) to be dumped as a DateTime::Duration object relative to the date specified in |
186 | the datetime_relative value. For example if the RecentItem object had a date field set to 2007-10-25, then when the |
187 | fixture is imported the field will be set to 5 days in the past relative to the current time. |
95566320 |
188 | |
a5561f96 |
189 | =head2 might_have |
190 | |
191 | Specifies whether to automatically dump might_have relationships. Should be a hash with one attribute - fetch. Set fetch to 1 or 0. |
192 | |
193 | { |
c40935c5 |
194 | might_have: { |
a5561f96 |
195 | fetch: 1 |
196 | }, |
197 | sets: [{ |
198 | class: 'Artist', |
199 | ids: ['1', '3'] |
200 | }, { |
201 | class: 'Producer', |
202 | ids: ['5'] |
203 | }] |
204 | } |
205 | |
b099fee9 |
206 | Note: belongs_to rels are automatically dumped whether you like it or not, this is to avoid FKs to nowhere when importing. |
207 | General rules on has_many rels are not accepted at this top level, but you can turn them on for individual |
208 | sets - see L</SET ATTRIBUTES>. |
a5561f96 |
209 | |
210 | =head1 SET ATTRIBUTES |
211 | |
212 | =head2 class |
213 | |
214 | Required attribute. Specifies the DBIx::Class object class you wish to dump. |
215 | |
216 | =head2 ids |
217 | |
b099fee9 |
218 | Array of primary key ids to fetch, basically causing an $rs->find($_) for each. If the id is not in the source db then it |
219 | just won't get dumped, no warnings or death. |
a5561f96 |
220 | |
221 | =head2 quantity |
222 | |
b099fee9 |
223 | Must be either an integer or the string 'all'. Specifying an integer will effectively set the 'rows' attribute on the resultset clause, |
224 | specifying 'all' will cause the rows attribute to be left off and for all matching rows to be dumped. There's no randomising |
225 | here, it's just the first x rows. |
a5561f96 |
226 | |
227 | =head2 cond |
228 | |
229 | A hash specifying the conditions dumped objects must match. Essentially this is a JSON representation of a DBIx::Class search clause. For example: |
230 | |
231 | { |
232 | sets: [{ |
233 | class: 'Artist', |
234 | quantity: 'all', |
235 | cond: { name: 'Dave' } |
236 | }] |
237 | } |
238 | |
239 | This will dump all artists whose name is 'Dave'. Essentially $artist_rs->search({ name => 'Dave' })->all. |
240 | |
241 | Sometimes in a search clause it's useful to use scalar refs to do things like: |
242 | |
243 | $artist_rs->search({ no1_singles => \'> no1_albums' }) |
244 | |
245 | This could be specified in the cond hash like so: |
246 | |
247 | { |
248 | sets: [{ |
249 | class: 'Artist', |
250 | quantity: 'all', |
251 | cond: { no1_singles: '\> no1_albums' } |
252 | }] |
253 | } |
254 | |
255 | So if the value starts with a backslash the value is made a scalar ref before being passed to search. |
256 | |
257 | =head2 join |
258 | |
259 | An array of relationships to be used in the cond clause. |
260 | |
261 | { |
262 | sets: [{ |
263 | class: 'Artist', |
264 | quantity: 'all', |
265 | cond: { 'cds.position': { '>': 4 } }, |
266 | join: ['cds'] |
267 | }] |
268 | } |
269 | |
270 | Fetch all artists who have cds with position greater than 4. |
271 | |
272 | =head2 fetch |
273 | |
274 | Must be an array of hashes. Specifies which rels to also dump. For example: |
275 | |
276 | { |
277 | sets: [{ |
278 | class: 'Artist', |
279 | ids: ['1', '3'], |
280 | fetch: [{ |
281 | rel: 'cds', |
282 | quantity: '3', |
283 | cond: { position: '2' } |
284 | }] |
285 | }] |
286 | } |
287 | |
288 | Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2. |
289 | |
b099fee9 |
290 | Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and 'join'. rel is the name of the DBIx::Class |
291 | rel to follow, the rest are the same as in the set attributes. quantity is necessary for has_many relationships, |
292 | but not if using for belongs_to or might_have relationships. |
a5561f96 |
293 | |
294 | =head2 has_many |
295 | |
296 | Specifies whether to fetch has_many rels for this set. Must be a hash containing keys fetch and quantity. |
297 | |
298 | Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an integer. |
299 | |
95566320 |
300 | Be careful here, dumping has_many rels can lead to a lot of data being dumped. |
301 | |
a5561f96 |
302 | =head2 might_have |
303 | |
304 | As with has_many but for might_have relationships. Quantity doesn't do anything in this case. |
305 | |
306 | This value will be inherited by all fetches in this set. This is not true for the has_many attribute. |
307 | |
308 | =head1 RULE ATTRIBUTES |
309 | |
310 | =head2 cond |
311 | |
312 | Same as with L</SET ATTRIBUTES> |
313 | |
314 | =head2 fetch |
315 | |
316 | Same as with L</SET ATTRIBUTES> |
317 | |
318 | =head2 join |
319 | |
320 | Same as with L</SET ATTRIBUTES> |
321 | |
322 | =head2 has_many |
323 | |
324 | Same as with L</SET ATTRIBUTES> |
325 | |
326 | =head2 might_have |
327 | |
328 | Same as with L</SET ATTRIBUTES> |
e5963c1b |
329 | |
0fc424b7 |
330 | =head1 METHODS |
331 | |
332 | =head2 new |
e5963c1b |
333 | |
a5561f96 |
334 | =over 4 |
335 | |
336 | =item Arguments: \%$attrs |
337 | |
338 | =item Return Value: $fixture_object |
339 | |
340 | =back |
341 | |
1ac1b0d7 |
342 | Returns a new DBIx::Class::Fixture object. %attrs can have the following parameters: |
343 | |
344 | - config_dir: required. must contain a valid path to the directory in which your .json configs reside. |
345 | - debug: determines whether to be verbose |
346 | - ignore_sql_errors: ignore errors on import of DDL etc |
347 | |
a5561f96 |
348 | |
349 | my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' }); |
350 | |
0fc424b7 |
351 | =cut |
e5963c1b |
352 | |
# Constructor.  Takes a hashref of options:
#   config_dir        - (required) directory holding the .json set configs
#   debug             - verbose output when true (defaults to 0)
#   ignore_sql_errors - don't die on DDL import errors during populate
# Throws a DBIx::Class::Exception on a non-hashref argument, a missing
# config_dir param, or a config_dir path that does not exist.
sub new {
  my ($class, $params) = @_;

  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
  }

  unless ($params->{config_dir}) {
    return DBIx::Class::Exception->throw('config_dir param not specified');
  }

  my $config_dir = dir($params->{config_dir});
  unless (-e $params->{config_dir}) {
    return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
  }

  # _inherited_attributes lists the set attributes that dump_object
  # propagates down to fetched relations
  my $self = {
    config_dir            => $config_dir,
    _inherited_attributes => [qw/datetime_relative might_have rules/],
    debug                 => $params->{debug} || 0,
    ignore_sql_errors     => $params->{ignore_sql_errors},
  };

  return bless $self, $class;
}
381 | |
0fc424b7 |
382 | =head2 dump |
383 | |
a5561f96 |
384 | =over 4 |
385 | |
386 | =item Arguments: \%$attrs |
387 | |
388 | =item Return Value: 1 |
389 | |
390 | =back |
391 | |
392 | $fixtures->dump({ |
393 | config => 'set_config.json', # config file to use. must be in the config directory specified in the constructor |
394 | schema => $source_dbic_schema, |
395 | directory => '/home/me/app/fixtures' # output directory |
396 | }); |
397 | |
2ef30e95 |
398 | or |
399 | |
400 | $fixtures->dump({ |
401 | all => 1, # just dump everything that's in the schema |
402 | schema => $source_dbic_schema, |
403 | directory => '/home/me/app/fixtures' # output directory |
404 | }); |
405 | |
a5561f96 |
406 | In this case objects will be dumped to subdirectories in the specified directory. For example: |
407 | |
408 | /home/me/app/fixtures/artist/1.fix |
409 | /home/me/app/fixtures/artist/3.fix |
410 | /home/me/app/fixtures/producer/5.fix |
411 | |
2ef30e95 |
412 | schema and directory are required attributes. also, one of config or all must be specified. |
a5561f96 |
413 | |
0fc424b7 |
414 | =cut |
415 | |
# dump: export a fixture set from a live schema into a directory tree.
#
# Arguments: hashref with
#   schema    - (required) the source DBIx::Class schema object
#   directory - (required) output directory; one subdir per table, one
#               .fix file per row
#   config    - name of a .json config file inside config_dir, OR
#   all       - true to dump every source with quantity 'all'
#
# Returns 1 on success; throws DBIx::Class::Exception on invalid input.
# Fixtures are written to a per-user temp dir first and moved into place
# at the end, so a failed dump does not clobber an existing set.
sub dump {
  my $self = shift;

  my ($params) = @_;
  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
  }

  foreach my $param (qw/schema directory/) {
    unless ($params->{$param}) {
      return DBIx::Class::Exception->throw($param . ' param not specified');
    }
  }

  my $schema = $params->{schema};
  my $config_file;
  my $config;
  if ($params->{config}) {
    # read config
    $config_file = file($self->config_dir, $params->{config});
    unless (-e $config_file) {
      return DBIx::Class::Exception->throw('config does not exist at ' . $config_file);
    }
    $config = Config::Any::JSON->load($config_file);

    # process includes: each include file is loaded from config_dir and
    # merged into this config via Hash::Merge
    if ($config->{includes}) {
      $self->msg($config->{includes});
      unless (ref $config->{includes} eq 'ARRAY') {
        return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
      }
      foreach my $include_config (@{$config->{includes}}) {
        unless ((ref $include_config eq 'HASH') && $include_config->{file}) {
          return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
        }

        my $include_file = file($self->config_dir, $include_config->{file});
        unless (-e $include_file) {
          return DBIx::Class::Exception->throw('config does not exist at ' . $include_file);
        }
        my $include = Config::Any::JSON->load($include_file);
        $self->msg($include);
        $config = merge( $config, $include );
      }
      delete $config->{includes};
    }

    # validate config: must contain a non-empty 'sets' arrayref
    unless ($config && $config->{sets} && ref $config->{sets} eq 'ARRAY' && scalar(@{$config->{sets}})) {
      return DBIx::Class::Exception->throw('config has no sets');
    }

    # rel-fetching defaults: belongs_to on (to avoid dangling FKs),
    # might_have/has_many off
    $config->{might_have} = { fetch => 0 } unless (exists $config->{might_have});
    $config->{has_many} = { fetch => 0 } unless (exists $config->{has_many});
    $config->{belongs_to} = { fetch => 1 } unless (exists $config->{belongs_to});
  } elsif ($params->{all}) {
    # 'all' mode: synthesize a config covering every source in the schema
    $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] };
    print Dumper($config);
  } else {
    return DBIx::Class::Exception->throw('must pass config or set all');
  }

  my $output_dir = dir($params->{directory});
  unless (-e $output_dir) {
    $output_dir->mkpath ||
    return DBIx::Class::Exception->throw('output directory does not exist at ' . $output_dir);
  }

  $self->msg("generating fixtures");
  # temp dir name includes the real uid ($<) so concurrent users don't collide
  my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);

  if (-e $tmp_output_dir) {
    $self->msg("- clearing existing $tmp_output_dir");
    $tmp_output_dir->rmtree;
  }
  $self->msg("- creating $tmp_output_dir");
  $tmp_output_dir->mkpath;

  # write version file (for the potential benefit of populate)
  my $version_file = file($tmp_output_dir, '_dumper_version');
  write_file($version_file->stringify, $VERSION);

  $config->{rules} ||= {};
  my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
  my %options = ( is_root => 1 );
  $self->{queue} = [];
  foreach my $source (@sources) {
    # apply rule to set if specified
    my $rule = $config->{rules}->{$source->{class}};
    $source = merge( $source, $rule ) if ($rule);

    # fetch objects
    my $rs = $schema->resultset($source->{class});

    if ($source->{cond} and ref $source->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
      # ideally this would substitute deeply
      $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
    }

    $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
    $self->msg("- dumping $source->{class}");
    my @objects;
    my %source_options = ( set => { %{$config}, %{$source} } );
    # 'quantity' selects the first N rows (or all); 'ids' does a find()
    # per primary key; at least one of the two must be present
    if ($source->{quantity}) {
      $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
      if ($source->{quantity} eq 'all') {
        push (@objects, $rs->all);
      } elsif ($source->{quantity} =~ /^\d+$/) {
        push (@objects, $rs->search({}, { rows => $source->{quantity} }));
      } else {
        DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
      }
    }
    if ($source->{ids}) {
      my @ids = @{$source->{ids}};
      # rows missing from the source db are silently skipped (grep { $_ })
      my @id_objects = grep { $_ } map { $rs->find($_) } @ids;
      push (@objects, @id_objects);
    }
    unless ($source->{quantity} || $source->{ids}) {
      DBIx::Class::Exception->throw('must specify either quantity or ids');
    }

    # dump objects
    foreach my $object (@objects) {
      $source_options{set_dir} = $tmp_output_dir;
      $self->dump_object($object, { %options, %source_options } );
      next;
    }
  }

  # drain any work dump_object queued up ([$object, $params] entries)
  while (my $entry = shift @{$self->{queue}}) {
    $self->dump_object(@$entry);
  }

  # clear existing output dir: remove old per-table fixture dirs and the
  # previous version marker, leaving unrelated files/dirs alone
  foreach my $child ($output_dir->children) {
    if ($child->is_dir) {
      next if ($child eq $tmp_output_dir);
      if (grep { $_ =~ /\.fix/ } $child->children) {
        $child->rmtree;
      }
    } elsif ($child =~ /_dumper_version$/) {
      $child->remove;
    }
  }

  $self->msg("- moving temp dir to $output_dir");
  move($_, dir($output_dir, $_->relative($_->parent)->stringify)) for $tmp_output_dir->children;
  if (-e $output_dir) {
    $self->msg("- clearing tmp dir $tmp_output_dir");
    # delete existing fixture set (temp dir is empty now its children moved)
    $tmp_output_dir->remove;
  }

  $self->msg("done");

  return 1;
}
575 | |
# dump_object: serialize one row object to a .fix file and recursively
# dump its related objects according to the current set's options.
#
# Arguments:
#   $object  - a DBIx::Class row object (required)
#   $params  - hashref; must contain set_dir (output dir) and set (the
#              merged set/config options for this dump)
#   $rr_info - accepted but not used in this body
#
# A file that already exists is not re-written (also acts as the cycle
# guard for recursion).  Dies on missing set_dir or object.
sub dump_object {
  my ($self, $object, $params, $rr_info) = @_;
  my $set = $params->{set};
  die 'no dir passed to dump_object' unless $params->{set_dir};
  die 'no object passed to dump_object' unless $object;

  my @inherited_attrs = @{$self->_inherited_attributes};

  # write dir and gen filename (one dir per table, named after the table)
  my $source_dir = dir($params->{set_dir}, lc($object->result_source->from));
  mkdir($source_dir->stringify, 0777);

  # strip dir separators from file name; filename is the joined primary
  # key values plus '.fix'
  my $file = file($source_dir, join('-', map {
    ( my $a = $object->get_column($_) ) =~ s|[/\\]|_|g; $a;
  } sort $object->primary_columns) . '.fix');

  # write file
  my $exists = (-e $file->stringify) ? 1 : 0;
  unless ($exists) {
    $self->msg('-- dumping ' . $file->stringify, 2);
    my %ds = $object->get_columns;

    my $formatter= $object->result_source->schema->storage->datetime_parser;
    # mess with dates if specified: datetime columns are rewritten as
    # DateTime::Duration offsets from the datetime_relative anchor, so
    # populate can re-base them on import
    if ($set->{datetime_relative}) {
      unless ($@ || !$formatter) {
        my $dt;
        if ($set->{datetime_relative} eq 'today') {
          $dt = DateTime->today;
        } else {
          $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
        }

        while (my ($col, $value) = each %ds) {
          my $col_info = $object->result_source->column_info($col);

          # only touch inflated DATETIME columns with a defined value
          next unless $value
            && $col_info->{_inflate_info}
            && uc($col_info->{data_type}) eq 'DATETIME';

          $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
        }
      } else {
        warn "datetime_relative not supported for this db driver at the moment";
      }
    }

    # do the actual dumping (Data::Dump::Streamer serialization)
    my $serialized = Dump(\%ds)->Out();
    write_file($file->stringify, $serialized);
    my $mode = 0777; chmod $mode, $file->stringify;
  }

  # don't bother looking at rels unless we are actually planning to dump at least one type
  return unless ($set->{might_have}->{fetch} || $set->{belongs_to}->{fetch} || $set->{has_many}->{fetch} || $set->{fetch});

  # dump rels of object
  my $s = $object->result_source;
  unless ($exists) {
    foreach my $name (sort $s->relationships) {
      my $info = $s->relationship_info($name);
      my $r_source = $s->related_source($name);
      # if belongs_to or might_have with might_have param set or has_many with has_many param set then
      # (accessor 'single' without join_type = belongs_to, always followed;
      #  'single' with join_type = might_have, gated on the might_have option;
      #  'filter' always; 'multi' gated on the has_many option)
      if (($info->{attrs}{accessor} eq 'single' && (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))) || $info->{attrs}{accessor} eq 'filter' || ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))) {
        my $related_rs = $object->related_resultset($name);
        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
        # these parts of the rule only apply to has_many rels
        if ($rule && $info->{attrs}{accessor} eq 'multi') {
          $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
          $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
          $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
        }
        if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
          $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
        }
        my %c_params = %{$params};
        # inherit date param (and the other _inherited_attributes) into
        # the child set
        my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
        $c_params{set} = \%mock_set;
        # use Data::Dumper; print ' -- ' . Dumper($c_params{set}, $rule->{fetch}) if ($rule && $rule->{fetch});
        $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
        # use Data::Dumper; print ' -- ' . Dumper(\%c_params) if ($rule && $rule->{fetch});
        $self->dump_object($_, \%c_params) foreach $related_rs->all;
      }
    }
  }

  # now process the explicit per-set 'fetch' directives
  return unless $set && $set->{fetch};
  foreach my $fetch (@{$set->{fetch}}) {
    # inherit date param
    $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
    my $related_rs = $object->related_resultset($fetch->{rel});
    my $rule = $set->{rules}->{$related_rs->result_source->source_name};
    if ($rule) {
      # has_many rels get the whole rule merged in; other rels only
      # inherit the rule's nested fetch directives
      my $info = $object->result_source->relationship_info($fetch->{rel});
      if ($info->{attrs}{accessor} eq 'multi') {
        $fetch = merge( $fetch, $rule );
      } elsif ($rule->{fetch}) {
        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
      }
    }
    die "relationship " . $fetch->{rel} . " does not exist for " . $s->source_name unless ($related_rs);
    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref to dbic
      # ideally this would substitute deeply
      $fetch->{cond} = { map { $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_} : $fetch->{cond}->{$_} } keys %{$fetch->{cond}} };
    }
    $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} }) if ($fetch->{cond});
    $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} }) if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
    $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} }) if ($fetch->{order_by});
    $self->dump_object($_, { %{$params}, set => $fetch }) foreach $related_rs->all;
  }
}
690 | |
384c3f0c |
# _generate_schema: build a fresh schema object connected to the target
# database, dropping any existing tables and deploying the supplied DDL.
#
# Arguments: hashref with
#   connection_details - arrayref of DBI connect args
#   ddl                - path to the DDL file to deploy
#   cascade            - append ' cascade' to each DROP TABLE when true
#
# Returns the connected schema object.  Each call injects the schema
# class into a uniquely-numbered generated namespace so repeated calls
# in one process don't collide.
sub _generate_schema {
  my $self = shift;
  my $params = shift || {};
  require DBI;
  $self->msg("\ncreating schema");
  # die 'must pass version param to generate_schema_from_ddl' unless $params->{version};

  my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
  # runtime-load the (possibly user-supplied) schema class
  eval "require $schema_class";
  die $@ if $@;

  my $pre_schema;
  my $connection_details = $params->{connection_details};
  $namespace_counter++;
  my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
  Class::C3::Componentised->inject_base( $namespace => $schema_class );
  $pre_schema = $namespace->connect(@{$connection_details});
  unless( $pre_schema ) {
    return DBIx::Class::Exception->throw('connection details not valid');
  }
  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
  $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
  my $dbh = $pre_schema->storage->dbh;

  # clear existing db
  $self->msg("- clearing DB of existing tables");
  # MySQL-specific; wrapped in eval so other drivers just ignore it
  eval { $dbh->do('SET foreign_key_checks=0') };
  foreach my $table (@tables) {
    # best-effort drop: a table may not exist yet
    eval { $dbh->do('drop table ' . $table . ($params->{cascade} ? ' cascade' : '') ) };
  }

  # import new ddl file to db
  my $ddl_file = $params->{ddl};
  $self->msg("- deploying schema using $ddl_file");
  my $data = _read_sql($ddl_file);
  foreach (@$data) {
    eval { $dbh->do($_) or warn "SQL was:\n $_"};
    # die on SQL errors unless the user opted out at construction time
    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
  }
  $self->msg("- finished importing DDL into DB");

  # load schema object from our new DB (a second fresh namespace so the
  # returned schema doesn't share state with the pre-deploy connection)
  $namespace_counter++;
  my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
  Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
  my $schema = $namespace2->connect(@{$connection_details});
  return $schema;
}
739 | |
f81264b2 |
# _read_sql: parse a DDL/SQL file into individual statements.
#
# Arguments: path to a SQL file.
# Returns: arrayref of statement strings (trailing ';' removed).
#
# Full-line '--' comments are discarded, the remaining lines are joined
# and split on ';'; empty fragments and comment residue are dropped.
# Note the join uses no separator, matching the original behaviour.
sub _read_sql {
  my $ddl_file = shift;

  # three-arg open with a lexical handle; the original two-arg form
  # ("<$ddl_file") was open to mode injection via the filename
  open my $fh, '<', $ddl_file or die ("Can't open DDL file, $ddl_file ($!)");
  my @data = split(/\n/, join('', <$fh>));
  close($fh) or die ("Can't close DDL file, $ddl_file ($!)");

  @data = grep(!/^--/, @data);                 # drop full-line comments
  @data = split(/;/, join('', @data));         # split into statements
  @data = grep { $_ && $_ !~ /^-- / } @data;   # drop empties / comment residue
  return \@data;
}
a5561f96 |
751 | |
752 | =head2 populate |
753 | |
754 | =over 4 |
755 | |
756 | =item Arguments: \%$attrs |
757 | |
758 | =item Return Value: 1 |
759 | |
760 | =back |
761 | |
762 | $fixtures->populate({ |
763 | directory => '/home/me/app/fixtures', # directory to look for fixtures in, as specified to dump |
764 | ddl => '/home/me/app/sql/ddl.sql', # DDL to deploy |
f81264b2 |
765 | connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], # database to clear, deploy and then populate |
766 | post_ddl => '/home/me/app/sql/post_ddl.sql', # DDL to deploy after populating records, ie. FK constraints |
767 | cascade => 1, # use CASCADE option when dropping tables |
a5561f96 |
768 | }); |
769 | |
95566320 |
770 | In this case the database app_dev will be cleared of all tables, then the specified DDL deployed to it, |
a5561f96 |
771 | then finally all fixtures found in /home/me/app/fixtures will be added to it. populate will generate |
772 | its own DBIx::Class schema from the DDL rather than being passed one to use. This is better as |
95566320 |
773 | custom insert methods are avoided which can get in the way. In some cases you might not |
a5561f96 |
774 | have a DDL, and so this method will eventually allow a $schema object to be passed instead. |
775 | |
f81264b2 |
776 | If needed, you can specify a post_ddl attribute which is a DDL to be applied after all the fixtures |
777 | have been added to the database. A good use of this option would be to add foreign key constraints |
778 | since databases like Postgresql cannot disable foreign key checks. |
779 | |
780 | If your tables have foreign key constraints you may want to use the cascade attribute which will |
781 | make the drop table functionality cascade, ie 'DROP TABLE $table CASCADE'. |
782 | |
a5561f96 |
783 | directory, ddl and connection_details are all required attributes. |
784 | |
785 | =cut |
786 | |
384c3f0c |
787 | sub populate { |
788 | my $self = shift; |
789 | my ($params) = @_; |
790 | unless (ref $params eq 'HASH') { |
791 | return DBIx::Class::Exception->throw('first arg to populate must be hash ref'); |
792 | } |
793 | |
794 | foreach my $param (qw/directory/) { |
795 | unless ($params->{$param}) { |
796 | return DBIx::Class::Exception->throw($param . ' param not specified'); |
797 | } |
798 | } |
9a9a7832 |
799 | my $fixture_dir = dir(delete $params->{directory}); |
384c3f0c |
800 | unless (-e $fixture_dir) { |
801 | return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir); |
802 | } |
803 | |
804 | my $ddl_file; |
805 | my $dbh; |
806 | if ($params->{ddl} && $params->{connection_details}) { |
9a9a7832 |
807 | $ddl_file = file(delete $params->{ddl}); |
384c3f0c |
808 | unless (-e $ddl_file) { |
809 | return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file); |
810 | } |
811 | unless (ref $params->{connection_details} eq 'ARRAY') { |
812 | return DBIx::Class::Exception->throw('connection details must be an arrayref'); |
813 | } |
814 | } elsif ($params->{schema}) { |
815 | return DBIx::Class::Exception->throw('passing a schema is not supported at the moment'); |
816 | } else { |
817 | return DBIx::Class::Exception->throw('you must set the ddl and connection_details params'); |
818 | } |
819 | |
9a9a7832 |
820 | my $schema = $self->_generate_schema({ ddl => $ddl_file, connection_details => delete $params->{connection_details}, %{$params} }); |
4fb695f4 |
821 | $self->msg("\nimporting fixtures"); |
384c3f0c |
822 | my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<); |
823 | |
824 | my $version_file = file($fixture_dir, '_dumper_version'); |
825 | unless (-e $version_file) { |
826 | # return DBIx::Class::Exception->throw('no version file found'); |
827 | } |
828 | |
829 | if (-e $tmp_fixture_dir) { |
830 | $self->msg("- deleting existing temp directory $tmp_fixture_dir"); |
4fb695f4 |
831 | $tmp_fixture_dir->rmtree; |
384c3f0c |
832 | } |
833 | $self->msg("- creating temp dir"); |
0caf5ad6 |
834 | dircopy(dir($fixture_dir, $schema->source($_)->from), dir($tmp_fixture_dir, $schema->source($_)->from)) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources; |
384c3f0c |
835 | |
384c3f0c |
836 | my $fixup_visitor; |
b099fee9 |
837 | my $formatter= $schema->storage->datetime_parser; |
0566a82d |
838 | unless ($@ || !$formatter) { |
839 | my %callbacks; |
840 | if ($params->{datetime_relative_to}) { |
841 | $callbacks{'DateTime::Duration'} = sub { |
842 | $params->{datetime_relative_to}->clone->add_duration($_); |
843 | }; |
844 | } else { |
845 | $callbacks{'DateTime::Duration'} = sub { |
846 | $formatter->format_datetime(DateTime->today->add_duration($_)) |
847 | }; |
848 | } |
849 | $callbacks{object} ||= "visit_ref"; |
850 | $fixup_visitor = new Data::Visitor::Callback(%callbacks); |
384c3f0c |
851 | } |
1ac1b0d7 |
852 | |
853 | my $db = $schema->storage->dbh->{Driver}->{Name}; |
854 | my $dbi_class = "DBIx::Class::Fixtures::DBI::$db"; |
855 | |
856 | eval "require $dbi_class"; |
857 | if ($@) { |
858 | $dbi_class = "DBIx::Class::Fixtures::DBI"; |
859 | eval "require $dbi_class"; |
860 | die $@ if $@; |
384c3f0c |
861 | } |
862 | |
1ac1b0d7 |
863 | $dbi_class->do_insert($schema, sub { |
864 | foreach my $source (sort $schema->sources) { |
865 | $self->msg("- adding " . $source); |
866 | my $rs = $schema->resultset($source); |
867 | my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from)); |
868 | next unless (-e $source_dir); |
869 | my @rows; |
870 | while (my $file = $source_dir->next) { |
871 | next unless ($file =~ /\.fix$/); |
872 | next if $file->is_dir; |
873 | my $contents = $file->slurp; |
874 | my $HASH1; |
875 | eval($contents); |
876 | $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor; |
877 | push(@rows, $HASH1); |
878 | } |
d3ef0865 |
879 | $rs->populate(\@rows) if (scalar(@rows)); |
1ac1b0d7 |
880 | } |
881 | }); |
882 | |
6a05e381 |
883 | $self->do_post_ddl({schema=>$schema, post_ddl=>$params->{post_ddl}}) if $params->{post_ddl}; |
f81264b2 |
884 | |
384c3f0c |
885 | $self->msg("- fixtures imported"); |
886 | $self->msg("- cleaning up"); |
887 | $tmp_fixture_dir->rmtree; |
4fb695f4 |
888 | eval { $schema->storage->dbh->do('SET foreign_key_checks=1') }; |
b099fee9 |
889 | |
890 | return 1; |
384c3f0c |
891 | } |
892 | |
6a05e381 |
893 | sub do_post_ddl { |
894 | my ($self, $params) = @_; |
895 | |
896 | my $schema = $params->{schema}; |
897 | my $data = _read_sql($params->{post_ddl}); |
898 | foreach (@$data) { |
899 | eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"}; |
1ac1b0d7 |
900 | if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; } |
6a05e381 |
901 | } |
902 | $self->msg("- finished importing post-populate DDL into DB"); |
903 | } |
904 | |
0fc424b7 |
905 | sub msg { |
906 | my $self = shift; |
907 | my $subject = shift || return; |
9a9a7832 |
908 | my $level = shift || 1; |
9a9a7832 |
909 | return unless $self->debug >= $level; |
0fc424b7 |
910 | if (ref $subject) { |
911 | print Dumper($subject); |
912 | } else { |
913 | print $subject . "\n"; |
914 | } |
915 | } |
a5561f96 |
916 | |
917 | =head1 AUTHOR |
918 | |
919 | Luke Saunders <luke@shadowcatsystems.co.uk> |
920 | |
3b4f6e76 |
921 | Initial development sponsored by and (c) Takkle, Inc. 2007 |
922 | |
a5561f96 |
923 | =head1 CONTRIBUTORS |
924 | |
925 | Ash Berlin <ash@shadowcatsystems.co.uk> |
926 | Matt S. Trout <mst@shadowcatsystems.co.uk> |
fc17c598 |
927 | Drew Taylor <taylor.andrew.j@gmail.com> |
a5561f96 |
928 | |
3b4f6e76 |
929 | =head1 LICENSE |
930 | |
931 | This library is free software under the same license as perl itself |
932 | |
a5561f96 |
933 | =cut |
934 | |
e5963c1b |
935 | 1; |