Commit | Line | Data |
e5963c1b |
package DBIx::Class::Fixtures;

use strict;
use warnings;

use DBIx::Class 0.08100;
use DBIx::Class::Exception;
use Class::Accessor::Grouped;
use Path::Class qw(dir file);
use Config::Any::JSON;
use Data::Dump::Streamer;
use Data::Visitor::Callback;
use File::Path;
use File::Copy::Recursive qw/dircopy/;
use File::Copy qw/move/;
use Hash::Merge qw( merge );
use Data::Dumper;
use Class::C3::Componentised;

use base qw(Class::Accessor::Grouped);

# Monotonic counter used to mint unique package names for the schema
# classes generated at runtime in _generate_schema().
our $namespace_counter = 0;

# Simple read/write accessors (via Class::Accessor::Grouped).
# NOTE(review): schema_class has an accessor but is never set by new() in
# this file -- presumably callers set it directly; confirm.
__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
                                 _inherited_attributes debug schema_class dumped_objects/);

=head1 VERSION

Version 1.001004

=cut

our $VERSION = '1.001004';
e5963c1b |
34 | |
35 | =head1 NAME |
36 | |
9f96b203 |
37 | DBIx::Class::Fixtures |
38 | |
e5963c1b |
39 | =head1 SYNOPSIS |
40 | |
8a1df391 |
41 | use DBIx::Class::Fixtures; |
e5963c1b |
42 | |
8a1df391 |
43 | ... |
e5963c1b |
44 | |
8a1df391 |
45 | my $fixtures = DBIx::Class::Fixtures->new({ |
46 | config_dir => '/home/me/app/fixture_configs' |
47 | }); |
e5963c1b |
48 | |
8a1df391 |
49 | $fixtures->dump({ |
50 | config => 'set_config.json', |
51 | schema => $source_dbic_schema, |
52 | directory => '/home/me/app/fixtures' |
53 | }); |
e5963c1b |
54 | |
8a1df391 |
55 | $fixtures->populate({ |
56 | directory => '/home/me/app/fixtures', |
57 | ddl => '/home/me/app/sql/ddl.sql', |
58 | connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], |
59 | post_ddl => '/home/me/app/sql/post_ddl.sql', |
60 | }); |
e5963c1b |
61 | |
62 | =head1 DESCRIPTION |
63 | |
8a1df391 |
64 | Dump fixtures from source database to filesystem then import to another |
65 | database (with same schema) at any time. Use as a constant dataset for running |
66 | tests against or for populating development databases when impractical to use |
67 | production clones. Describe fixture set using relations and conditions based on |
68 | your DBIx::Class schema. |
a5561f96 |
69 | |
70 | =head1 DEFINE YOUR FIXTURE SET |
71 | |
8a1df391 |
72 | Fixture sets are currently defined in .json files which must reside in your |
73 | config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They |
74 | describe which data to pull and dump from the source database. |
a5561f96 |
75 | |
76 | For example: |
77 | |
8a1df391 |
78 | { |
79 | sets: [ |
80 | { |
81 | class: 'Artist', |
82 | ids: ['1', '3'] |
83 | }, |
84 | { |
85 | class: 'Producer', |
86 | ids: ['5'], |
87 | fetch: [ |
88 | { |
89 | rel: 'artists', |
90 | quantity: '2' |
91 | } |
92 | ] |
93 | } |
94 | ] |
95 | } |
96 | |
97 | This will fetch artists with primary keys 1 and 3, the producer with primary |
98 | key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class |
99 | rel from Producer to Artist. |
a5561f96 |
100 | |
95566320 |
101 | The top level attributes are as follows: |
102 | |
a5561f96 |
103 | =head2 sets |
104 | |
8a1df391 |
105 | Sets must be an array of hashes, as in the example given above. Each set |
106 | defines a set of objects to be included in the fixtures. For details on valid |
107 | set attributes see L</SET ATTRIBUTES> below. |
a5561f96 |
108 | |
109 | =head2 rules |
110 | |
8a1df391 |
111 | Rules place general conditions on classes. For example if whenever an artist |
112 | was dumped you also wanted all of their cds dumped too, then you could use a |
113 | rule to specify this. For example: |
114 | |
115 | { |
116 | sets: [ |
117 | { |
118 | class: 'Artist', |
119 | ids: ['1', '3'] |
120 | }, |
121 | { |
122 | class: 'Producer', |
123 | ids: ['5'], |
124 | fetch: [ |
125 | { |
126 | rel: 'artists', |
127 | quantity: '2' |
128 | } |
129 | ] |
130 | } |
131 | ], |
132 | rules: { |
133 | Artist: { |
134 | fetch: [ { |
135 | rel: 'cds', |
136 | quantity: 'all' |
137 | } ] |
138 | } |
139 | } |
140 | } |
141 | |
142 | In this case all the cds of artists 1, 3 and all producer 5's artists will be |
143 | dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist |
144 | to CD. This is equivalent to: |
145 | |
146 | { |
147 | sets: [ |
a5561f96 |
148 | { |
8a1df391 |
149 | class: 'Artist', |
150 | ids: ['1', '3'], |
151 | fetch: [ { |
152 | rel: 'cds', |
153 | quantity: 'all' |
154 | } ] |
155 | }, |
156 | { |
157 | class: 'Producer', |
158 | ids: ['5'], |
159 | fetch: [ { |
160 | rel: 'artists', |
161 | quantity: '2', |
162 | fetch: [ { |
163 | rel: 'cds', |
164 | quantity: 'all' |
165 | } ] |
166 | } ] |
167 | } |
168 | ] |
169 | } |
a5561f96 |
170 | |
171 | rules must be a hash keyed by class name. |
172 | |
95566320 |
173 | L</RULE ATTRIBUTES> |
174 | |
96f2cd20 |
175 | =head2 includes |
176 | |
8a1df391 |
177 | To prevent repetition between configs you can include other configs. For |
178 | example: |
96f2cd20 |
179 | |
8a1df391 |
180 | { |
181 | sets: [ { |
182 | class: 'Producer', |
183 | ids: ['5'] |
184 | } ], |
185 | includes: [ |
186 | { file: 'base.json' } |
187 | ] |
188 | } |
96f2cd20 |
189 | |
8a1df391 |
190 | Includes must be an arrayref of hashrefs where the hashrefs have key 'file' |
191 | which is the name of another config file in the same directory. The original |
192 | config is merged with its includes using L<Hash::Merge>. |
96f2cd20 |
193 | |
95566320 |
194 | =head2 datetime_relative |
195 | |
8a1df391 |
196 | Only available for MySQL and PostgreSQL at the moment, must be a value that |
197 | DateTime::Format::* can parse. For example: |
95566320 |
198 | |
8a1df391 |
199 | { |
200 | sets: [ { |
201 | class: 'RecentItems', |
202 | ids: ['9'] |
203 | } ], |
204 | datetime_relative : "2007-10-30 00:00:00" |
205 | } |
95566320 |
206 | |
8a1df391 |
207 | This will work when dumping from a MySQL database and will cause any datetime |
208 | fields (where datatype => 'datetime' in the column def of the schema class) to |
209 | be dumped as a DateTime::Duration object relative to the date specified in the |
210 | datetime_relative value. For example if the RecentItem object had a date field |
211 | set to 2007-10-25, then when the fixture is imported the field will be set to 5 |
212 | days in the past relative to the current time. |
95566320 |
213 | |
a5561f96 |
214 | =head2 might_have |
215 | |
8a1df391 |
216 | Specifies whether to automatically dump might_have relationships. Should be a |
217 | hash with one attribute - fetch. Set fetch to 1 or 0. |
218 | |
219 | { |
220 | might_have: { fetch: 1 }, |
221 | sets: [ |
222 | { |
223 | class: 'Artist', |
224 | ids: ['1', '3'] |
225 | }, |
226 | { |
227 | class: 'Producer', |
228 | ids: ['5'] |
229 | } |
230 | ] |
231 | } |
232 | |
233 | Note: belongs_to rels are automatically dumped whether you like it or not, this |
234 | is to avoid FKs to nowhere when importing. General rules on has_many rels are |
235 | not accepted at this top level, but you can turn them on for individual sets - |
236 | see L</SET ATTRIBUTES>. |
a5561f96 |
237 | |
238 | =head1 SET ATTRIBUTES |
239 | |
240 | =head2 class |
241 | |
242 | Required attribute. Specifies the DBIx::Class object class you wish to dump. |
243 | |
244 | =head2 ids |
245 | |
8a1df391 |
246 | Array of primary key ids to fetch, basically causing an $rs->find($_) for each. |
247 | If the id is not in the source db then it just won't get dumped, no warnings or |
248 | death. |
a5561f96 |
249 | |
250 | =head2 quantity |
251 | |
8a1df391 |
252 | Must be either an integer or the string 'all'. Specifying an integer will |
253 | effectively set the 'rows' attribute on the resultset clause, specifying 'all' |
254 | will cause the rows attribute to be left off and for all matching rows to be |
255 | dumped. There's no randomising here, it's just the first x rows. |
a5561f96 |
256 | |
257 | =head2 cond |
258 | |
8a1df391 |
259 | A hash specifying the conditions dumped objects must match. Essentially this is |
260 | a JSON representation of a DBIx::Class search clause. For example: |
a5561f96 |
261 | |
8a1df391 |
262 | { |
263 | sets: [{ |
264 | class: 'Artist', |
265 | quantity: 'all', |
266 | cond: { name: 'Dave' } |
267 | }] |
268 | } |
a5561f96 |
269 | |
8a1df391 |
270 | This will dump all artists whose name is 'Dave'. Essentially |
271 | $artist_rs->search({ name => 'Dave' })->all. |
a5561f96 |
272 | |
273 | Sometimes in a search clause it's useful to use scalar refs to do things like: |
274 | |
8a1df391 |
275 | $artist_rs->search({ no1_singles => \'> no1_albums' }) |
a5561f96 |
276 | |
277 | This could be specified in the cond hash like so: |
278 | |
8a1df391 |
279 | { |
280 | sets: [ { |
281 | class: 'Artist', |
282 | quantity: 'all', |
283 | cond: { no1_singles: '\> no1_albums' } |
284 | } ] |
285 | } |
a5561f96 |
286 | |
8a1df391 |
287 | So if the value starts with a backslash the value is made a scalar ref before |
288 | being passed to search. |
a5561f96 |
289 | |
290 | =head2 join |
291 | |
292 | An array of relationships to be used in the cond clause. |
293 | |
8a1df391 |
294 | { |
295 | sets: [ { |
296 | class: 'Artist', |
297 | quantity: 'all', |
298 | cond: { 'cds.position': { '>': 4 } }, |
299 | join: ['cds'] |
300 | } ] |
301 | } |
a5561f96 |
302 | |
303 | Fetch all artists who have cds with position greater than 4. |
304 | |
305 | =head2 fetch |
306 | |
307 | Must be an array of hashes. Specifies which rels to also dump. For example: |
308 | |
8a1df391 |
309 | { |
310 | sets: [ { |
311 | class: 'Artist', |
312 | ids: ['1', '3'], |
313 | fetch: [ { |
314 | rel: 'cds', |
315 | quantity: '3', |
316 | cond: { position: '2' } |
317 | } ] |
318 | } ] |
319 | } |
a5561f96 |
320 | |
321 | Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2. |
322 | |
8a1df391 |
323 | Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and |
324 | 'join'. rel is the name of the DBIx::Class rel to follow, the rest are the same |
325 | as in the set attributes. quantity is necessary for has_many relationships, but |
326 | not if using for belongs_to or might_have relationships. |
a5561f96 |
327 | |
328 | =head2 has_many |
329 | |
8a1df391 |
330 | Specifies whether to fetch has_many rels for this set. Must be a hash |
331 | containing keys fetch and quantity. |
a5561f96 |
332 | |
8a1df391 |
333 | Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an |
334 | integer. |
a5561f96 |
335 | |
95566320 |
336 | Be careful here, dumping has_many rels can lead to a lot of data being dumped. |
337 | |
a5561f96 |
338 | =head2 might_have |
339 | |
8a1df391 |
340 | As with has_many but for might_have relationships. Quantity doesn't do anything |
341 | in this case. |
a5561f96 |
342 | |
8a1df391 |
343 | This value will be inherited by all fetches in this set. This is not true for |
344 | the has_many attribute. |
a5561f96 |
345 | |
346 | =head1 RULE ATTRIBUTES |
347 | |
348 | =head2 cond |
349 | |
350 | Same as with L</SET ATTRIBUTES> |
351 | |
352 | =head2 fetch |
353 | |
354 | Same as with L</SET ATTRIBUTES> |
355 | |
356 | =head2 join |
357 | |
358 | Same as with L</SET ATTRIBUTES> |
359 | |
360 | =head2 has_many |
361 | |
362 | Same as with L</SET ATTRIBUTES> |
363 | |
364 | =head2 might_have |
365 | |
366 | Same as with L</SET ATTRIBUTES> |
e5963c1b |
367 | |
0fc424b7 |
368 | =head1 METHODS |
369 | |
370 | =head2 new |
e5963c1b |
371 | |
a5561f96 |
372 | =over 4 |
373 | |
374 | =item Arguments: \%$attrs |
375 | |
376 | =item Return Value: $fixture_object |
377 | |
378 | =back |
379 | |
8a1df391 |
380 | Returns a new DBIx::Class::Fixture object. %attrs can have the following |
381 | parameters: |
382 | |
383 | =over |
384 | |
385 | =item config_dir: |
386 | |
387 | required. must contain a valid path to the directory in which your .json |
388 | configs reside. |
389 | |
390 | =item debug: |
391 | |
392 | determines whether to be verbose |
393 | |
394 | =item ignore_sql_errors: |
1ac1b0d7 |
395 | |
8a1df391 |
396 | ignore errors on import of DDL etc |
1ac1b0d7 |
397 | |
8a1df391 |
398 | =back |
a5561f96 |
399 | |
8a1df391 |
400 | my $fixtures = DBIx::Class::Fixtures->new( { |
401 | config_dir => '/home/me/app/fixture_configs' |
402 | } ); |
a5561f96 |
403 | |
0fc424b7 |
404 | =cut |
e5963c1b |
405 | |
# Construct a fixtures object.  Takes a single hashref of options:
# config_dir (required, must exist), debug, ignore_sql_errors.
# Throws a DBIx::Class::Exception on invalid arguments.
sub new {
    my ($class, $params) = @_;

    ref $params eq 'HASH'
        or return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');

    $params->{config_dir}
        or return DBIx::Class::Exception->throw('config_dir param not specified');

    # Wrap the config dir in a Path::Class::Dir for later ->file() calls.
    my $config_dir = dir($params->{config_dir});
    -e $params->{config_dir}
        or return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');

    my $self = bless {
        config_dir            => $config_dir,
        # Attributes propagated from a set down into its 'fetch' children.
        _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
        debug                 => $params->{debug} || 0,
        ignore_sql_errors     => $params->{ignore_sql_errors},
        # Tracks already-dumped rows (per source, keyed by PK) so rels
        # don't dump the same object twice.
        dumped_objects        => {},
    }, $class;

    return $self;
}
435 | |
0fc424b7 |
436 | =head2 dump |
437 | |
a5561f96 |
438 | =over 4 |
439 | |
440 | =item Arguments: \%$attrs |
441 | |
442 | =item Return Value: 1 |
443 | |
444 | =back |
445 | |
8a1df391 |
446 | $fixtures->dump({ |
447 | config => 'set_config.json', # config file to use. must be in the config |
448 | # directory specified in the constructor |
449 | schema => $source_dbic_schema, |
450 | directory => '/home/me/app/fixtures' # output directory |
451 | }); |
a5561f96 |
452 | |
8a1df391 |
453 | or |
2ef30e95 |
454 | |
8a1df391 |
455 | $fixtures->dump({ |
456 | all => 1, # just dump everything that's in the schema |
457 | schema => $source_dbic_schema, |
458 | directory => '/home/me/app/fixtures' # output directory |
459 | }); |
2ef30e95 |
460 | |
8a1df391 |
461 | In this case objects will be dumped to subdirectories in the specified |
462 | directory. For example: |
a5561f96 |
463 | |
8a1df391 |
464 | /home/me/app/fixtures/artist/1.fix |
465 | /home/me/app/fixtures/artist/3.fix |
466 | /home/me/app/fixtures/producer/5.fix |
a5561f96 |
467 | |
2ef30e95 |
468 | schema and directory are required attributes. also, one of config or all must be specified. |
a5561f96 |
469 | |
0fc424b7 |
470 | =cut |
471 | |
# Dump a fixture set from $params->{schema} to $params->{directory}.
#
# Required params: schema, directory.  One of 'config' (a config file name
# in config_dir) or 'all' (dump every source, quantity 'all') must also be
# given.  Fixtures are written to a temp subdirectory first and moved into
# place at the end, so a partially failed dump does not clobber an
# existing fixture set.  Returns 1 on success; throws on bad input.
sub dump {
  my $self = shift;

  my ($params) = @_;
  unless (ref $params eq 'HASH') {
    return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
  }

  foreach my $param (qw/schema directory/) {
    unless ($params->{$param}) {
      return DBIx::Class::Exception->throw($param . ' param not specified');
    }
  }

  my $schema = $params->{schema};
  my $config;
  if ($params->{config}) {
    # read config
    my $config_file = $self->config_dir->file($params->{config});
    $config = $self->load_config_file($config_file);
  } elsif ($params->{all}) {
    # Synthesise a config covering every source, no relationship fetching.
    $config = {
      might_have => { fetch => 0 },
      has_many => { fetch => 0 },
      belongs_to => { fetch => 0 },
      sets => [map {{ class => $_, quantity => 'all' }} $schema->sources]
    };
  } else {
    DBIx::Class::Exception->throw('must pass config or set all');
  }

  my $output_dir = dir($params->{directory});
  unless (-e $output_dir) {
    $output_dir->mkpath ||
      DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
  }

  $self->msg("generating fixtures");
  # Per-user temp dir ($< is the real UID) inside the output dir, so the
  # final move is a same-filesystem rename.
  my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);

  if (-e $tmp_output_dir) {
    $self->msg("- clearing existing $tmp_output_dir");
    $tmp_output_dir->rmtree;
  }
  $self->msg("- creating $tmp_output_dir");
  $tmp_output_dir->mkpath;

  # write version file (for the potential benefit of populate)
  $tmp_output_dir->file('_dumper_version')
                 ->openw
                 ->print($VERSION);

  $config->{rules} ||= {};
  # Deterministic dump order: sort sets by class name.
  my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};

  foreach my $source (@sources) {
    # apply rule to set if specified
    my $rule = $config->{rules}->{$source->{class}};
    $source = merge( $source, $rule ) if ($rule);

    # fetch objects
    my $rs = $schema->resultset($source->{class});

    if ($source->{cond} and ref $source->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      # NOTE(review): the s/// both strips the backslash in place and acts
      # as the truth test -- the ref is taken to the already-modified value.
      $source->{cond} = {
        map {
          $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
                                                   : $source->{cond}->{$_}
        } keys %{$source->{cond}}
      };
    }

    $rs = $rs->search($source->{cond}, { join => $source->{join} })
      if $source->{cond};

    $self->msg("- dumping $source->{class}");

    # Set-level options win over config-level ones for this source.
    my %source_options = ( set => { %{$config}, %{$source} } );
    if ($source->{quantity}) {
      $rs = $rs->search({}, { order_by => $source->{order_by} })
        if $source->{order_by};

      if ($source->{quantity} =~ /^\d+$/) {
        $rs = $rs->search({}, { rows => $source->{quantity} });
      } elsif ($source->{quantity} ne 'all') {
        DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
      }
    }
    elsif ($source->{ids} && @{$source->{ids}}) {
      # Explicit PK list; only single-column primary keys are supported.
      my @ids = @{$source->{ids}};
      my (@pks) = $rs->result_source->primary_columns;
      die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
      $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
    }
    else {
      DBIx::Class::Exception->throw('must specify either quantity or ids');
    }

    $source_options{set_dir} = $tmp_output_dir;
    $self->dump_rs($rs, \%source_options );
  }

  # clear existing output dir: remove old fixture subdirs (anything
  # containing .fix files) and any stale version marker, but keep
  # unrelated files and our own temp dir.
  foreach my $child ($output_dir->children) {
    if ($child->is_dir) {
      next if ($child eq $tmp_output_dir);
      if (grep { $_ =~ /\.fix/ } $child->children) {
        $child->rmtree;
      }
    } elsif ($child =~ /_dumper_version$/) {
      $child->remove;
    }
  }

  $self->msg("- moving temp dir to $output_dir");
  move($_, dir($output_dir, $_->relative($_->parent)->stringify))
    for $tmp_output_dir->children;

  # NOTE(review): this condition tests $output_dir, which must exist at
  # this point -- presumably -e $tmp_output_dir was intended; confirm.
  # remove() only rmdir's, which works here because the children were
  # just moved out.
  if (-e $output_dir) {
    $self->msg("- clearing tmp dir $tmp_output_dir");
    # delete existing fixture set
    $tmp_output_dir->remove;
  }

  $self->msg("done");

  return 1;
}
602 | |
8a1df391 |
# Load a fixture-set config file (JSON), resolve its 'includes', and
# apply defaults for the relationship-fetch flags.  Returns the finished
# config hashref; throws via DBIx::Class::Exception on validation failure.
sub load_config_file {
    my ($self, $config_file) = @_;

    DBIx::Class::Exception->throw("config does not exist at $config_file")
        unless -e $config_file;

    my $config = Config::Any::JSON->load($config_file);

    # Merge in any included config files before validating.
    if (my $includes = $config->{includes}) {
        $self->msg($includes);

        DBIx::Class::Exception->throw(
            'includes params of config must be an array ref of hashrefs'
        ) unless ref $includes eq 'ARRAY';

        for my $inc (@$includes) {
            DBIx::Class::Exception->throw(
                'includes params of config must be an array ref of hashrefs'
            ) unless (ref $inc eq 'HASH') && $inc->{file};

            # Included files are resolved relative to config_dir.
            my $inc_file = $self->config_dir->file($inc->{file});

            DBIx::Class::Exception->throw("config does not exist at $inc_file")
                unless -e $inc_file;

            my $inc_config = Config::Any::JSON->load($inc_file);
            $self->msg($inc_config);

            # Hash::Merge::merge -- the including config takes precedence
            # under the default (left-precedent) merge behaviour.
            $config = merge( $config, $inc_config );
        }
        delete $config->{includes};
    }

    # A config is only useful if it defines at least one set.
    return DBIx::Class::Exception->throw('config has no sets')
        unless $config && $config->{sets}
            && ref $config->{sets} eq 'ARRAY'
            && scalar @{$config->{sets}};

    # Default relationship handling: skip might_have/has_many, always
    # follow belongs_to (avoids dangling FKs on import).
    $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
    $config->{has_many}   = { fetch => 0 } unless exists $config->{has_many};
    $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};

    return $config;
}
645 | |
# Serialise every row of a resultset by delegating to dump_object.
sub dump_rs {
    my ($self, $resultset, $args) = @_;

    while (defined(my $record = $resultset->next)) {
        $self->dump_object($record, $args);
    }
}
653 | |
0fc424b7 |
# Dump a single row object to a .fix file and recursively dump its
# relationships according to $params->{set} (the active set/rule config)
# and $params->{set_dir} (the output directory).
#
# Duplicate suppression: each (source, pk) pair is recorded in
# $self->dumped_objects; an already-seen object is not re-written and its
# non-'fetch' relationships are not re-walked, but explicit 'fetch'
# entries are still followed.
sub dump_object {
  my ($self, $object, $params) = @_;
  my $set = $params->{set};
  die 'no dir passed to dump_object' unless $params->{set_dir};
  die 'no object passed to dump_object' unless $object;

  my @inherited_attrs = @{$self->_inherited_attributes};

  my @pk_vals = map {
    $object->get_column($_)
  } $object->primary_columns;

  # NUL-joined PK values form the dedup key for this source.
  my $key = join("\0", @pk_vals);

  my $src = $object->result_source;
  # Post-increment: $exists is true iff we dumped this object before,
  # and the seen-count is bumped as a side effect.
  my $exists = $self->dumped_objects->{$src->name}{$key}++;


  # write dir and gen filename
  my $source_dir = $params->{set_dir}->subdir(lc $src->from);
  $source_dir->mkpath(0, 0777);

  # strip dir separators from file name
  # NOTE(review): the s||| inside map mutates @pk_vals in place ($_
  # aliases the elements) -- intentional here since @pk_vals is local.
  my $file = $source_dir->file(
      join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix'
  );


  # write file
  unless ($exists) {
    $self->msg('-- dumping ' . $file->stringify, 2);
    my %ds = $object->get_columns;

    # mess with dates if specified
    if ($set->{datetime_relative}) {
      my $formatter= $object->result_source->schema->storage->datetime_parser;
      # NOTE(review): these $@ checks refer to no visible eval -- they
      # appear to be leftovers from an earlier eval-based version; the
      # $formatter test is what actually gates this branch. Confirm.
      unless ($@ || !$formatter) {
        my $dt;
        if ($set->{datetime_relative} eq 'today') {
          $dt = DateTime->today;
        } else {
          $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
        }

        # Rewrite inflatable DATETIME columns as durations relative to $dt
        # so populate can re-anchor them to "now".
        while (my ($col, $value) = each %ds) {
          my $col_info = $object->result_source->column_info($col);

          next unless $value
            && $col_info->{_inflate_info}
            && uc($col_info->{data_type}) eq 'DATETIME';

          $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
        }
      } else {
        warn "datetime_relative not supported for this db driver at the moment";
      }
    }

    # do the actual dumping (Data::Dump::Streamer serialisation)
    my $serialized = Dump(\%ds)->Out();
    $file->openw->print($serialized);
  }

  # don't bother looking at rels unless we are actually planning to dump at least one type
  my ($might_have, $belongs_to, $has_many) = map {
    $set->{$_}{fetch};
  } qw/might_have belongs_to has_many/;

  return unless $might_have
             || $belongs_to
             || $has_many
             || $set->{fetch};

  # dump rels of object
  unless ($exists) {
    foreach my $name (sort $src->relationships) {
      my $info = $src->relationship_info($name);
      my $r_source = $src->related_source($name);
      # if belongs_to or might_have with might_have param set or has_many with
      # has_many param set then
      # ('single' accessor + join_type => might_have; 'single'/'filter'
      # without join_type => belongs_to, always followed; 'multi' =>
      # has_many, only if enabled.)
      if (
        ( $info->{attrs}{accessor} eq 'single' &&
          (!$info->{attrs}{join_type} || $might_have)
        )
        || $info->{attrs}{accessor} eq 'filter'
        ||
        ($info->{attrs}{accessor} eq 'multi' && $has_many)
      ) {
        my $related_rs = $object->related_resultset($name);
        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
        # these parts of the rule only apply to has_many rels
        if ($rule && $info->{attrs}{accessor} eq 'multi') {
          $related_rs = $related_rs->search(
            $rule->{cond},
            { join => $rule->{join} }
          ) if ($rule->{cond});

          $related_rs = $related_rs->search(
            {},
            { rows => $rule->{quantity} }
          ) if ($rule->{quantity} && $rule->{quantity} ne 'all');

          $related_rs = $related_rs->search(
            {},
            { order_by => $rule->{order_by} }
          ) if ($rule->{order_by});

        }
        # Global has_many quantity cap, applied regardless of rule.
        if ($set->{has_many}{quantity} &&
            $set->{has_many}{quantity} =~ /^\d+$/) {
          $related_rs = $related_rs->search(
            {},
            { rows => $set->{has_many}->{quantity} }
          );
        }

        my %c_params = %{$params};
        # inherit date param (and the other inherited attrs) into the
        # child set passed down the recursion
        my %mock_set = map {
          $_ => $set->{$_}
        } grep { $set->{$_} } @inherited_attrs;

        $c_params{set} = \%mock_set;
        $c_params{set} = merge( $c_params{set}, $rule)
          if $rule && $rule->{fetch};

        $self->dump_rs($related_rs, \%c_params);
      }
    }
  }

  return unless $set && $set->{fetch};
  # Explicit per-set 'fetch' directives (followed even for $exists rows).
  foreach my $fetch (@{$set->{fetch}}) {
    # inherit date param
    $fetch->{$_} = $set->{$_} foreach
      grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
    my $related_rs = $object->related_resultset($fetch->{rel});
    my $rule = $set->{rules}->{$related_rs->result_source->source_name};

    if ($rule) {
      my $info = $object->result_source->relationship_info($fetch->{rel});
      if ($info->{attrs}{accessor} eq 'multi') {
        $fetch = merge( $fetch, $rule );
      } elsif ($rule->{fetch}) {
        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
      }
    }

    # NOTE(review): related_resultset would already have died for an
    # unknown rel, so this guard is likely unreachable -- confirm.
    die "relationship $fetch->{rel} does not exist for " . $src->source_name
      unless ($related_rs);

    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
      # if value starts with \ assume it's meant to be passed as a scalar ref
      # to dbic. ideally this would substitute deeply
      $fetch->{cond} = { map {
        $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
                                                : $fetch->{cond}->{$_}
      } keys %{$fetch->{cond}} };
    }

    $related_rs = $related_rs->search(
      $fetch->{cond},
      { join => $fetch->{join} }
    ) if $fetch->{cond};

    $related_rs = $related_rs->search(
      {},
      { rows => $fetch->{quantity} }
    ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
    $related_rs = $related_rs->search(
      {},
      { order_by => $fetch->{order_by} }
    ) if $fetch->{order_by};

    $self->dump_rs($related_rs, { %{$params}, set => $fetch });
  }
}
831 | |
384c3f0c |
# Build a fresh, connected DBIx::Class schema for populate():
#   1. load the schema class ($self->schema_class or the bundled default),
#   2. connect via a uniquely-named generated subclass,
#   3. drop all existing tables (optionally with CASCADE),
#   4. replay the DDL from $params->{ddl},
#   5. reconnect through a second generated subclass and return it.
# Expects $params->{connection_details} (arrayref of DSN/user/pass).
sub _generate_schema {
  my $self = shift;
  my $params = shift || {};
  require DBI;
  $self->msg("\ncreating schema");

  my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
  # String eval is required here to require a class name held in a
  # variable; $schema_class comes from our own config, not user input.
  eval "require $schema_class";
  die $@ if $@;

  my $pre_schema;
  my $connection_details = $params->{connection_details};

  # Unique package per call so repeated populates don't collide.
  $namespace_counter++;

  my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace => $schema_class );

  $pre_schema = $namespace->connect(@{$connection_details});
  unless( $pre_schema ) {
    return DBIx::Class::Exception->throw('connection details not valid');
  }
  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
  $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
  my $dbh = $pre_schema->storage->dbh;

  # clear existing db
  $self->msg("- clearing DB of existing tables");
  # Drops are wrapped in eval so missing tables don't abort the run; FK
  # checks are deferred for the duration where the storage supports it.
  $pre_schema->storage->with_deferred_fk_checks(sub {
    foreach my $table (@tables) {
      eval {
        $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
      };
    }
  });

  # import new ddl file to db
  my $ddl_file = $params->{ddl};
  $self->msg("- deploying schema using $ddl_file");
  my $data = _read_sql($ddl_file);
  foreach (@$data) {
    eval { $dbh->do($_) or warn "SQL was:\n $_"};
    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
  }
  $self->msg("- finished importing DDL into DB");

  # load schema object from our new DB (second fresh namespace so the
  # returned schema carries no state from the pre-DDL connection)
  $namespace_counter++;
  my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
  Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
  my $schema = $namespace2->connect(@{$connection_details});
  return $schema;
}
885 | |
f81264b2 |
# Read a DDL file and split it into individual SQL statements.
#
# Arguments: $ddl_file - path to the SQL/DDL file.
# Returns:   arrayref of statement strings (trailing ';' removed).
#
# Full-line "--" comments are stripped, the remaining lines are joined
# and split on ';'.  NOTE: this is a naive splitter -- a literal ';'
# inside a quoted string would incorrectly split a statement.
sub _read_sql {
    my $ddl_file = shift;

    # Three-arg open with a lexical filehandle; the original two-arg form
    # ("<$ddl_file") was vulnerable to mode characters embedded in the
    # filename.
    open my $fh, '<', $ddl_file
        or die ("Can't open DDL file, $ddl_file ($!)");
    my @data = split(/\n/, join('', <$fh>));
    close($fh);

    @data = grep(!/^--/, @data);
    @data = split(/;/, join('', @data));
    @data = grep { $_ && $_ !~ /^-- / } @data;
    return \@data;
}
a5561f96 |
897 | |
898 | =head2 populate |
899 | |
900 | =over 4 |
901 | |
902 | =item Arguments: \%$attrs |
903 | |
904 | =item Return Value: 1 |
905 | |
906 | =back |
907 | |
8a1df391 |
908 | $fixtures->populate( { |
909 | # directory to look for fixtures in, as specified to dump |
910 | directory => '/home/me/app/fixtures', |
911 | |
912 | # DDL to deploy |
913 | ddl => '/home/me/app/sql/ddl.sql', |
914 | |
915 | # database to clear, deploy and then populate |
916 | connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], |
917 | |
918 | # DDL to deploy after populating records, ie. FK constraints |
919 | post_ddl => '/home/me/app/sql/post_ddl.sql', |
920 | |
921 | # use CASCADE option when dropping tables |
922 | cascade => 1, |
923 | |
924 | # optional, set to 1 to run ddl but not populate |
925 | no_populate => 0, |
926 | |
927 | # Don't try to clean the database, just populate over what's there. Requires
928 | # schema option. Use this if you want to handle removing old data yourself |
929 | # no_deploy => 1 |
930 | # schema => $schema |
931 | } ); |
a5561f96 |
932 | |
9e77162b |
933 | In this case the database app_dev will be cleared of all tables, then the |
934 | specified DDL deployed to it, then finally all fixtures found in |
935 | /home/me/app/fixtures will be added to it. populate will generate its own |
936 | DBIx::Class schema from the DDL rather than being passed one to use. This is |
937 | better as custom insert methods are avoided which can get in the way. In
938 | some cases you might not have a DDL, and so this method will eventually allow a |
939 | $schema object to be passed instead. |
a5561f96 |
940 | |
9e77162b |
941 | If needed, you can specify a post_ddl attribute which is a DDL to be applied |
942 | after all the fixtures have been added to the database. A good use of this |
943 | option would be to add foreign key constraints since databases like Postgresql |
944 | cannot disable foreign key checks. |
f81264b2 |
945 | |
9e77162b |
946 | If your tables have foreign key constraints you may want to use the cascade |
947 | attribute which will make the drop table functionality cascade, ie 'DROP TABLE |
948 | $table CASCADE'. |
f81264b2 |
949 | |
9e77162b |
950 | C<directory> is a required attribute. |
951 | |
952 | If you wish for DBIx::Class::Fixtures to clear the database for you pass in |
953 | C<ddl> (path to a DDL sql file) and C<connection_details> (array ref of DSN,
954 | user and pass). |
955 | |
956 | If you wish to deal with cleaning the schema yourself, then pass in a C<schema> |
957 | attribute containing the connected schema you wish to operate on and set the |
958 | C<no_deploy> attribute. |
a5561f96 |
959 | |
960 | =cut |
961 | |
384c3f0c |
# populate(\%attrs): deploy a DDL into a fresh connection (or reuse a
# caller-supplied schema when no_deploy is set) and import every fixture
# found under 'directory'.  Returns 1 on success; throws
# DBIx::Class::Exception on bad arguments.  See the POD above for the
# full attribute list.
962 | sub populate {
963 | my $self = shift;
964 | my ($params) = @_;
0a54a6e8 |
# --- argument validation ---
965 | DBIx::Class::Exception->throw('first arg to populate must be hash ref')
966 | unless ref $params eq 'HASH';
967 |
968 | DBIx::Class::Exception->throw('directory param not specified')
969 | unless $params->{directory};
384c3f0c |
970 |
9a9a7832 |
# 'directory' is deleted (not just read) so the remaining %$params can be
# forwarded verbatim to _generate_schema below.
971 | my $fixture_dir = dir(delete $params->{directory});
0a54a6e8 |
972 | DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
973 | unless -d $fixture_dir;
384c3f0c |
974 |
975 | my $ddl_file;
9e77162b |
976 | my $dbh;
977 | my $schema;
384c3f0c |
# Either deploy the DDL into a new connection ourselves, or trust the
# caller's already-prepared schema when schema + no_deploy are given.
978 | if ($params->{ddl} && $params->{connection_details}) {
9a9a7832 |
979 | $ddl_file = file(delete $params->{ddl});
384c3f0c |
980 | unless (-e $ddl_file) {
981 | return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
982 | }
983 | unless (ref $params->{connection_details} eq 'ARRAY') {
984 | return DBIx::Class::Exception->throw('connection details must be an arrayref');
985 | }
8a1df391 |
986 | $schema = $self->_generate_schema({
987 | ddl => $ddl_file,
988 | connection_details => delete $params->{connection_details},
989 | %{$params}
990 | });
9e77162b |
991 | } elsif ($params->{schema} && $params->{no_deploy}) {
992 | $schema = $params->{schema};
384c3f0c |
993 | } else {
0a54a6e8 |
994 | DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
384c3f0c |
995 | }
996 |
3ad96388 |
997 |
# no_populate: caller only wanted the schema deployed, not the data.
998 | return 1 if $params->{no_populate};
999 |
4fb695f4 |
1000 | $self->msg("\nimporting fixtures");
384c3f0c |
# Work on a throw-away copy of the fixtures; the dir name embeds the real
# uid ($<) so different users get distinct temp dirs.
1001 | my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
384c3f0c |
1002 | my $version_file = file($fixture_dir, '_dumper_version');
0a54a6e8 |
1003 | # DBIx::Class::Exception->throw('no version file found');
1004 | # unless -e $version_file;
384c3f0c |
1005 |
1006 | if (-e $tmp_fixture_dir) {
1007 | $self->msg("- deleting existing temp directory $tmp_fixture_dir");
4fb695f4 |
1008 | $tmp_fixture_dir->rmtree;
384c3f0c |
1009 | }
1010 | $self->msg("- creating temp dir");
51794e1c |
1011 | $tmp_fixture_dir->mkpath();
0a54a6e8 |
# Fixture subdirectories are keyed by each source's table name
# ($source->from), not the source/moniker name.
1012 | for ( map { $schema->source($_)->from } $schema->sources) {
1013 | my $from_dir = $fixture_dir->subdir($_);
1014 | next unless -e $from_dir;
1015 | dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
1016 | }
9e77162b |
1017 |
1018 | unless (-d $tmp_fixture_dir) {
0a54a6e8 |
1019 | DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
9e77162b |
1020 | }
384c3f0c |
1021 |
384c3f0c |
# Optional Data::Visitor::Callback that rewrites DateTime::Duration
# objects found in fixture data into concrete datetime strings (relative
# to datetime_relative_to, or to today).
1022 | my $fixup_visitor;
0a54a6e8 |
1023 | my $formatter = $schema->storage->datetime_parser;
0566a82d |
# NOTE(review): $@ here is stale -- the datetime_parser call above is no
# longer wrapped in an eval, so this condition effectively only guards
# against a missing formatter.  Confirm before relying on the $@ branch.
1024 | unless ($@ || !$formatter) {
1025 | my %callbacks;
1026 | if ($params->{datetime_relative_to}) {
1027 | $callbacks{'DateTime::Duration'} = sub {
1028 | $params->{datetime_relative_to}->clone->add_duration($_);
1029 | };
1030 | } else {
1031 | $callbacks{'DateTime::Duration'} = sub {
1032 | $formatter->format_datetime(DateTime->today->add_duration($_))
1033 | };
1034 | }
1035 | $callbacks{object} ||= "visit_ref";
1036 | $fixup_visitor = new Data::Visitor::Callback(%callbacks);
384c3f0c |
1037 | }
1ac1b0d7 |
1038 |
3ad96388 |
# Insert rows with FK checks deferred (where the storage supports it) so
# insertion order across sources does not matter.
1039 | $schema->storage->with_deferred_fk_checks(sub {
1ac1b0d7 |
1040 | foreach my $source (sort $schema->sources) {
1041 | $self->msg("- adding " . $source);
1042 | my $rs = $schema->resultset($source);
0a54a6e8 |
1043 | my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
1ac1b0d7 |
1044 | next unless (-e $source_dir);
1045 | my @rows;
1046 | while (my $file = $source_dir->next) {
1047 | next unless ($file =~ /\.fix$/);
1048 | next if $file->is_dir;
1049 | my $contents = $file->slurp;
1050 | my $HASH1;
# SECURITY: fixture files are executed as Perl -- the Data::Dump::Streamer
# output assigns into $HASH1.  Only ever load trusted fixture directories.
1051 | eval($contents);
1052 | $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
1053 | push(@rows, $HASH1);
1054 | }
0a54a6e8 |
1055 | $rs->populate(\@rows) if scalar(@rows);
1ac1b0d7 |
1056 | }
1057 | });
1058 |
8a1df391 |
# Optional post-population DDL, e.g. FK constraints for databases that
# cannot disable FK checks during the load (see POD).
1059 | $self->do_post_ddl( {
1060 | schema=>$schema,
1061 | post_ddl=>$params->{post_ddl}
1062 | } ) if $params->{post_ddl};
f81264b2 |
1063 |
384c3f0c |
1064 | $self->msg("- fixtures imported");
1065 | $self->msg("- cleaning up");
# Remove the temporary fixture copy.  Note this is skipped if an
# exception was thrown earlier in this sub.
1066 | $tmp_fixture_dir->rmtree;
b099fee9 |
1067 | return 1;
384c3f0c |
1068 | }
1069 | |
6a05e381 |
# Apply a post-population DDL file (e.g. FK constraints) to the freshly
# populated database.
#
# Arguments: hashref with 'schema' (a connected DBIx::Class schema) and
# 'post_ddl' (path to a SQL file, split by _read_sql).
# A failing statement is fatal unless ignore_sql_errors is set; a do()
# that merely returns false only produces a warning.
sub do_post_ddl {
    my ($self, $params) = @_;

    my $schema     = $params->{schema};
    my $statements = _read_sql( $params->{post_ddl} );

    foreach (@$statements) {
        eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
        die "SQL was:\n $_\n$@"
            if $@ && !$self->{ignore_sql_errors};
    }

    $self->msg("- finished importing post-populate DDL into DB");
}
1081 | |
0fc424b7 |
# Emit a debug message.  References are dumped via Data::Dumper; plain
# strings are printed with a trailing newline.  Output is suppressed
# unless the object's debug level is at least $level (default 1).
# A false $subject (undef, '' or 0) is silently ignored.
sub msg {
    my $self    = shift;
    my $subject = shift;
    my $level   = shift;

    return unless $subject;
    $level = 1 unless $level;
    return if $self->debug < $level;

    ref $subject
        ? print Dumper($subject)
        : print "$subject\n";
}
a5561f96 |
1093 | |
1094 | =head1 AUTHOR |
1095 | |
1096 | Luke Saunders <luke@shadowcatsystems.co.uk> |
1097 | |
3b4f6e76 |
1098 | Initial development sponsored by and (c) Takkle, Inc. 2007 |
1099 | |
a5561f96 |
1100 | =head1 CONTRIBUTORS |
1101 | |
1102 | Ash Berlin <ash@shadowcatsystems.co.uk> |
8a1df391 |
1103 | |
a5561f96 |
1104 | Matt S. Trout <mst@shadowcatsystems.co.uk> |
8a1df391 |
1105 | |
fc17c598 |
1106 | Drew Taylor <taylor.andrew.j@gmail.com> |
a5561f96 |
1107 | |
3b4f6e76 |
1108 | =head1 LICENSE |
1109 | |
1110 | This library is free software under the same license as perl itself |
1111 | |
a5561f96 |
1112 | =cut |
1113 | |
e5963c1b |
1114 | 1; |