Commit | Line | Data |
e5963c1b |
1 | package DBIx::Class::Fixtures; |
2 | |
3 | use strict; |
4 | use warnings; |
5 | |
6 | use DBIx::Class::Exception; |
7 | use Class::Accessor; |
8 | use Path::Class qw(dir file); |
6116de11 |
9 | use File::Slurp; |
0fc424b7 |
10 | use Config::Any::JSON; |
11 | use Data::Dump::Streamer; |
4fb695f4 |
12 | use Data::Visitor::Callback; |
0fc424b7 |
13 | use File::Path; |
4fb695f4 |
14 | use File::Copy::Recursive qw/dircopy/; |
6116de11 |
15 | use File::Copy qw/move/; |
0fc424b7 |
16 | use Hash::Merge qw( merge ); |
e5963c1b |
17 | use Data::Dumper; |
18 | |
19 | use base qw(Class::Accessor); |
20 | |
9a9a7832 |
21 | __PACKAGE__->mk_accessors(qw(config_dir _inherited_attributes debug schema_class )); |
e5963c1b |
22 | |
23 | =head1 VERSION |
24 | |
25 | Version 1.000 |
26 | |
27 | =cut |
28 | |
29 | our $VERSION = '1.000'; |
30 | |
31 | =head1 NAME |
32 | |
9f96b203 |
33 | DBIx::Class::Fixtures - dump and restore database contents as DBIx::Class fixture sets |
34 | |
e5963c1b |
35 | =head1 SYNOPSIS |
36 | |
37 | use DBIx::Class::Fixtures; |
38 | |
39 | ... |
40 | |
41 | my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' }); |
42 | |
43 | $fixtures->dump({ |
44 | config => 'set_config.json', |
45 | schema => $source_dbic_schema, |
46 | directory => '/home/me/app/fixtures' |
47 | }); |
48 | |
49 | $fixtures->populate({ |
50 | directory => '/home/me/app/fixtures', |
51 | ddl => '/home/me/app/sql/ddl.sql', |
52 | connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'] |
53 | }); |
54 | |
55 | =head1 DESCRIPTION |
56 | |
57 | =head1 AUTHOR |
58 | |
6116de11 |
59 | Luke Saunders <luke@shadowcatsystems.co.uk> |
60 | |
e5963c1b |
61 | =head1 CONTRIBUTORS |
62 | |
0fc424b7 |
63 | =head1 METHODS |
64 | |
65 | =head2 new |
e5963c1b |
66 | |
0fc424b7 |
67 | =cut |
e5963c1b |
68 | |
sub new {
    my $class = shift;

    my ($params) = @_;

    # All constructor options must arrive in a single hash ref.
    unless (ref $params eq 'HASH') {
        return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
    }

    unless ($params->{config_dir}) {
        return DBIx::Class::Exception->throw('config_dir param not specified');
    }

    my $config_dir = dir($params->{config_dir});

    # -d rather than -e: a plain file at this path would be useless as a
    # config dir, so reject anything that is not a directory.
    unless (-d $config_dir) {
        return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
    }

    my $self = {
        config_dir            => $config_dir,
        # attributes that fetch/rule sets inherit from their parent set
        _inherited_attributes => [qw/datetime_relative might_have rules/],
        # default to 0 so msg()'s numeric comparison never sees undef
        debug                 => $params->{debug} || 0,
    };

    bless $self, $class;

    return $self;
}
96 | |
0fc424b7 |
97 | =head2 dump |
98 | |
99 | =cut |
100 | |
# Dump a fixture set to disk.
#
# Takes a hash ref with required keys:
#   config    - name of a JSON config file relative to config_dir; must
#               contain a non-empty 'sets' array
#   schema    - a connected DBIx::Class schema to read rows from
#   directory - existing output directory for the fixture files
#
# Rows are first written to a '-~dump~-' temp dir inside the output dir,
# then the output dir is cleared and the temp contents moved up, so an
# interrupted dump never leaves a half-written fixture set in place.
# Returns 1 on success; throws DBIx::Class::Exception on bad arguments.
sub dump {
    my $self = shift;

    my ($params) = @_;
    unless (ref $params eq 'HASH') {
        return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
    }

    foreach my $param (qw/config schema directory/) {
        unless ($params->{$param}) {
            return DBIx::Class::Exception->throw($param . ' param not specified');
        }
    }

    my $config_file = file($self->config_dir, $params->{config});
    unless (-e $config_file) {
        return DBIx::Class::Exception->throw('config does not exist at ' . $config_file);
    }

    my $config = Config::Any::JSON->load($config_file);
    unless ($config && $config->{sets} && ref $config->{sets} eq 'ARRAY' && scalar(@{$config->{sets}})) {
        return DBIx::Class::Exception->throw('config has no sets');
    }

    my $output_dir = dir($params->{directory});
    unless (-e $output_dir) {
        return DBIx::Class::Exception->throw('output directory does not exist at ' . $output_dir);
    }

    my $schema = $params->{schema};

    $self->msg("generating fixtures");
    my $tmp_output_dir = dir($output_dir, '-~dump~-');

    # start from a clean temp dir in case a previous dump was interrupted
    if (-e $tmp_output_dir) {
        $self->msg("- clearing existing $tmp_output_dir");
        $tmp_output_dir->rmtree;
    }
    $self->msg("- creating $tmp_output_dir");
    $tmp_output_dir->mkpath;

    # write version file (for the potential benefit of populate)
    my $version_file = file($tmp_output_dir, '_dumper_version');
    write_file($version_file->stringify, $VERSION);

    $config->{rules} ||= {};
    # sets are processed in class order for deterministic output
    my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
    my %options = ( is_root => 1 );
    foreach my $source (@sources) {
        # apply rule to set if specified
        my $rule = $config->{rules}->{$source->{class}};
        $source = merge( $source, $rule ) if ($rule);

        # fetch objects
        my $rs = $schema->resultset($source->{class});
        $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
        $self->msg("- dumping $source->{class}");
        my @objects;
        # the per-source 'set' passed down to dump_object merges global
        # config keys (e.g. rules) with this source's own keys
        my %source_options = ( set => { %{$config}, %{$source} } );
        if ($source->{quantity}) {
            $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
            if ($source->{quantity} eq 'all') {
                push (@objects, $rs->all);
            } elsif ($source->{quantity} =~ /^\d+$/) {
                push (@objects, $rs->search({}, { rows => $source->{quantity} }));
            } else {
                DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
            }
        }
        # 'ids' may be combined with 'quantity'; rows are fetched by primary key
        if ($source->{ids}) {
            my @ids = @{$source->{ids}};
            my @id_objects = grep { $_ } map { $rs->find($_) } @ids;
            push (@objects, @id_objects);
        }
        unless ($source->{quantity} || $source->{ids}) {
            DBIx::Class::Exception->throw('must specify either quantity or ids');
        }

        # dump objects
        foreach my $object (@objects) {
            $source_options{set_dir} = $tmp_output_dir;
            $self->dump_object($object, { %options, %source_options } );
            next;
        }
    }

    # delete the previous fixture set, keeping only the fresh temp dir
    foreach my $dir ($output_dir->children) {
        next if ($dir eq $tmp_output_dir);
        $dir->remove || $dir->rmtree;
    }

    $self->msg("- moving temp dir to $output_dir");
    move($_, dir($output_dir, $_->relative($_->parent)->stringify)) for $tmp_output_dir->children;
    # NOTE(review): this -e test on $output_dir is always true at this point;
    # presumably the intent was to remove the now-empty temp dir — confirm
    if (-e $output_dir) {
        $self->msg("- clearing tmp dir $tmp_output_dir");
        # delete existing fixture set
        $tmp_output_dir->remove;
    }

    $self->msg("done");

    return 1;
}
204 | |
# Serialize one row object to a .fix file and recurse into its related rows.
#
# Arguments:
#   $object  - the DBIx::Class row to dump (required)
#   $params  - hash ref; 'set_dir' (required) is the dump root,
#              'set' holds the (possibly rule-merged) set definition
#   $rr_info - accepted but currently unused in this body
#
# Files are written to <set_dir>/<table>/<pk1-pk2...>.fix; an existing file
# short-circuits both the write and the relationship walk, which also stops
# infinite recursion on circular relationships.
sub dump_object {
    my ($self, $object, $params, $rr_info) = @_;
    my $set = $params->{set};
    die 'no dir passed to dump_object' unless $params->{set_dir};
    die 'no object passed to dump_object' unless $object;

    my @inherited_attrs = @{$self->_inherited_attributes};

    # write dir and gen filename
    my $source_dir = dir($params->{set_dir}, lc($object->result_source->from));
    mkdir($source_dir->stringify, 0777);
    # filename is the sorted primary-key values joined with '-'
    my $file = file($source_dir, join('-', map { $object->get_column($_) } sort $object->primary_columns) . '.fix');

    # write file
    my $exists = (-e $file->stringify) ? 1 : 0;
    unless ($exists) {
        $self->msg('-- dumping ' . $file->stringify, 2);
        my %ds = $object->get_columns;

        # mess with dates if specified
        if ($set->{datetime_relative}) {
            my $dt;
            if ($set->{datetime_relative} eq 'today') {
                $dt = DateTime->today;
            } else {
                # assumes datetime_relative is a MySQL-format datetime string
                require DateTime::Format::MySQL;
                $dt = DateTime::Format::MySQL->parse_datetime($set->{datetime_relative});
            }

            # store DATETIME columns as durations relative to $dt so populate
            # can re-anchor them to a different date later
            while (my ($col, $value) = each %ds) {
                my $col_info = $object->result_source->column_info($col);

                next unless $value
                    && $col_info->{_inflate_info}
                    && uc($col_info->{data_type}) eq 'DATETIME';

                $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
            }
        }

        # do the actual dumping
        my $serialized = Dump(\%ds)->Out();
        write_file($file->stringify, $serialized);
        my $mode = 0777; chmod $mode, $file->stringify;
    }

    # dump rels of object
    my $s = $object->result_source;
    unless ($exists) {
        foreach my $name (sort $s->relationships) {
            my $info = $s->relationship_info($name);
            my $r_source = $s->related_source($name);
            # if belongs_to or might_have with might_have param set or has_many with has_many param set then
            if (($info->{attrs}{accessor} eq 'single' && (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))) || $info->{attrs}{accessor} eq 'filter' || ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))) {
                my $related_rs = $object->related_resultset($name);
                my $rule = $set->{rules}->{$related_rs->result_source->source_name};
                # these parts of the rule only apply to has_many rels
                if ($rule && $info->{attrs}{accessor} eq 'multi') {
                    $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
                    $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
                    $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
                }
                # a global has_many quantity limit also applies
                if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
                    $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
                }
                my %c_params = %{$params};
                # inherit date param
                my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
                $c_params{set} = \%mock_set;
                $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
                $self->dump_object($_, \%c_params) foreach $related_rs->all;
            }
        }
    }

    # explicit 'fetch' directives: follow named relationships with their own
    # cond/quantity/order_by, merging in any matching rule first
    return unless $set && $set->{fetch};
    foreach my $fetch (@{$set->{fetch}}) {
        # inherit date param
        $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
        my $related_rs = $object->related_resultset($fetch->{rel});
        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
        if ($rule) {
            my $info = $object->result_source->relationship_info($fetch->{rel});
            if ($info->{attrs}{accessor} eq 'multi') {
                $fetch = merge( $fetch, $rule );
            } elsif ($rule->{fetch}) {
                $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
            }
        }
        die "relationship " . $fetch->{rel} . " does not exist for " . $s->source_name unless ($related_rs);
        if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
            # if value starts with / assume it's meant to be passed as a scalar ref to dbic
            # ideally this would substitute deeply
            $fetch->{cond} = { map { $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_} : $fetch->{cond}->{$_} } keys %{$fetch->{cond}} };
        }
        $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} }) if ($fetch->{cond});
        $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} }) if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
        $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} }) if ($fetch->{order_by});
        $self->dump_object($_, { %{$params}, set => $fetch }) foreach $related_rs->all;
    }
}
308 | |
384c3f0c |
# Build a fresh database from a DDL file and return a connected schema.
#
# Takes a hash ref with:
#   connection_details - arrayref of DBI connect args (dsn, user, password)
#   ddl                - path to a SQL DDL file to deploy
#
# Drops every table known to the schema class, then replays the DDL file
# statement by statement. Throws on bad connection details; dies if any
# DDL statement fails.
sub _generate_schema {
    my $self = shift;
    my $params = shift || {};
    require DBI;
    $self->msg("\ncreating schema");

    my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
    # dynamic class load; $schema_class comes from our own accessor
    eval "require $schema_class";
    die $@ if $@;

    my $pre_schema;
    my $connection_details = $params->{connection_details};
    unless( $pre_schema = $schema_class->connect(@{$connection_details}) ) {
        return DBIx::Class::Exception->throw('connection details not valid');
    }
    my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
    my $dbh = $pre_schema->storage->dbh;

    # clear existing db
    $self->msg("- clearing DB of existing tables");
    # MySQL-specific; wrapped in eval so other databases just skip it
    eval { $dbh->do('SET foreign_key_checks=0') };
    $dbh->do('drop table ' . $_) for (@tables);

    # import new ddl file to db
    my $ddl_file = $params->{ddl};
    $self->msg("- deploying schema using $ddl_file");
    # three-arg open: the filename can never be mistaken for a mode
    open my $fh, '<', $ddl_file or die "Can't open DDL file, $ddl_file ($!)";
    my @data = split(/\n/, join('', <$fh>));
    close($fh);
    @data = grep(!/^--/, @data);        # drop full-line SQL comments
    @data = split(/;/, join('', @data)); # re-split on statement terminators
    @data = grep { $_ && $_ !~ /^-- / } @data;
    for (@data) {
        eval { $dbh->do($_) or warn "SQL was:\n $_"};
        if ($@) { die "SQL was:\n $_\n$@"; }
    }
    $self->msg("- finished importing DDL into DB");

    # load schema object from our new DB
    $self->msg("- loading fresh DBIC object from DB");
    my $schema = $schema_class->connect(@{$connection_details});
    return $schema;
}
354 | |
# Load a previously dumped fixture set into a freshly deployed database.
#
# Takes a hash ref with:
#   directory          - fixture set dir produced by dump() (required)
#   ddl                - DDL file used to (re)create the schema (required)
#   connection_details - arrayref of DBI connect args (required)
#   datetime_relative_to - optional DateTime; durations stored by dump()
#                          are added to this (defaults to today)
#
# Fixture files are copied to a per-user temp dir, eval'd back into hash
# refs, run through a Data::Visitor::Callback pass to re-anchor relative
# dates, and inserted via find_or_create. Throws on bad arguments.
sub populate {
    my $self = shift;
    my ($params) = @_;
    unless (ref $params eq 'HASH') {
        return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
    }

    foreach my $param (qw/directory/) {
        unless ($params->{$param}) {
            return DBIx::Class::Exception->throw($param . ' param not specified');
        }
    }
    my $fixture_dir = dir(delete $params->{directory});
    unless (-e $fixture_dir) {
        return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
    }

    my $ddl_file;
    my $dbh;
    if ($params->{ddl} && $params->{connection_details}) {
        $ddl_file = file(delete $params->{ddl});
        unless (-e $ddl_file) {
            return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
        }
        unless (ref $params->{connection_details} eq 'ARRAY') {
            return DBIx::Class::Exception->throw('connection details must be an arrayref');
        }
    } elsif ($params->{schema}) {
        return DBIx::Class::Exception->throw('passing a schema is not supported at the moment');
    } else {
        return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
    }

    # remaining %$params keys (e.g. datetime_relative_to) pass straight through
    my $schema = $self->_generate_schema({ ddl => $ddl_file, connection_details => delete $params->{connection_details}, %{$params} });
    $self->msg("\nimporting fixtures");
    # $< (real uid) in the name keeps concurrent users from colliding
    my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);

    my $version_file = file($fixture_dir, '_dumper_version');
    # NOTE(review): version check is currently disabled — the file is located
    # but a missing one is not treated as an error
    unless (-e $version_file) {
        # return DBIx::Class::Exception->throw('no version file found');
    }

    if (-e $tmp_fixture_dir) {
        $self->msg("- deleting existing temp directory $tmp_fixture_dir");
        $tmp_fixture_dir->rmtree;
    }
    $self->msg("- creating temp dir");
    # copy only the table dirs the schema actually knows about
    dircopy(dir($fixture_dir, $schema->source($_)->from), dir($tmp_fixture_dir, $schema->source($_)->from)) for $schema->sources;

    # MySQL-specific; wrapped in eval so other databases just skip it
    eval { $schema->storage->dbh->do('SET foreign_key_checks=0') };
    my $fixup_visitor;
    my %callbacks;
    # re-anchor DateTime::Duration values dumped with datetime_relative
    if ($params->{datetime_relative_to}) {
        $callbacks{'DateTime::Duration'} = sub {
            $params->{datetime_relative_to}->clone->add_duration($_);
        };
    } else {
        $callbacks{'DateTime::Duration'} = sub {
            DateTime->today->add_duration($_)
        };
    }
    $callbacks{object} ||= "visit_ref";
    $fixup_visitor = new Data::Visitor::Callback(%callbacks);

    foreach my $source (sort $schema->sources) {
        $self->msg("- adding " . $source);
        my $rs = $schema->resultset($source);
        my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
        next unless (-e $source_dir);
        while (my $file = $source_dir->next) {
            next unless ($file =~ /\.fix$/);
            next if $file->is_dir;
            my $contents = $file->slurp;
            my $HASH1;
            # SECURITY: string eval executes the .fix file as Perl; only
            # populate fixture sets produced by a trusted dump()
            eval($contents);
            $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
            $rs->find_or_create($HASH1);
        }
    }

    $self->msg("- fixtures imported");
    $self->msg("- cleaning up");
    $tmp_fixture_dir->rmtree;
    eval { $schema->storage->dbh->do('SET foreign_key_checks=1') };
}
440 | |
0fc424b7 |
# Print a progress message when the instance's debug level is at least
# $level (default 1). Refs are dumped with Data::Dumper; plain strings
# get a trailing newline. Returns silently when debugging is off.
sub msg {
    my $self = shift;
    # NOTE(review): '|| return' also discards a literal '0' subject — assumed
    # intentional since progress messages are never bare zeros
    my $subject = shift || return;
    my $level = shift || 1;

    # treat an unset debug level as 0 so the comparison never warns on undef
    return unless ($self->debug || 0) >= $level;
    if (ref $subject) {
        print Dumper($subject);
    } else {
        print $subject . "\n";
    }
}
e5963c1b |
453 | 1; |