Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
334bced5 |
2 | use Moose; |
9af9d0b2 |
3 | |
9a3a62f1 |
4 | # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories |
5 | |
9af9d0b2 |
6 | use autodie; |
7 | use Carp qw( carp croak ); |
0df68524 |
8 | use Log::Contextual::WarnLogger; |
9 | use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({ |
10 | env_prefix => 'DBICDH' |
11 | }); |
12 | use Data::Dumper::Concise; |
9af9d0b2 |
13 | |
2e68a8e1 |
14 | use Method::Signatures::Simple; |
7f50d101 |
15 | use Try::Tiny; |
9af9d0b2 |
16 | |
d23c7c77 |
17 | use SQL::Translator; |
18 | require SQL::Translator::Diff; |
9af9d0b2 |
19 | |
d23c7c77 |
20 | require DBIx::Class::Storage; # loaded for type constraint |
41863428 |
21 | use DBIx::Class::DeploymentHandler::Types; |
22 | |
9af9d0b2 |
23 | use File::Path 'mkpath'; |
24 | use File::Spec::Functions; |
2e68a8e1 |
25 | |
7521a845 |
26 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
27 | |
d54b8d69 |
# The schema (required) used to talk to the database and generate DDL.
has schema => (
  is       => 'ro',
  isa      => 'DBIx::Class::Schema',
  required => 1,
);

# The storage actually used for DDL generation; defaults to the schema's.
has storage => (
  is         => 'ro',
  isa        => 'DBIx::Class::Storage',
  lazy_build => 1,
);

# Driver detection is forced up front so sqlt_type and friends are reliable.
method _build_storage {
  my $storage = $self->schema->storage;
  $storage->_determine_driver;
  return $storage;
}

# Extra arguments merged into every SQL::Translator invocation.
has sql_translator_args => (
  is      => 'ro',
  isa     => 'HashRef',
  default => sub { {} },
);

# Directory (default 'sql') where migration scripts live.
has script_directory => (
  is       => 'ro',
  isa      => 'Str',
  required => 1,
  default  => 'sql',
);

# Database types to generate files for.
has databases => (
  is      => 'ro',
  isa     => 'DBIx::Class::DeploymentHandler::Databases',
  coerce  => 1,
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);

# Wrap each deploy/upgrade batch in a single transaction (default on).
has txn_wrap => (
  is      => 'ro',
  isa     => 'Bool',
  default => 1,
);

# The version the on-disk schema is at.
has schema_version => (
  is         => 'ro',
  isa        => 'Str',
  lazy_build => 1,
);

# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
# is built the same way, but we leave this in place
method _build_schema_version { return $self->schema->schema_version }

# Lazily-constructed JSON codec used for the .sql-json serialized files.
has _json => (
  is         => 'ro',
  lazy_build => 1,
);

sub _build__json { require JSON; return JSON->new->pretty }
88 | |
76d311e7 |
# Locate every runnable file for a given database type, version (set), and
# phase prefix ('schema', 'up', 'down', 'preinstall').  Files found in the
# type-specific (or _generic fallback) directory shadow same-named files in
# _common; the result is an arrayref of paths sorted by filename.
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->script_directory;

  my $version_tag = join q(-), @{$versions};
  my $main        = catfile( $base_dir, $type );
  my $generic     = catfile( $base_dir, '_generic' );
  my $common      = catfile( $base_dir, '_common', $prefix, $version_tag );

  my $dir;
  if (-d $main) {
    $dir = catfile( $main, $prefix, $version_tag );
  } elsif (-d $generic) {
    $dir = catfile( $generic, $prefix, $version_tag );
  } else {
    croak "neither $main or $generic exist; please write/generate some SQL";
  }

  opendir my ($dh), $dir;
  my %files =
    map  { $_ => "$dir/$_" }
    grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" }
    readdir $dh;
  closedir $dh;

  if (-d $common) {
    opendir my ($common_dh), $common;
    for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common, $_) } readdir $common_dh) {
      # a type-specific file of the same name wins over the _common one
      $files{$filename} ||= catfile($common, $filename);
    }
    closedir $common_dh;
  }

  return [ @files{ sort keys %files } ];
}
3c1b5ee8 |
122 | |
fc4b7602 |
# The following helpers are thin curries of __ddl_consume_with_prefix (for
# reading) or filename constructors (for writing), one per migration phase.

method _ddl_preinstall_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall');
}

method _ddl_schema_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix($type, [ $version ], 'schema');
}

# Single file an initial schema is written to; creates the directory.
method _ddl_schema_produce_filename($type, $version) {
  my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql-json' );
}

method _ddl_schema_up_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'up');
}

method _ddl_schema_down_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'down');
}

# Single file the generated upgrade diff is written to; creates the directory.
method _ddl_schema_up_produce_filename($type, $versions) {
  my $dirname =
    catfile( $self->script_directory, $type, 'up', join q(-), @{$versions} );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql-json' );
}
154 | |
76d311e7 |
# Single file the generated downgrade diff is written to; creates the
# directory.  Unlike the 'up' variant this historically required $dir to be
# passed in; it now defaults to script_directory for consistency, and
# callers that still pass $dir explicitly keep working unchanged.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  $dir = $self->script_directory unless defined $dir;

  my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql-json' );
}
161 | |
f36afe83 |
# Run an arrayref of SQL statements one at a time.  Blank lines, comments,
# and transaction statements are stripped first (txn_wrap owns transaction
# handling).  Statement errors are warned about but deliberately not fatal.
# Returns the filtered SQL joined with newlines.
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    try {
      # do a dbh_do cycle here, as we need some error checking in
      # place (even though we will ignore errors)
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      carp "$_ (running '${line}')"
    };
    # ^ BUGFIX: the trailing semicolon was missing.  Try::Tiny's try/catch
    # is an ordinary (prototyped) function call, so without the semicolon
    # the following statement was parsed as an extra argument to catch()
    # and the whole construct blew up at runtime.
    $storage->_query_end($line);
  }
  return join "\n", @$sql;
}
188 | |
# Execute the statements contained in a plain .sql file.
method _run_sql($filename) {
  log_debug { "[DBICDH] Running SQL from $filename" };
  my $statements = $self->_read_sql_file($filename);
  return $self->_run_sql_array($statements);
}
2e68a8e1 |
193 | |
5d7b27cf |
# Compile and run a migration perl script.  The file must evaluate to a
# coderef, which is called with the schema as its only argument.  Compile
# failures and non-coderef files warn rather than die.
method _run_perl($filename) {
  log_debug { "[DBICDH] Running Perl from $filename" };
  # slurp the whole file
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };

  if ($@) {
    carp "$filename failed to compile: $@";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    # fixed typo in message: "anonymouse" -> "anonymous"
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
5d7b27cf |
211 | |
1f0d0633 |
# Deserialize and run a .sql-$type file; only JSON is currently supported.
method _run_serialized_sql($filename, $type) {
  if (lc $type eq 'json') {
    return $self->_run_sql_array($self->_json->decode(
      do { local( @ARGV, $/ ) = $filename; <> } # slurp
    ))
  } else {
    # fixed typo in message: "serialzed" -> "serialized"
    croak "$type is not one of the supported serialized types"
  }
}
221 | |
# Dispatch each file to the appropriate runner based on its extension
# (.sql, .sql-<type>, .pl) and collect the SQL that was run.  When
# txn_wrap is set the whole batch runs inside one transaction.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};

  # BUGFIX: was "my $guard = ... if $self->txn_wrap;" -- `my` with a
  # statement-modifier conditional is documented undefined behavior in
  # Perl; declare first, then conditionally assign.
  my $guard;
  $guard = $self->schema->txn_scope_guard if $self->txn_wrap;

  my $sql = '';
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

  $guard->commit if $self->txn_wrap;

  return $sql;
}
243 | |
# Deploy the schema at the requested (or current) version by running every
# 'schema'-phase file for the connected database type.
sub deploy {
  my $self    = shift;
  my $args    = shift @_ || {};
  my $version = $args->{version} || $self->schema_version;
  log_info { "[DBICDH] deploying version $version" };

  my $filenames = $self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
    $version,
  );
  return $self->_run_sql_and_perl($filenames);
}
254 | |
80ff6f6d |
# Run the preinstall perl scripts for a version.  Unlike the other stages
# no SQL is run (there may not even be a database yet) and the subs are
# called with no arguments.
sub preinstall {
  my $self = shift;
  # ROBUSTNESS: default the args hashref like deploy() does, instead of
  # dereferencing a possibly-undef argument.
  my $args = shift || {};
  my $version = $args->{version} || $self->schema_version;
  log_info { "[DBICDH] preinstalling version $version" };
  my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;

  my @files = @{$self->_ddl_preinstall_consume_filenames(
    $storage_type,
    $version,
  )};

  for my $filename (@files) {
    # We ignore sql for now (till I figure out what to do with it)
    if ( $filename =~ /^(.+)\.pl$/ ) {
      # slurp the whole script
      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

      no warnings 'redefine';
      my $fn = eval "$filedata";
      use warnings;

      if ($@) {
        carp "$filename failed to compile: $@";
      } elsif (ref $fn eq 'CODE') {
        $fn->()
      } else {
        carp "$filename should define an anonymous sub but it didn't!";
      }
    } else {
      croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
    }
  }
}
288 | |
c8a2f7bd |
# Translate the DBIC schema into DDL for every configured database type
# and write each result (as serialized JSON statements) to the filename
# produced by $to_file, a method name or coderef invoked as
# $self->$to_file($db, $version, $dir).
sub _prepare_install {
  my $self     = shift;
  my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
  my $to_file  = shift;

  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $version   = $self->schema_version;

  my $sqlt = SQL::Translator->new({
    no_comments             => 1,
    add_drop_table          => 1,
    ignore_constraint_names => 1,
    ignore_index_names      => 1,
    parser                  => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs}
  });

  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  for my $db (@$databases) {
    # reuse one translator object; just swap producer per database
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->$to_file($db, $version, $dir);
    if (-e $filename) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $sql = $self->_generate_final_sql($sqlt);
    unless ($sql) {
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    open my $fh, q(>), $filename;
    print {$fh} $sql;
    close $fh;
  }
}
331 | |
1f0d0633 |
# Serialize the translated statements as pretty-printed JSON.
method _generate_final_sql($sqlt) {
  my @statements = $sqlt->translate;
  return $self->_json->encode(\@statements);
}
336 | |
c8a2f7bd |
# Returns a coderef which, invoked as a method with ($type, $version),
# yields the install filename for one result source (and creates the
# directory it lives in).
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;

  return sub {
    my ($self, $type, $version) = @_;
    my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($dirname) unless -d $dirname;

    return catfile( $dirname, "001-auto-$source_name.sql-json" );
  };
}
347 | |
# Deploy the DDL for a single result source at a specific version.
sub install_resultsource {
  my ($self, $args) = @_;
  my $source  = $args->{result_source};
  my $version = $args->{version};
  log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };

  my $filename_for =
    $self->_resultsource_install_filename($source->source_name);
  my $install_file =
    $self->$filename_for( $self->storage->sqlt_type, $version );

  $self->_run_sql_and_perl([ $install_file ]);
}
364 | |
# Generate the DDL file that install_resultsource will later run, limiting
# the parser to the single named source.
sub prepare_resultsource_install {
  my $self   = shift;
  my $source = (shift @_)->{result_source};
  log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };

  my $filename = $self->_resultsource_install_filename($source->source_name);
  $self->_prepare_install(
    { parser_args => { sources => [$source->source_name], } },
    $filename,
  );
}
375 | |
91557c90 |
# Generate the full-schema DDL files for the current schema version.
sub prepare_deploy {
  log_info { '[DBICDH] preparing deploy' };
  my $self = shift;
  return $self->_prepare_install({}, '_ddl_schema_produce_filename');
}
381 | |
a41a04e5 |
# Generate the up-diff files for migrating between two schema versions.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    '[DBICDH] preparing upgrade ' .
    "from $args->{from_version} to $args->{to_version}"
  };
  return $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up',
  );
}

# Generate the down-diff files for migrating between two schema versions.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    '[DBICDH] preparing downgrade ' .
    "from $args->{from_version} to $args->{to_version}"
  };
  return $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down',
  );
}
403 | |
# Build the up/down diff SQL between two schema versions for every
# configured database and serialize it to the appropriate diff file.
# $direction is 'up' or 'down'; $version_set names the diff directory.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $sqltargs  = $self->sql_translator_args;

  my $schema_version = $self->schema_version;

  $sqltargs = {
    add_drop_table          => 1,
    no_comments             => 1,
    ignore_constraint_names => 1,
    ignore_index_names      => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  for my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
    unless (-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }
    my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if (-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    # Re-parse the previously generated DDL so we can diff like against like.
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $sql = $self->_default_read_sql_file_as_string($prefilename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $source_schema = $t->schema;

      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
    # And we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
      my $sql = $self->_default_read_sql_file_as_string($filename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    open my $file, q(>), $diff_file;
    print {$file}
      $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
    close $file;
  }
}
496 | |
1f0d0633 |
# Serialize the schema-diff statements as pretty-printed JSON.
method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
  my @diff = SQL::Translator::Diff::schema_diff(
    $source_schema, $db,
    $dest_schema, $db,
    $sqltargs
  );
  return $self->_json->encode(\@diff);
}
506 | |
334bced5 |
# Slurp a .sql file and split it into individual statements on ";\n".
# Returns nothing for a false filename, otherwise an arrayref.
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file;
  my $contents = join '', <$fh>;
  close $fh;

  my @statements = split /;\n/, $contents;
  return \@statements;
}
516 | |
1f0d0633 |
# Slurp a .sql-json file and join its statements back into one SQL string,
# terminating each statement with ";\n".
method _default_read_sql_file_as_string($file) {
  my $statements = $self->_json->decode(
    do { local( @ARGV, $/ ) = $file; <> } # slurp
  );
  return join q(), map "$_;\n", @{$statements};
}
522 | |
7d2a6974 |
# Run one downgrade step (a [from, to] version set); returns ['', $sql].
sub downgrade_single_step {
  my $self        = shift;
  my $version_set = (shift @_)->{version_set};
  log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };

  my $sql = $self->_run_sql_and_perl(
    $self->_ddl_schema_down_consume_filenames(
      $self->storage->sqlt_type,
      $version_set,
    )
  );

  return ['', $sql];
}

# Run one upgrade step (a [from, to] version set); returns ['', $sql].
sub upgrade_single_step {
  my $self        = shift;
  my $version_set = (shift @_)->{version_set};
  log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };

  my $sql = $self->_run_sql_and_perl(
    $self->_ddl_schema_up_consume_filenames(
      $self->storage->sqlt_type,
      $version_set,
    )
  );
  return ['', $sql];
}
547 | |
aabd4237 |
548 | __PACKAGE__->meta->make_immutable; |
549 | |
2e68a8e1 |
550 | 1; |
e051bb00 |
551 | |
e52174e3 |
552 | # vim: ts=2 sw=2 expandtab |
553 | |
e051bb00 |
554 | __END__ |
555 | |
bcc72297 |
556 | =head1 DESCRIPTION |
557 | |
558 | This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes care of |
559 | generating sql files representing schemata as well as sql files to move from |
one version of a schema to the next. One of the hallmark features of this
561 | class is that it allows for multiple sql files for deploy and upgrade, allowing |
562 | developers to fine tune deployment. In addition it also allows for perl files |
563 | to be run at any stage of the process. |
564 | |
565 | For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's |
566 | documented here is extra fun stuff or private methods. |
567 | |
568 | =head1 DIRECTORY LAYOUT |
569 | |
92c34cab |
570 | Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's |
571 | heavily based upon L<DBIx::Migration::Directories>, but has some extensions and |
572 | modifications, so even if you are familiar with it, please read this. I feel |
573 | like the best way to describe the layout is with the following example: |
574 | |
575 | $sql_migration_dir |
576 | |- SQLite |
577 | | |- down |
4f85efc6 |
578 | | | `- 2-1 |
92c34cab |
579 | | | `- 001-auto.sql |
580 | | |- schema |
581 | | | `- 1 |
582 | | | `- 001-auto.sql |
583 | | `- up |
584 | | |- 1-2 |
585 | | | `- 001-auto.sql |
586 | | `- 2-3 |
587 | | `- 001-auto.sql |
588 | |- _common |
589 | | |- down |
4f85efc6 |
590 | | | `- 2-1 |
92c34cab |
591 | | | `- 002-remove-customers.pl |
592 | | `- up |
593 | | `- 1-2 |
594 | | `- 002-generate-customers.pl |
595 | |- _generic |
596 | | |- down |
4f85efc6 |
597 | | | `- 2-1 |
92c34cab |
598 | | | `- 001-auto.sql |
599 | | |- schema |
600 | | | `- 1 |
601 | | | `- 001-auto.sql |
602 | | `- up |
603 | | `- 1-2 |
604 | | |- 001-auto.sql |
605 | | `- 002-create-stored-procedures.sql |
606 | `- MySQL |
607 | |- down |
4f85efc6 |
608 | | `- 2-1 |
92c34cab |
609 | | `- 001-auto.sql |
80ff6f6d |
610 | |- preinstall |
611 | | `- 1 |
612 | | |- 001-create_database.pl |
613 | | `- 002-create_users_and_permissions.pl |
92c34cab |
614 | |- schema |
615 | | `- 1 |
616 | | `- 001-auto.sql |
617 | `- up |
618 | `- 1-2 |
619 | `- 001-auto.sql |
620 | |
621 | So basically, the code |
622 | |
623 | $dm->deploy(1) |
624 | |
625 | on an C<SQLite> database that would simply run |
626 | C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>. Next, |
627 | |
628 | $dm->upgrade_single_step([1,2]) |
629 | |
630 | would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by |
631 | C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>. |
632 | |
633 | Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of |
634 | the time it probably should be, since perl scripts will mostly be database |
635 | independent. |
636 | |
637 | C<_generic> exists for when you for some reason are sure that your SQL is |
638 | generic enough to run on all databases. Good luck with that one. |
639 | |
80ff6f6d |
640 | Note that unlike most steps in the process, C<preinstall> will not run SQL, as |
there may not even be a database at preinstall time. It will run perl scripts
642 | just like the other steps in the process, but nothing is passed to them. |
643 | Until people have used this more it will remain freeform, but a recommended use |
644 | of preinstall is to have it prompt for username and password, and then call the |
645 | appropriate C<< CREATE DATABASE >> commands etc. |
646 | |
92c34cab |
647 | =head1 PERL SCRIPTS |
648 | |
7d0b0f2b |
649 | A perl script for this tool is very simple. It merely needs to contain an |
anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
92c34cab |
651 | A very basic perl script might look like: |
652 | |
653 | #!perl |
654 | |
655 | use strict; |
656 | use warnings; |
657 | |
7d0b0f2b |
658 | sub { |
92c34cab |
659 | my $schema = shift; |
660 | |
661 | $schema->resultset('Users')->create({ |
662 | name => 'root', |
663 | password => 'root', |
664 | }) |
665 | } |
bcc72297 |
666 | |
eb28403b |
667 | =attr schema |
a65184c8 |
668 | |
bcc72297 |
669 | The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database |
670 | and generate the DDL. |
671 | |
eb28403b |
672 | =attr storage |
a65184c8 |
673 | |
bcc72297 |
674 | The L<DBIx::Class::Storage> that is I<actually> used to talk to the database |
675 | and generate the DDL. This is automatically created with L</_build_storage>. |
676 | |
02a7b8ac |
677 | =attr sql_translator_args |
cfc9edf9 |
678 | |
02a7b8ac |
679 | The arguments that get passed to L<SQL::Translator> when it's used. |
a65184c8 |
680 | |
91adde75 |
681 | =attr script_directory |
cfc9edf9 |
682 | |
91adde75 |
683 | The directory (default C<'sql'>) that scripts are stored in |
cfc9edf9 |
684 | |
eb28403b |
685 | =attr databases |
cfc9edf9 |
686 | |
687 | The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to |
688 | generate files for |
689 | |
eb28403b |
690 | =attr txn_wrap |
691 | |
bcc72297 |
692 | Set to true (which is the default) to wrap all upgrades and deploys in a single |
693 | transaction. |
694 | |
73caa630 |
695 | =attr schema_version |
696 | |
697 | The version the schema on your harddrive is at. Defaults to |
698 | C<< $self->schema->schema_version >>. |
699 | |
db223aff |
700 | =begin comment |
701 | |
702 | =head2 __ddl_consume_with_prefix |
a65184c8 |
703 | |
bcc72297 |
704 | $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' ) |
705 | |
706 | This is the meat of the multi-file upgrade/deploy stuff. It returns a list of |
707 | files in the order that they should be run for a generic "type" of upgrade. |
708 | You should not be calling this in user code. |
709 | |
db223aff |
710 | =head2 _ddl_schema_consume_filenames |
a65184c8 |
711 | |
bcc72297 |
  $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
713 | |
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
715 | initial deploy. |
716 | |
db223aff |
717 | =head2 _ddl_schema_produce_filename |
a65184c8 |
718 | |
bcc72297 |
719 | $dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] ) |
720 | |
721 | Returns a single file in which an initial schema will be stored. |
722 | |
db223aff |
723 | =head2 _ddl_schema_up_consume_filenames |
a65184c8 |
724 | |
bcc72297 |
725 | $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
726 | |
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
728 | upgrade. |
729 | |
db223aff |
730 | =head2 _ddl_schema_down_consume_filenames |
a65184c8 |
731 | |
bcc72297 |
732 | $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
733 | |
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for a
735 | downgrade. |
736 | |
db223aff |
=head2 _ddl_schema_up_produce_filename
a65184c8 |
738 | |
bcc72297 |
739 | $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
740 | |
741 | Returns a single file in which the sql to upgrade from one schema to another |
742 | will be stored. |
743 | |
db223aff |
744 | =head2 _ddl_schema_down_produce_filename |
bcc72297 |
745 | |
746 | $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
747 | |
748 | Returns a single file in which the sql to downgrade from one schema to another |
749 | will be stored. |
a65184c8 |
750 | |
db223aff |
751 | =head2 _resultsource_install_filename |
a65184c8 |
752 | |
bcc72297 |
753 | my $filename_fn = $dm->_resultsource_install_filename('User'); |
754 | $dm->$filename_fn('SQLite', '1.00') |
755 | |
756 | Returns a function which in turn returns a single filename used to install a |
757 | single resultsource. Weird interface is convenient for me. Deal with it. |
758 | |
db223aff |
759 | =head2 _run_sql_and_perl |
eb28403b |
760 | |
bcc72297 |
761 | $dm->_run_sql_and_perl([qw( list of filenames )]) |
a65184c8 |
762 | |
bcc72297 |
763 | Simply put, this runs the list of files passed to it. If the file ends in |
764 | C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file. |
a65184c8 |
765 | |
bcc72297 |
766 | Depending on L</txn_wrap> all of the files run will be wrapped in a single |
767 | transaction. |
eb28403b |
768 | |
db223aff |
769 | =head2 _prepare_install |
a65184c8 |
770 | |
bcc72297 |
771 | $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' }) |
a65184c8 |
772 | |
bcc72297 |
773 | Generates the sql file for installing the database. First arg is simply |
774 | L<SQL::Translator> args and the second is a coderef that returns the filename |
775 | to store the sql in. |
a65184c8 |
776 | |
db223aff |
777 | =head2 _prepare_changegrade |
bcc72297 |
778 | |
779 | $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up') |
a65184c8 |
780 | |
bcc72297 |
781 | Generates the sql file for migrating from one schema version to another. First |
782 | arg is the version to start from, second is the version to go to, third is the |
783 | L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the |
784 | direction of the changegrade, be it 'up' or 'down'. |
a65184c8 |
785 | |
db223aff |
786 | =head2 _read_sql_file |
a65184c8 |
787 | |
bcc72297 |
788 | $dm->_read_sql_file('foo.sql') |
a65184c8 |
789 | |
bcc72297 |
790 | Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments, |
791 | transactions, and blank lines. |
eb28403b |
792 | |
db223aff |
793 | =end comment |