Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
334bced5 |
2 | use Moose; |
9af9d0b2 |
3 | |
9a3a62f1 |
4 | # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories |
5 | |
9af9d0b2 |
6 | use autodie; |
7 | use Carp qw( carp croak ); |
0df68524 |
8 | use Log::Contextual::WarnLogger; |
9 | use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({ |
10 | env_prefix => 'DBICDH' |
11 | }); |
12 | use Data::Dumper::Concise; |
9af9d0b2 |
13 | |
2e68a8e1 |
14 | use Method::Signatures::Simple; |
7f50d101 |
15 | use Try::Tiny; |
9af9d0b2 |
16 | |
d23c7c77 |
17 | use SQL::Translator; |
18 | require SQL::Translator::Diff; |
9af9d0b2 |
19 | |
d23c7c77 |
20 | require DBIx::Class::Storage; # loaded for type constraint |
41863428 |
21 | use DBIx::Class::DeploymentHandler::Types; |
22 | |
9af9d0b2 |
23 | use File::Path 'mkpath'; |
24 | use File::Spec::Functions; |
2e68a8e1 |
25 | |
7521a845 |
26 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
27 | |
d54b8d69 |
# The L<DBIx::Class::Schema> (required) whose DDL we generate and against
# which migration scripts are run.
has schema => (
  isa => 'DBIx::Class::Schema',
  is  => 'ro',
  required => 1,
);

# The L<DBIx::Class::Storage> actually used to talk to the database.
# Lazily built from the schema's storage; see _build_storage.
has storage => (
  isa => 'DBIx::Class::Storage',
  is  => 'ro',
  lazy_build => 1,
);
39 | |
2eaf903b |
# Default builder for the storage attribute: use the schema's own storage,
# after forcing it to settle on a concrete driver class (so later calls
# such as sqlt_type report the real database type).
method _build_storage {
  my $storage = $self->schema->storage;
  $storage->_determine_driver;
  return $storage;
}
45 | |
02a7b8ac |
# Extra arguments merged into every L<SQL::Translator> invocation.
has sql_translator_args => (
  isa => 'HashRef',
  is => 'ro',
  default => sub { {} },
);

# Root directory holding the migration scripts (default 'sql').
# NOTE(review): required together with a default is redundant -- the
# default always satisfies required.
has script_directory => (
  isa => 'Str',
  is => 'ro',
  required => 1,
  default => 'sql',
);

# Database types we generate DDL files for (type coercion provided by
# DBIx::Class::DeploymentHandler::Types).
has databases => (
  coerce => 1,
  isa => 'DBIx::Class::DeploymentHandler::Databases',
  is => 'ro',
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);

# When true (the default) each deploy/upgrade/downgrade is wrapped in a
# single transaction via txn_scope_guard.
has txn_wrap => (
  is => 'ro',
  isa => 'Bool',
  default => 1,
);
70 | |
73caa630 |
# The version of the schema on disk; used to name generated files.
has schema_version => (
  is => 'ro',
  isa => 'Str',
  lazy_build => 1,
);

# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }
81 | |
1f0d0633 |
# Lazily-built JSON codec used to (de)serialize the SQL statement arrays
# stored in .sql-json files.
has _json => (
  is => 'ro',
  lazy_build => 1,
);

sub _build__json { require JSON; JSON->new->pretty }
88 | |
76d311e7 |
# Collect the runnable files for one migration step.  $prefix is one of
# 'schema', 'up', 'down' or 'preinstall'; $versions is an arrayref whose
# elements are joined with '-' to form the directory name.  Precedence:
# the db-specific directory beats _generic, and a _common file only fills
# in when no db-specific file of the same basename exists.  Returns an
# arrayref of paths sorted by basename (which fixes the run order).
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->script_directory;

  my $main = catfile( $base_dir, $type );
  my $generic = catfile( $base_dir, '_generic' );
  my $common =
    catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );

  # Prefer the database-specific tree; fall back to _generic.
  my $dir;
  if (-d $main) {
    $dir = catfile($main, $prefix, join q(-), @{$versions})
  } elsif (-d $generic) {
    $dir = catfile($generic, $prefix, join q(-), @{$versions});
  } else {
    croak "neither $main or $generic exist; please write/generate some SQL";
  }

  # Pick up .sql, .pl and serialized .sql-<format> files, keyed by basename.
  opendir my($dh), $dir;
  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
  closedir $dh;

  # _common files are added only when the basename isn't already taken.
  # (Note: only .sql and .pl are considered here, not .sql-<format>.)
  if (-d $common) {
    opendir my($dh), $common;
    for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) {
      unless ($files{$filename}) {
        $files{$filename} = catfile($common,$filename);
      }
    }
    closedir $dh;
  }

  return [@files{sort keys %files}]
}
3c1b5ee8 |
122 | |
fc4b7602 |
# Curried __ddl_consume_with_prefix: scripts run before install.
method _ddl_preinstall_consume_filenames($type, $version) {
  $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall')
}

# Curried __ddl_consume_with_prefix: scripts for an initial deploy.
method _ddl_schema_consume_filenames($type, $version) {
  $self->__ddl_consume_with_prefix($type, [ $version ], 'schema')
}
130 | |
76d311e7 |
# Filename into which the initial-deploy DDL for one db type/version is
# written.  Creates the directory on demand.
method _ddl_schema_produce_filename($type, $version) {
  my $schema_dir = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($schema_dir) unless -d $schema_dir;

  return catfile( $schema_dir, '001-auto.sql-json' );
}
137 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: scripts for an upgrade between versions.
method _ddl_schema_up_consume_filenames($type, $versions) {
  $self->__ddl_consume_with_prefix($type, $versions, 'up')
}

# Curried __ddl_consume_with_prefix: scripts for a downgrade between versions.
method _ddl_schema_down_consume_filenames($type, $versions) {
  $self->__ddl_consume_with_prefix($type, $versions, 'down')
}
145 | |
76d311e7 |
# Filename into which the upgrade diff for a version set is written,
# e.g. <script_directory>/<type>/up/1-2/001-auto.sql-json.
method _ddl_schema_up_produce_filename($type, $versions) {
  my $base = $self->script_directory;

  my $target = catfile( $base, $type, 'up', join q(-), @{$versions} );
  mkpath($target) unless -d $target;

  return catfile( $target, '001-auto.sql-json' );
}
154 | |
76d311e7 |
# Filename into which the downgrade diff for a version set is written.
# Unlike the "up" variant, the base directory is supplied by the caller.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  my $target = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($target) unless -d $target;

  return catfile( $target, '001-auto.sql-json' );
}
161 | |
f36afe83 |
# Execute an arrayref of SQL statements one at a time (so failures can
# name the offending statement).  Blank entries, comment lines and
# explicit transaction statements are stripped first -- transactions are
# handled by txn_wrap.  Returns the cleaned statements joined by newlines.
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    # the whole reason we do this is so that we can see the line that was run
    try {
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      die "$_ (running line '$line')"
    };
    # BUGFIX: the semicolon above was missing; without it the following
    # statement was parsed as an extra argument to Try::Tiny's catch(),
    # making try() croak at runtime before ever running the SQL.
    $storage->_query_end($line);
  }
  return join "\n", @$sql
}
187 | |
# Read one .sql file, split it into statements and run them.
method _run_sql($filename) {
  log_debug { "[DBICDH] Running SQL from $filename" };
  my $statements = $self->_read_sql_file($filename);
  return $self->_run_sql_array($statements);
}
2e68a8e1 |
192 | |
5d7b27cf |
# Run a migration .pl file.  The file must evaluate to a code ref, which
# is called with the schema as its only argument.
method _run_perl($filename) {
  log_debug { "[DBICDH] Running Perl from $filename" };
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; # slurp

  # NOTE(review): string-eval of an on-disk script; scripts are trusted
  # by design here.
  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  # BUGFIX: capture $@ immediately -- the logging block below runs
  # arbitrary code and could clobber the global error variable before
  # the original code got around to checking it.
  my $err = $@;
  log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };

  if ($err) {
    carp "$filename failed to compile: $err";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
5d7b27cf |
210 | |
1f0d0633 |
# Run a serialized SQL file (.sql-<type>).  Currently only the JSON
# serialization is supported; anything else is a fatal error.
method _run_serialized_sql($filename, $type) {
  if (lc $type eq 'json') {
    return $self->_run_sql_array($self->_json->decode(
      do { local( @ARGV, $/ ) = $filename; <> } # slurp
    ))
  } else {
    # BUGFIX: "serialzed" typo in the user-facing error message
    croak "$type is not one of the supported serialized types"
  }
}
220 | |
# Run a list of migration files in order, dispatching on extension:
# .sql (plain SQL), .sql-<format> (serialized SQL), .pl (perl script).
# Wraps the whole run in one transaction when txn_wrap is set.  Returns
# the concatenated SQL that was executed.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};

  # BUGFIX: "my $x = EXPR if COND" has undefined behavior in Perl
  # (see perlsyn); declare first, then conditionally assign.
  my $guard;
  $guard = $self->schema->txn_scope_guard if $self->txn_wrap;

  my $sql = '';
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

  # Commit the guard (if any); an exception above rolls everything back.
  $guard->commit if $self->txn_wrap;

  return $sql;
}
242 | |
# Deploy the schema at the requested (or current) version by running the
# 'schema' scripts for the connected database type.
sub deploy {
  my $self = shift;
  my $args = shift @_ || {};
  my $version = $args->{version} || $self->schema_version;
  log_info { "[DBICDH] deploying version $version" };

  my $files = $self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
    $version,
  );
  return $self->_run_sql_and_perl($files);
}
253 | |
80ff6f6d |
# Run preinstall perl scripts for a version.  SQL files are skipped, and
# the scripts are called with no arguments (there may not even be a
# database yet, which is also why storage_type can be passed in rather
# than introspected from a live connection).
sub preinstall {
  my $self = shift;
  my $args = shift;
  my $version = $args->{version} || $self->schema_version;
  log_info { "[DBICDH] preinstalling version $version" };
  my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;

  my @files = @{$self->_ddl_preinstall_consume_filenames(
    $storage_type,
    $version,
  )};

  for my $filename (@files) {
    # We ignore sql for now (till I figure out what to do with it)
    if ( $filename =~ /^(.+)\.pl$/ ) {
      # slurp the whole script
      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

      # NOTE(review): string-eval of an on-disk script; scripts are
      # trusted by design here.  The script must return a code ref.
      no warnings 'redefine';
      my $fn = eval "$filedata";
      use warnings;

      if ($@) {
        carp "$filename failed to compile: $@";
      } elsif (ref $fn eq 'CODE') {
        $fn->()
      } else {
        carp "$filename should define an anonymous sub but it didn't!";
      }
    } else {
      croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
    }
  }
}
287 | |
c8a2f7bd |
# Generate and serialize the full-deploy DDL for every configured database
# type.  First arg is a hashref of SQL::Translator args merged over
# sql_translator_args; second is a method name or coderef returning the
# output filename for a given ($db, $version, $dir).
sub _prepare_install {
  my $self = shift;
  my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
  my $to_file = shift;
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->script_directory;
  my $version = $self->schema_version;

  my $sqlt = SQL::Translator->new({
    no_comments => 1,
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    parser => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs}
  });

  # Parse the DBIC schema once; each producer reuses the parsed result.
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    # Direct hash poke: reinstall the already-parsed schema so we don't
    # re-translate for every database type.
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->$to_file($db, $version, $dir);
    if (-e $filename ) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $sql = $self->_generate_final_sql($sqlt);
    if(!$sql) {
      # A failed producer is a warning, not fatal -- other dbs may work.
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    open my $file, q(>), $filename;
    print {$file} $sql;
    close $file;
  }
}
330 | |
1f0d0633 |
# Serialize the per-statement SQL produced by the translator into JSON.
method _generate_final_sql($sqlt) {
  return $self->_json->encode([ $sqlt->translate ]);
}
335 | |
c8a2f7bd |
# Returns a code ref (invoked later as a dynamic method) that yields the
# filename used to install a single result source for a db type/version.
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;

  return sub {
    my ($self, $type, $version) = @_;
    my $target_dir = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($target_dir) unless -d $target_dir;

    return catfile( $target_dir, "001-auto-$source_name.sql-json" );
  };
}
346 | |
# Install a single result source at a given version by running its
# previously prepared per-source DDL file.
sub install_resultsource {
  my ($self, $args) = @_;
  my $source = $args->{result_source};
  my $version = $args->{version};
  log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };

  my $filename_method =
    $self->_resultsource_install_filename($source->source_name);

  my @to_run = $self->$filename_method(
    $self->storage->sqlt_type,
    $version,
  );
  $self->_run_sql_and_perl(\@to_run);
}
363 | |
# Generate the DDL file for installing a single result source, by running
# _prepare_install restricted (via parser_args) to just that source.
sub prepare_resultsource_install {
  my $self = shift;
  my $source = (shift @_)->{result_source};
  log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };

  my $to_file = $self->_resultsource_install_filename($source->source_name);
  my $overrides = {
    parser_args => { sources => [$source->source_name], }
  };
  $self->_prepare_install($overrides, $to_file);
}
374 | |
91557c90 |
# Write out the serialized full-schema DDL for the current schema_version.
sub prepare_deploy {
  log_info { '[DBICDH] preparing deploy' };
  my ($self) = @_;
  return $self->_prepare_install({}, '_ddl_schema_produce_filename');
}
380 | |
a41a04e5 |
# Generate the upgrade diff SQL between two schema versions.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    '[DBICDH] preparing upgrade ' .
    "from $args->{from_version} to $args->{to_version}"
  };
  my @spec = @{$args}{qw(from_version to_version version_set)};
  $self->_prepare_changegrade(@spec, 'up');
}
391 | |
# Generate the downgrade diff SQL between two schema versions.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    '[DBICDH] preparing downgrade ' .
    "from $args->{from_version} to $args->{to_version}"
  };
  my @spec = @{$args}{qw(from_version to_version version_set)};
  $self->_prepare_changegrade(@spec, 'down');
}
402 | |
# Generate the serialized diff SQL to move between two schema versions.
# $direction is 'up' or 'down'; $version_set names the output directory.
# For each database type we diff the stored "from" DDL (re-parsed) against
# the current DBIC schema -- or, for old-style producers, against the
# re-parsed "to" DDL.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->script_directory;
  my $sqltargs = $self->sql_translator_args;

  my $schema_version = $self->schema_version;

  # Defaults, overridable via sql_translator_args.
  $sqltargs = {
    add_drop_table => 1,
    no_comments => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  # Parse the DBIC schema once; each producer reuses the parsed result.
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    # A previously produced schema file is required to diff against.
    my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
    unless(-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }
    my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if(-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    # Re-parse the stored "from" DDL so we diff like against like.
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $sql = $self->_default_read_sql_file_as_string($prefilename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $source_schema = $t->schema;

      # Diff output uses the schema name; fall back to the filename.
      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
    # And we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
      my $sql = $self->_default_read_sql_file_as_string($filename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    open my $file, q(>), $diff_file;
    print {$file}
      $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
    close $file;
  }
}
495 | |
1f0d0633 |
# Diff two SQL::Translator schemas for one database type and serialize the
# resulting statements the same way generated DDL is serialized.
method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
  my @statements = SQL::Translator::Diff::schema_diff(
    $source_schema, $db,
    $dest_schema, $db,
    $sqltargs
  );
  return $self->_json->encode(\@statements);
}
505 | |
334bced5 |
# Read a plain .sql file and split it into individual statements on
# ";\n" boundaries.  Returns an arrayref (or nothing for a false $file).
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file;
  my $contents = do { local $/; <$fh> };
  close $fh;

  return [ split /;\n/, $contents ];
}
515 | |
1f0d0633 |
# Deserialize a .sql-json file back into one string of semicolon-
# terminated statements, suitable for feeding to a SQL::Translator parser.
method _default_read_sql_file_as_string($file) {
  return join q(), map "$_;\n", @{$self->_json->decode(
    do { local( @ARGV, $/ ) = $file; <> } # slurp
  )};
}
521 | |
7d2a6974 |
# Run the 'down' scripts for one step of a version set.  Returns a
# two-element arrayref: ['', $sql_that_was_run].
sub downgrade_single_step {
  my $self = shift;
  my $version_set = (shift @_)->{version_set};
  log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };

  my $files = $self->_ddl_schema_down_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  );
  my $sql = $self->_run_sql_and_perl($files);

  return ['', $sql];
}
534 | |
7d2a6974 |
# Run the 'up' scripts for one step of a version set.  Returns a
# two-element arrayref: ['', $sql_that_was_run].
sub upgrade_single_step {
  my $self = shift;
  my $version_set = (shift @_)->{version_set};
  log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };

  my $files = $self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  );
  my $sql = $self->_run_sql_and_perl($files);

  return ['', $sql];
}
546 | |
aabd4237 |
547 | __PACKAGE__->meta->make_immutable; |
548 | |
2e68a8e1 |
549 | 1; |
e051bb00 |
550 | |
e52174e3 |
551 | # vim: ts=2 sw=2 expandtab |
552 | |
e051bb00 |
553 | __END__ |
554 | |
bcc72297 |
555 | =head1 DESCRIPTION |
556 | |
0824f31f |
This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes
care of generating serialized sql files representing schemata as well
as serialized sql files to move from one version of a schema to the next.
560 | One of the hallmark features of this class is that it allows for multiple sql |
561 | files for deploy and upgrade, allowing developers to fine tune deployment. |
562 | In addition it also allows for perl files to be run |
563 | at any stage of the process. |
bcc72297 |
564 | |
565 | For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's |
566 | documented here is extra fun stuff or private methods. |
567 | |
568 | =head1 DIRECTORY LAYOUT |
569 | |
92c34cab |
570 | Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's |
571 | heavily based upon L<DBIx::Migration::Directories>, but has some extensions and |
572 | modifications, so even if you are familiar with it, please read this. I feel |
573 | like the best way to describe the layout is with the following example: |
574 | |
575 | $sql_migration_dir |
576 | |- SQLite |
577 | | |- down |
4f85efc6 |
578 | | | `- 2-1 |
0824f31f |
579 | | | `- 001-auto.sql-json |
92c34cab |
580 | | |- schema |
581 | | | `- 1 |
0824f31f |
582 | | | `- 001-auto.sql-json |
92c34cab |
583 | | `- up |
584 | | |- 1-2 |
0824f31f |
585 | | | `- 001-auto.sql-json |
92c34cab |
586 | | `- 2-3 |
0824f31f |
587 | | `- 001-auto.sql-json |
92c34cab |
588 | |- _common |
589 | | |- down |
4f85efc6 |
590 | | | `- 2-1 |
92c34cab |
591 | | | `- 002-remove-customers.pl |
592 | | `- up |
593 | | `- 1-2 |
594 | | `- 002-generate-customers.pl |
595 | |- _generic |
596 | | |- down |
4f85efc6 |
597 | | | `- 2-1 |
0824f31f |
598 | | | `- 001-auto.sql-json |
92c34cab |
599 | | |- schema |
600 | | | `- 1 |
0824f31f |
601 | | | `- 001-auto.sql-json |
92c34cab |
602 | | `- up |
603 | | `- 1-2 |
0824f31f |
604 | | |- 001-auto.sql-json |
92c34cab |
605 | | `- 002-create-stored-procedures.sql |
606 | `- MySQL |
607 | |- down |
4f85efc6 |
608 | | `- 2-1 |
0824f31f |
609 | | `- 001-auto.sql-json |
80ff6f6d |
610 | |- preinstall |
611 | | `- 1 |
612 | | |- 001-create_database.pl |
613 | | `- 002-create_users_and_permissions.pl |
92c34cab |
614 | |- schema |
615 | | `- 1 |
0824f31f |
616 | | `- 001-auto.sql-json |
92c34cab |
617 | `- up |
618 | `- 1-2 |
0824f31f |
619 | `- 001-auto.sql-json |
92c34cab |
620 | |
621 | So basically, the code |
622 | |
623 | $dm->deploy(1) |
624 | |
625 | on an C<SQLite> database that would simply run |
0824f31f |
626 | C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next, |
92c34cab |
627 | |
628 | $dm->upgrade_single_step([1,2]) |
629 | |
0824f31f |
630 | would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by |
92c34cab |
631 | C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>. |
632 | |
0824f31f |
C<.pl> files don't have to be in the C<_common> directory, but most of the time
they should be, because perl scripts are generally database independent.
92c34cab |
635 | |
636 | C<_generic> exists for when you for some reason are sure that your SQL is |
637 | generic enough to run on all databases. Good luck with that one. |
638 | |
80ff6f6d |
Note that unlike most steps in the process, C<preinstall> will not run SQL, as
there may not even be a database at preinstall time.  It will run perl scripts
just like the other steps in the process, but nothing is passed to them.
642 | Until people have used this more it will remain freeform, but a recommended use |
643 | of preinstall is to have it prompt for username and password, and then call the |
644 | appropriate C<< CREATE DATABASE >> commands etc. |
645 | |
0824f31f |
646 | =head1 SERIALIZED SQL |
647 | |
648 | The SQL that this module generates and uses is serialized into an array of |
649 | SQL statements. The reason being that some databases handle multiple |
650 | statements in a single execution differently. Generally you do not need to |
651 | worry about this as these are scripts generated for you. If you find that |
652 | you are editing them on a regular basis something is wrong and you either need |
653 | to submit a bug or consider writing extra serialized SQL or Perl scripts to run |
654 | before or after the automatically generated script. |
655 | |
656 | B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in |
657 | patches that will allow more serialization formats if you want that feature, |
658 | but if you do send me a patch for that realize that I do not want to add YAML |
659 | support or whatever, I would rather add a generic method of adding any |
660 | serialization format. |
661 | |
92c34cab |
662 | =head1 PERL SCRIPTS |
663 | |
7d0b0f2b |
A perl script for this tool is very simple.  It merely needs to contain an
anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
92c34cab |
666 | A very basic perl script might look like: |
667 | |
668 | #!perl |
669 | |
670 | use strict; |
671 | use warnings; |
672 | |
7d0b0f2b |
673 | sub { |
92c34cab |
674 | my $schema = shift; |
675 | |
676 | $schema->resultset('Users')->create({ |
677 | name => 'root', |
678 | password => 'root', |
679 | }) |
680 | } |
bcc72297 |
681 | |
eb28403b |
682 | =attr schema |
a65184c8 |
683 | |
bcc72297 |
684 | The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database |
685 | and generate the DDL. |
686 | |
eb28403b |
687 | =attr storage |
a65184c8 |
688 | |
bcc72297 |
689 | The L<DBIx::Class::Storage> that is I<actually> used to talk to the database |
690 | and generate the DDL. This is automatically created with L</_build_storage>. |
691 | |
02a7b8ac |
692 | =attr sql_translator_args |
cfc9edf9 |
693 | |
02a7b8ac |
694 | The arguments that get passed to L<SQL::Translator> when it's used. |
a65184c8 |
695 | |
91adde75 |
696 | =attr script_directory |
cfc9edf9 |
697 | |
91adde75 |
698 | The directory (default C<'sql'>) that scripts are stored in |
cfc9edf9 |
699 | |
eb28403b |
700 | =attr databases |
cfc9edf9 |
701 | |
702 | The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to |
703 | generate files for |
704 | |
eb28403b |
705 | =attr txn_wrap |
706 | |
bcc72297 |
707 | Set to true (which is the default) to wrap all upgrades and deploys in a single |
708 | transaction. |
709 | |
73caa630 |
710 | =attr schema_version |
711 | |
712 | The version the schema on your harddrive is at. Defaults to |
713 | C<< $self->schema->schema_version >>. |
714 | |
db223aff |
715 | =begin comment |
716 | |
717 | =head2 __ddl_consume_with_prefix |
a65184c8 |
718 | |
bcc72297 |
719 | $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' ) |
720 | |
721 | This is the meat of the multi-file upgrade/deploy stuff. It returns a list of |
722 | files in the order that they should be run for a generic "type" of upgrade. |
723 | You should not be calling this in user code. |
724 | |
db223aff |
725 | =head2 _ddl_schema_consume_filenames |
a65184c8 |
726 | |
bcc72297 |
 $dm->_ddl_schema_consume_filenames( 'SQLite', '1.00' )
728 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
730 | initial deploy. |
731 | |
db223aff |
732 | =head2 _ddl_schema_produce_filename |
a65184c8 |
733 | |
bcc72297 |
 $dm->_ddl_schema_produce_filename( 'SQLite', '1.00' )
735 | |
736 | Returns a single file in which an initial schema will be stored. |
737 | |
db223aff |
738 | =head2 _ddl_schema_up_consume_filenames |
a65184c8 |
739 | |
bcc72297 |
740 | $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
741 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
743 | upgrade. |
744 | |
db223aff |
745 | =head2 _ddl_schema_down_consume_filenames |
a65184c8 |
746 | |
bcc72297 |
747 | $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
748 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for a
750 | downgrade. |
751 | |
db223aff |
=head2 _ddl_schema_up_produce_filename
a65184c8 |
753 | |
bcc72297 |
754 | $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
755 | |
756 | Returns a single file in which the sql to upgrade from one schema to another |
757 | will be stored. |
758 | |
db223aff |
759 | =head2 _ddl_schema_down_produce_filename |
bcc72297 |
760 | |
761 | $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
762 | |
763 | Returns a single file in which the sql to downgrade from one schema to another |
764 | will be stored. |
a65184c8 |
765 | |
db223aff |
766 | =head2 _resultsource_install_filename |
a65184c8 |
767 | |
bcc72297 |
768 | my $filename_fn = $dm->_resultsource_install_filename('User'); |
769 | $dm->$filename_fn('SQLite', '1.00') |
770 | |
771 | Returns a function which in turn returns a single filename used to install a |
772 | single resultsource. Weird interface is convenient for me. Deal with it. |
773 | |
db223aff |
774 | =head2 _run_sql_and_perl |
eb28403b |
775 | |
bcc72297 |
776 | $dm->_run_sql_and_perl([qw( list of filenames )]) |
a65184c8 |
777 | |
bcc72297 |
778 | Simply put, this runs the list of files passed to it. If the file ends in |
779 | C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file. |
a65184c8 |
780 | |
bcc72297 |
781 | Depending on L</txn_wrap> all of the files run will be wrapped in a single |
782 | transaction. |
eb28403b |
783 | |
db223aff |
784 | =head2 _prepare_install |
a65184c8 |
785 | |
bcc72297 |
786 | $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' }) |
a65184c8 |
787 | |
bcc72297 |
788 | Generates the sql file for installing the database. First arg is simply |
789 | L<SQL::Translator> args and the second is a coderef that returns the filename |
790 | to store the sql in. |
a65184c8 |
791 | |
db223aff |
792 | =head2 _prepare_changegrade |
bcc72297 |
793 | |
794 | $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up') |
a65184c8 |
795 | |
bcc72297 |
796 | Generates the sql file for migrating from one schema version to another. First |
797 | arg is the version to start from, second is the version to go to, third is the |
798 | L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the |
799 | direction of the changegrade, be it 'up' or 'down'. |
a65184c8 |
800 | |
db223aff |
801 | =head2 _read_sql_file |
a65184c8 |
802 | |
bcc72297 |
803 | $dm->_read_sql_file('foo.sql') |
a65184c8 |
804 | |
bcc72297 |
805 | Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments, |
806 | transactions, and blank lines. |
eb28403b |
807 | |
db223aff |
808 | =end comment |