Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
334bced5 |
2 | use Moose; |
9af9d0b2 |
3 | |
9a3a62f1 |
4 | # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories |
5 | |
9af9d0b2 |
6 | use autodie; |
7 | use Carp qw( carp croak ); |
8465e767 |
8 | use DBIx::Class::DeploymentHandler::Logger; |
9 | use Log::Contextual qw(:log :dlog), -default_logger => |
10 | DBIx::Class::DeploymentHandler::Logger->new({ |
11 | env_prefix => 'DBICDH' |
12 | }); |
9af9d0b2 |
13 | |
2e68a8e1 |
14 | use Method::Signatures::Simple; |
7f50d101 |
15 | use Try::Tiny; |
9af9d0b2 |
16 | |
d23c7c77 |
17 | use SQL::Translator; |
18 | require SQL::Translator::Diff; |
9af9d0b2 |
19 | |
d23c7c77 |
20 | require DBIx::Class::Storage; # loaded for type constraint |
41863428 |
21 | use DBIx::Class::DeploymentHandler::Types; |
22 | |
9af9d0b2 |
23 | use File::Path 'mkpath'; |
24 | use File::Spec::Functions; |
2e68a8e1 |
25 | |
7521a845 |
26 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
27 | |
d54b8d69 |
# The L<DBIx::Class::Schema> (required) used to generate DDL and to be
# handed to perl migration scripts.
has schema => (
  isa => 'DBIx::Class::Schema',
  is  => 'ro',
  required => 1,
);

# The L<DBIx::Class::Storage> actually used to talk to the database.
# Lazily derived from the schema; see _build_storage below.
has storage => (
  isa => 'DBIx::Class::Storage',
  is  => 'ro',
  lazy_build => 1,
);
39 | |
2eaf903b |
# Builder for the storage attribute: take the schema's storage and make
# sure its driver has been determined before anyone uses it.
method _build_storage {
  my $storage = $self->schema->storage;
  $storage->_determine_driver;
  return $storage;
}
45 | |
02a7b8ac |
# Extra arguments merged into those passed to SQL::Translator whenever
# it is instantiated.
has sql_translator_args => (
  isa => 'HashRef',
  is  => 'ro',
  default => sub { {} },
);

# Directory (default 'sql') under which all DDL and perl migration
# scripts are laid out.
has script_directory => (
  isa => 'Str',
  is  => 'ro',
  required => 1,
  default => 'sql',
);

# Database types to generate DDL files for; coerced through
# DBIx::Class::DeploymentHandler::Databases.
has databases => (
  coerce => 1,
  isa    => 'DBIx::Class::DeploymentHandler::Databases',
  is     => 'ro',
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);

# When true (the default) each deploy/upgrade/downgrade run is wrapped
# in a single transaction.
has txn_wrap => (
  is  => 'ro',
  isa => 'Bool',
  default => 1,
);
70 | |
73caa630 |
# The version the schema on disk is at; defaults to the schema's own
# declared version (see builder below).
has schema_version => (
  is  => 'ro',
  isa => 'Str',
  lazy_build => 1,
);

# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }

# Lazily built JSON codec used to (de)serialize the arrays of SQL
# statements stored in .sql-json files.
has _json => (
  is => 'ro',
  lazy_build => 1,
);

sub _build__json { require JSON; JSON->new->pretty }
88 | |
76d311e7 |
# Core file-discovery helper.  Returns an ArrayRef of script paths, in
# run order, for a given database $type, version set $versions
# (ArrayRef) and $prefix ('schema', 'up', 'down' or 'preinstall').
# The db-specific directory wins over _generic; files from _common are
# merged in unless shadowed by a same-named file already found.
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->script_directory;

  my $main = catfile( $base_dir, $type );
  my $generic = catfile( $base_dir, '_generic' );
  my $common =
    catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );

  my $dir;
  if (-d $main) {
    # prefer scripts written for this specific database type
    $dir = catfile($main, $prefix, join q(-), @{$versions})
  } elsif (-d $generic) {
    $dir = catfile($generic, $prefix, join q(-), @{$versions});
  } else {
    croak "neither $main or $generic exist; please write/generate some SQL";
  }

  # opendir/closedir die on failure via autodie
  opendir my($dh), $dir;
  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
  closedir $dh;

  if (-d $common) {
    opendir my($dh), $common;
    for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) {
      # a db-specific/_generic file of the same name shadows the _common one
      unless ($files{$filename}) {
        $files{$filename} = catfile($common,$filename);
      }
    }
    closedir $dh;
  }

  # sort by basename so the NNN- numeric prefixes define run order
  return [@files{sort keys %files}]
}
3c1b5ee8 |
122 | |
fc4b7602 |
# Curried __ddl_consume_with_prefix: files to run before install.
method _ddl_preinstall_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix( $type, [$version], 'preinstall' );
}
126 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: files for an initial deploy.
method _ddl_schema_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix( $type, [$version], 'schema' );
}
130 | |
76d311e7 |
# Return (and ensure the directory for) the file in which the initial
# schema for $version will be stored.
method _ddl_schema_produce_filename($type, $version) {
  my $target_dir = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($target_dir) unless -d $target_dir;

  return catfile( $target_dir, '001-auto.sql-json' );
}
137 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: files for an upgrade.
method _ddl_schema_up_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix( $type, $versions, 'up' );
}
141 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: files for a downgrade.
method _ddl_schema_down_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix( $type, $versions, 'down' );
}
145 | |
76d311e7 |
# Return (and ensure the directory for) the file in which the sql to
# upgrade across $versions will be stored.
#
# Now accepts an optional $dir, for consistency with
# _ddl_schema_down_produce_filename: callers dispatching through
# "_ddl_schema_${direction}_produce_filename" pass the script
# directory, which this method used to silently ignore.  Falls back to
# script_directory, so existing two-argument callers are unaffected.
method _ddl_schema_up_produce_filename($type, $versions, $dir) {
  $dir = $self->script_directory unless defined $dir;

  my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions} );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql-json' );
}
154 | |
76d311e7 |
# Return (and ensure the directory for) the file in which the sql to
# downgrade across $versions will be stored.
#
# Robustness: $dir now defaults to script_directory when not supplied,
# instead of producing a path rooted at undef (with a warning) —
# callers that already pass $dir are unaffected.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  $dir = $self->script_directory unless defined $dir;

  my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql-json');
}
161 | |
f36afe83 |
# Run an ArrayRef of SQL statements against the storage, one at a time,
# and return the statements joined with newlines.  Each statement is
# normalized first: whitespace trimmed, '--' comment lines removed,
# blank entries and BEGIN/COMMIT dropped (transaction handling is ours,
# via txn_wrap, not the script's).
method _run_sql_array($sql) {
  my $storage = $self->storage;

  # NOTE: the s/// inside map also mutates the caller's array elements
  # via $_ aliasing; long-standing behavior, left as is.
  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  Dlog_trace { "Running SQL $_" } $sql;
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    # the whole reason we do this is so that we can see the line that was run
    try {
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      die "$_ (running line '$line')"
    };  # semicolon is required: Try::Tiny's try/catch is a function
        # call, not real syntax — without it the next statement is a
        # syntax error
    $storage->_query_end($line);
  }
  return join "\n", @$sql
}
187 | |
# Read a plain .sql file and execute its statements; returns the SQL
# that was run, joined with newlines.
method _run_sql($filename) {
  log_debug { "Running SQL from $filename" };
  my $statements = $self->_read_sql_file($filename);
  return $self->_run_sql_array($statements);
}
2e68a8e1 |
192 | |
5d7b27cf |
# Compile and run a perl migration script.  The file must evaluate to a
# coderef, which is called with the schema as its only argument.
# Compile failures and non-coderef results warn (carp) rather than die.
method _run_perl($filename) {
  log_debug { "Running Perl from $filename" };
  # slurp the whole file
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

  # scripts may (re)define subs; don't warn about that
  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  Dlog_trace { "Running Perl $_" } $fn;

  if ($@) {
    carp "$filename failed to compile: $@";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    # fixed typo in user-facing message: "anonymouse" -> "anonymous"
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
5d7b27cf |
210 | |
1f0d0633 |
# Run a serialized SQL file (currently only the 'json' serialization is
# supported: an array of statements encoded as JSON).
method _run_serialized_sql($filename, $type) {
  if (lc $type eq 'json') {
    return $self->_run_sql_array($self->_json->decode(
      do { local( @ARGV, $/ ) = $filename; <> } # slurp
    ))
  } else {
    # fixed typo in error message: "serialzed" -> "serialized"
    croak "$type is not one of the supported serialized types"
  }
}
220 | |
# Run a list of migration files in order, dispatching on extension
# (.sql, .sql-<serialization>, .pl).  Returns the concatenated SQL that
# was executed.  If txn_wrap is set the whole run happens inside one
# transaction.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};

  # FIX: 'my $guard = EXPR if COND;' has officially undefined behavior
  # in Perl (perlsyn); declare first, assign conditionally.
  my $guard;
  $guard = $self->schema->txn_scope_guard if $self->txn_wrap;

  my $sql = '';
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

  $guard->commit if $self->txn_wrap;

  return $sql;
}
242 | |
# Deploy a schema version from scratch by running the 'schema' scripts
# for the storage's database type.  Accepts an optional hashref with a
# 'version' key; defaults to schema_version.
sub deploy {
  my ($self, $args) = @_;
  my $version = ($args || {})->{version} || $self->schema_version;
  log_info { "deploying version $version" };

  my $filenames = $self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
    $version,
  );
  return $self->_run_sql_and_perl($filenames);
}
253 | |
80ff6f6d |
# Run preinstall scripts for a version, before any database exists.
# Only .pl files are executed; each must evaluate to a coderef which is
# called with NO arguments (unlike _run_perl, there is no schema yet).
# Takes a hashref with optional 'version' and 'storage_type' keys.
sub preinstall {
  my $self = shift;
  my $args = shift;
  my $version = $args->{version} || $self->schema_version;
  log_info { "preinstalling version $version" };
  my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;

  my @files = @{$self->_ddl_preinstall_consume_filenames(
    $storage_type,
    $version,
  )};

  for my $filename (@files) {
    # We ignore sql for now (till I figure out what to do with it)
    if ( $filename =~ /^(.+)\.pl$/ ) {
      # slurp the script
      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

      # scripts may (re)define subs; don't warn about that
      no warnings 'redefine';
      my $fn = eval "$filedata";
      use warnings;

      if ($@) {
        carp "$filename failed to compile: $@";
      } elsif (ref $fn eq 'CODE') {
        # note: no schema passed — there may be no database yet
        $fn->()
      } else {
        carp "$filename should define an anonymous sub but it didn't!";
      }
    } else {
      croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
    }
  }
}
287 | |
c8a2f7bd |
# Generate the serialized install DDL for every configured database
# type.  First arg is a hashref of extra SQL::Translator args (merged
# over sql_translator_args); second is a method name or coderef that
# yields the target filename for each ($db, $version, $dir).
sub _prepare_install {
  my $self = shift;
  my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
  my $to_file = shift;
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->script_directory;
  my $version = $self->schema_version;

  my $sqlt = SQL::Translator->new({
    no_comments => 1,
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    parser => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs}
  });

  # parse the DBIC schema once; reuse the parsed tree for each producer
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    # reach into SQLT's guts to reinstall the already-parsed schema so
    # we don't re-run the parser per database
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->$to_file($db, $version, $dir);
    if (-e $filename ) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $sql = $self->_generate_final_sql($sqlt);
    if(!$sql) {
      # one producer failing shouldn't abort the remaining databases
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    # open/close are fatal on failure thanks to autodie
    open my $file, q(>), $filename;
    print {$file} $sql;
    close $file;
  }
}
330 | |
1f0d0633 |
# Serialize the statements produced by an already-configured
# SQL::Translator object into the on-disk JSON format.
method _generate_final_sql($sqlt) {
  my @statements = $sqlt->translate;
  return $self->_json->encode( \@statements );
}
335 | |
c8a2f7bd |
# Return a coderef which, given ($type, $version), yields (and ensures
# the directory for) the install filename of a single result source.
# The curried-coderef interface matches what _prepare_install expects.
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;
  return sub {
    my ($self, $type, $version) = @_;
    my $target_dir = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($target_dir) unless -d $target_dir;

    return catfile( $target_dir, "001-auto-$source_name.sql-json" );
  };
}
346 | |
# Install a single result source at a given version by running its
# generated per-source DDL file.  Takes a hashref with 'result_source'
# and 'version' keys.
sub install_resultsource {
  my ($self, $args) = @_;
  my $source = $args->{result_source};
  my $version = $args->{version};
  log_info { 'installing_resultsource ' . $source->source_name . ", version $version" };

  my $filename_method =
    $self->_resultsource_install_filename($source->source_name);

  my $ddl_file = $self->$filename_method(
    $self->storage->sqlt_type,
    $version,
  );
  $self->_run_sql_and_perl([ $ddl_file ]);
}
363 | |
# Generate the install DDL for a single result source (hashref arg with
# a 'result_source' key), restricting the parser to just that source.
sub prepare_resultsource_install {
  my ($self, $args) = @_;
  my $source = $args->{result_source};
  log_info { 'preparing install for resultsource ' . $source->source_name };

  my $filename = $self->_resultsource_install_filename($source->source_name);
  $self->_prepare_install(
    { parser_args => { sources => [ $source->source_name ] } },
    $filename,
  );
}
374 | |
91557c90 |
# Generate the full-schema install DDL for every configured database.
sub prepare_deploy {
  log_info { 'preparing deploy' };
  my ($self) = @_;
  $self->_prepare_install( {}, '_ddl_schema_produce_filename' );
}
380 | |
a41a04e5 |
# Generate upgrade diff DDL.  Takes a hashref with 'from_version',
# 'to_version' and 'version_set' keys; delegates to _prepare_changegrade
# with direction 'up'.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing upgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    @{$args}{qw(from_version to_version version_set)}, 'up',
  );
}
390 | |
# Generate downgrade diff DDL.  Takes a hashref with 'from_version',
# 'to_version' and 'version_set' keys; delegates to _prepare_changegrade
# with direction 'down'.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing downgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    @{$args}{qw(from_version to_version version_set)}, 'down',
  );
}
400 | |
# Generate the diff DDL to move between two schema versions, for every
# configured database type.  $direction is 'up' or 'down'; the output
# filename is produced by "_ddl_schema_${direction}_produce_filename".
# The source side of the diff is reparsed from the previously generated
# schema file; the destination side is the current DBIC schema (or,
# for old-style producers, likewise reparsed from its generated file so
# that both sides are normalized the same way).
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->script_directory;
  my $sqltargs = $self->sql_translator_args;

  my $schema_version = $self->schema_version;

  # defaults, overridable via sql_translator_args
  $sqltargs = {
    add_drop_table => 1,
    no_comments => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  # parse the current DBIC schema once, reused per database below
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    # reach into SQLT's guts to reuse the already-parsed schema
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
    unless(-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }
    my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if(-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    # reparse the previously generated DDL to get the "from" schema
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $sql = $self->_default_read_sql_file_as_string($prefilename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $source_schema = $t->schema;

      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
    # And we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
      my $sql = $self->_default_read_sql_file_as_string($filename);
      my $out = $t->translate( \$sql )
        or croak($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    # open/close are fatal on failure thanks to autodie
    open my $file, q(>), $diff_file;
    print {$file}
      $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
    close $file;
  }
}
493 | |
1f0d0633 |
# Diff two SQLT schemas for a given database type and serialize the
# resulting statements into the on-disk JSON format.
method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
  my @diff_statements = SQL::Translator::Diff::schema_diff(
    $source_schema, $db,
    $dest_schema,   $db,
    $sqltargs,
  );
  return $self->_json->encode( \@diff_statements );
}
503 | |
334bced5 |
# Read a plain .sql file and return its statements as an ArrayRef,
# split on ";\n".  Returns nothing when no filename is given.
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file;
  my $contents = do { local $/; <$fh> };
  close $fh;

  return [ split /;\n/, $contents ];
}
513 | |
1f0d0633 |
# Read a serialized (.sql-json) file and rebuild it as a single SQL
# string, terminating each statement with ";\n".
method _default_read_sql_file_as_string($file) {
  my $contents = do { local( @ARGV, $/ ) = $file; <> }; # slurp
  my @statements = @{ $self->_json->decode($contents) };
  return join q(), map { "$_;\n" } @statements;
}
519 | |
7d2a6974 |
# Run one downgrade step across a version set (hashref arg with a
# 'version_set' key).  Returns ['', $sql] per the HandlesDeploy role.
sub downgrade_single_step {
  my ($self, $args) = @_;
  my $version_set = $args->{version_set};
  Dlog_info { "downgrade_single_step'ing $_" } $version_set;

  my $filenames = $self->_ddl_schema_down_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  );
  my $sql = $self->_run_sql_and_perl($filenames);

  return ['', $sql];
}
532 | |
7d2a6974 |
# Run one upgrade step across a version set (hashref arg with a
# 'version_set' key).  Returns ['', $sql] per the HandlesDeploy role.
sub upgrade_single_step {
  my ($self, $args) = @_;
  my $version_set = $args->{version_set};
  Dlog_info { "upgrade_single_step'ing $_" } $version_set;

  my $filenames = $self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  );
  my $sql = $self->_run_sql_and_perl($filenames);

  return ['', $sql];
}
544 | |
aabd4237 |
545 | __PACKAGE__->meta->make_immutable; |
546 | |
2e68a8e1 |
547 | 1; |
e051bb00 |
548 | |
e52174e3 |
549 | # vim: ts=2 sw=2 expandtab |
550 | |
e051bb00 |
551 | __END__ |
552 | |
bcc72297 |
553 | =head1 DESCRIPTION |
554 | |
0824f31f |
555 | This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes |
556 | care of generating serialized sql files representing schemata as well |
as serialized sql files to move from one version of a schema to the next.
558 | One of the hallmark features of this class is that it allows for multiple sql |
559 | files for deploy and upgrade, allowing developers to fine tune deployment. |
560 | In addition it also allows for perl files to be run |
561 | at any stage of the process. |
bcc72297 |
562 | |
563 | For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's |
564 | documented here is extra fun stuff or private methods. |
565 | |
566 | =head1 DIRECTORY LAYOUT |
567 | |
92c34cab |
568 | Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's |
569 | heavily based upon L<DBIx::Migration::Directories>, but has some extensions and |
570 | modifications, so even if you are familiar with it, please read this. I feel |
571 | like the best way to describe the layout is with the following example: |
572 | |
573 | $sql_migration_dir |
574 | |- SQLite |
575 | | |- down |
4f85efc6 |
576 | | | `- 2-1 |
0824f31f |
577 | | | `- 001-auto.sql-json |
92c34cab |
578 | | |- schema |
579 | | | `- 1 |
0824f31f |
580 | | | `- 001-auto.sql-json |
92c34cab |
581 | | `- up |
582 | | |- 1-2 |
0824f31f |
583 | | | `- 001-auto.sql-json |
92c34cab |
584 | | `- 2-3 |
0824f31f |
585 | | `- 001-auto.sql-json |
92c34cab |
586 | |- _common |
587 | | |- down |
4f85efc6 |
588 | | | `- 2-1 |
92c34cab |
589 | | | `- 002-remove-customers.pl |
590 | | `- up |
591 | | `- 1-2 |
592 | | `- 002-generate-customers.pl |
593 | |- _generic |
594 | | |- down |
4f85efc6 |
595 | | | `- 2-1 |
0824f31f |
596 | | | `- 001-auto.sql-json |
92c34cab |
597 | | |- schema |
598 | | | `- 1 |
0824f31f |
599 | | | `- 001-auto.sql-json |
92c34cab |
600 | | `- up |
601 | | `- 1-2 |
0824f31f |
602 | | |- 001-auto.sql-json |
92c34cab |
603 | | `- 002-create-stored-procedures.sql |
604 | `- MySQL |
605 | |- down |
4f85efc6 |
606 | | `- 2-1 |
0824f31f |
607 | | `- 001-auto.sql-json |
80ff6f6d |
608 | |- preinstall |
609 | | `- 1 |
610 | | |- 001-create_database.pl |
611 | | `- 002-create_users_and_permissions.pl |
92c34cab |
612 | |- schema |
613 | | `- 1 |
0824f31f |
614 | | `- 001-auto.sql-json |
92c34cab |
615 | `- up |
616 | `- 1-2 |
0824f31f |
617 | `- 001-auto.sql-json |
92c34cab |
618 | |
619 | So basically, the code |
620 | |
621 | $dm->deploy(1) |
622 | |
623 | on an C<SQLite> database that would simply run |
0824f31f |
624 | C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next, |
92c34cab |
625 | |
626 | $dm->upgrade_single_step([1,2]) |
627 | |
0824f31f |
628 | would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by |
92c34cab |
629 | C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>. |
630 | |
0824f31f |
631 | C<.pl> files don't have to be in the C<_common> directory, but most of the time |
they should be, because perl scripts are generally database independent.
92c34cab |
633 | |
634 | C<_generic> exists for when you for some reason are sure that your SQL is |
635 | generic enough to run on all databases. Good luck with that one. |
636 | |
80ff6f6d |
637 | Note that unlike most steps in the process, C<preinstall> will not run SQL, as |
there may not even be a database at preinstall time.  It will run perl scripts
639 | just like the other steps in the process, but nothing is passed to them. |
640 | Until people have used this more it will remain freeform, but a recommended use |
641 | of preinstall is to have it prompt for username and password, and then call the |
642 | appropriate C<< CREATE DATABASE >> commands etc. |
643 | |
0824f31f |
644 | =head1 SERIALIZED SQL |
645 | |
646 | The SQL that this module generates and uses is serialized into an array of |
647 | SQL statements. The reason being that some databases handle multiple |
648 | statements in a single execution differently. Generally you do not need to |
649 | worry about this as these are scripts generated for you. If you find that |
650 | you are editing them on a regular basis something is wrong and you either need |
651 | to submit a bug or consider writing extra serialized SQL or Perl scripts to run |
652 | before or after the automatically generated script. |
653 | |
654 | B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in |
655 | patches that will allow more serialization formats if you want that feature, |
656 | but if you do send me a patch for that realize that I do not want to add YAML |
657 | support or whatever, I would rather add a generic method of adding any |
658 | serialization format. |
659 | |
92c34cab |
660 | =head1 PERL SCRIPTS |
661 | |
7d0b0f2b |
662 | A perl script for this tool is very simple. It merely needs to contain an |
anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
92c34cab |
664 | A very basic perl script might look like: |
665 | |
666 | #!perl |
667 | |
668 | use strict; |
669 | use warnings; |
670 | |
7d0b0f2b |
671 | sub { |
92c34cab |
672 | my $schema = shift; |
673 | |
674 | $schema->resultset('Users')->create({ |
675 | name => 'root', |
676 | password => 'root', |
677 | }) |
678 | } |
bcc72297 |
679 | |
eb28403b |
680 | =attr schema |
a65184c8 |
681 | |
bcc72297 |
682 | The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database |
683 | and generate the DDL. |
684 | |
eb28403b |
685 | =attr storage |
a65184c8 |
686 | |
bcc72297 |
687 | The L<DBIx::Class::Storage> that is I<actually> used to talk to the database |
688 | and generate the DDL. This is automatically created with L</_build_storage>. |
689 | |
02a7b8ac |
690 | =attr sql_translator_args |
cfc9edf9 |
691 | |
02a7b8ac |
692 | The arguments that get passed to L<SQL::Translator> when it's used. |
a65184c8 |
693 | |
91adde75 |
694 | =attr script_directory |
cfc9edf9 |
695 | |
91adde75 |
696 | The directory (default C<'sql'>) that scripts are stored in |
cfc9edf9 |
697 | |
eb28403b |
698 | =attr databases |
cfc9edf9 |
699 | |
700 | The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to |
701 | generate files for |
702 | |
eb28403b |
703 | =attr txn_wrap |
704 | |
bcc72297 |
705 | Set to true (which is the default) to wrap all upgrades and deploys in a single |
706 | transaction. |
707 | |
73caa630 |
708 | =attr schema_version |
709 | |
710 | The version the schema on your harddrive is at. Defaults to |
711 | C<< $self->schema->schema_version >>. |
712 | |
db223aff |
713 | =begin comment |
714 | |
715 | =head2 __ddl_consume_with_prefix |
a65184c8 |
716 | |
bcc72297 |
717 | $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' ) |
718 | |
719 | This is the meat of the multi-file upgrade/deploy stuff. It returns a list of |
720 | files in the order that they should be run for a generic "type" of upgrade. |
721 | You should not be calling this in user code. |
722 | |
db223aff |
723 | =head2 _ddl_schema_consume_filenames |
a65184c8 |
724 | |
bcc72297 |
725 | $dm->__ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
726 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
728 | initial deploy. |
729 | |
db223aff |
730 | =head2 _ddl_schema_produce_filename |
a65184c8 |
731 | |
bcc72297 |
732 | $dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] ) |
733 | |
734 | Returns a single file in which an initial schema will be stored. |
735 | |
db223aff |
736 | =head2 _ddl_schema_up_consume_filenames |
a65184c8 |
737 | |
bcc72297 |
738 | $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
739 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
741 | upgrade. |
742 | |
db223aff |
743 | =head2 _ddl_schema_down_consume_filenames |
a65184c8 |
744 | |
bcc72297 |
745 | $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
746 | |
Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for a
748 | downgrade. |
749 | |
db223aff |
=head2 _ddl_schema_up_produce_filename
a65184c8 |
751 | |
bcc72297 |
752 | $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
753 | |
754 | Returns a single file in which the sql to upgrade from one schema to another |
755 | will be stored. |
756 | |
db223aff |
757 | =head2 _ddl_schema_down_produce_filename |
bcc72297 |
758 | |
759 | $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
760 | |
761 | Returns a single file in which the sql to downgrade from one schema to another |
762 | will be stored. |
a65184c8 |
763 | |
db223aff |
764 | =head2 _resultsource_install_filename |
a65184c8 |
765 | |
bcc72297 |
766 | my $filename_fn = $dm->_resultsource_install_filename('User'); |
767 | $dm->$filename_fn('SQLite', '1.00') |
768 | |
769 | Returns a function which in turn returns a single filename used to install a |
770 | single resultsource. Weird interface is convenient for me. Deal with it. |
771 | |
db223aff |
772 | =head2 _run_sql_and_perl |
eb28403b |
773 | |
bcc72297 |
774 | $dm->_run_sql_and_perl([qw( list of filenames )]) |
a65184c8 |
775 | |
bcc72297 |
776 | Simply put, this runs the list of files passed to it. If the file ends in |
777 | C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file. |
a65184c8 |
778 | |
bcc72297 |
779 | Depending on L</txn_wrap> all of the files run will be wrapped in a single |
780 | transaction. |
eb28403b |
781 | |
db223aff |
782 | =head2 _prepare_install |
a65184c8 |
783 | |
bcc72297 |
784 | $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' }) |
a65184c8 |
785 | |
bcc72297 |
786 | Generates the sql file for installing the database. First arg is simply |
787 | L<SQL::Translator> args and the second is a coderef that returns the filename |
788 | to store the sql in. |
a65184c8 |
789 | |
db223aff |
790 | =head2 _prepare_changegrade |
bcc72297 |
791 | |
792 | $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up') |
a65184c8 |
793 | |
bcc72297 |
794 | Generates the sql file for migrating from one schema version to another. First |
795 | arg is the version to start from, second is the version to go to, third is the |
796 | L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the |
797 | direction of the changegrade, be it 'up' or 'down'. |
a65184c8 |
798 | |
db223aff |
799 | =head2 _read_sql_file |
a65184c8 |
800 | |
bcc72297 |
801 | $dm->_read_sql_file('foo.sql') |
a65184c8 |
802 | |
bcc72297 |
803 | Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments, |
804 | transactions, and blank lines. |
eb28403b |
805 | |
db223aff |
806 | =end comment |