Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
334bced5 |
2 | use Moose; |
9af9d0b2 |
3 | |
9a3a62f1 |
4 | # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories |
5 | |
9af9d0b2 |
6 | use autodie; |
7 | use Carp qw( carp croak ); |
8465e767 |
8 | use DBIx::Class::DeploymentHandler::Logger; |
9 | use Log::Contextual qw(:log :dlog), -default_logger => |
10 | DBIx::Class::DeploymentHandler::Logger->new({ |
11 | env_prefix => 'DBICDH' |
12 | }); |
9af9d0b2 |
13 | |
2e68a8e1 |
14 | use Method::Signatures::Simple; |
7f50d101 |
15 | use Try::Tiny; |
9af9d0b2 |
16 | |
d23c7c77 |
17 | use SQL::Translator; |
18 | require SQL::Translator::Diff; |
9af9d0b2 |
19 | |
d23c7c77 |
20 | require DBIx::Class::Storage; # loaded for type constraint |
41863428 |
21 | use DBIx::Class::DeploymentHandler::Types; |
22 | |
9af9d0b2 |
23 | use File::Path 'mkpath'; |
24 | use File::Spec::Functions; |
2e68a8e1 |
25 | |
7521a845 |
26 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
27 | |
d54b8d69 |
# ------------------------------------------------------------------
# Attributes
# ------------------------------------------------------------------

# The L<DBIx::Class::Schema> (required) we generate DDL from and talk
# to the database with.
has schema => (
  isa      => 'DBIx::Class::Schema',
  is       => 'ro',
  required => 1,
);

# The storage object actually used to run SQL; defaults to the
# schema's storage with its driver determined up front.
has storage => (
  isa        => 'DBIx::Class::Storage',
  is         => 'ro',
  lazy_build => 1,
);

method _build_storage {
  my $storage = $self->schema->storage;
  # make sure the right driver subclass is in place before first use
  $storage->_determine_driver;
  $storage
}

# Extra arguments merged into every SQL::Translator invocation.
has sql_translator_args => (
  isa     => 'HashRef',
  is      => 'ro',
  default => sub { {} },
);

# Base directory the migration scripts live in.
has script_directory => (
  isa      => 'Str',
  is       => 'ro',
  required => 1,
  default  => 'sql',
);

# Database types we generate DDL files for.
has databases => (
  coerce  => 1,
  isa     => 'DBIx::Class::DeploymentHandler::Databases',
  is      => 'ro',
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);

# When true (the default) each deploy/upgrade runs inside one
# transaction.
has txn_wrap => (
  is      => 'ro',
  isa     => 'Bool',
  default => 1,
);

# The version the schema on disk is at.
has schema_version => (
  is         => 'ro',
  isa        => 'Str',
  lazy_build => 1,
);

# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }
81 | |
76d311e7 |
# Collect the script files to run for one migration step, in run
# order.  Database-specific scripts win over _generic ones, and
# _common scripts fill in only where no file of the same basename
# already exists.  Returns an arrayref of full paths.
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $script_dir  = $self->script_directory;
  my $version_tag = join q(-), @{$versions};

  my $main    = catfile( $script_dir, $type );
  my $generic = catfile( $script_dir, '_generic' );
  my $common  = catfile( $script_dir, '_common', $prefix, $version_tag );

  # prefer the db-specific tree, fall back to _generic, else give up
  my $dir;
  if (-d $main) {
    $dir = catfile($main, $prefix, $version_tag);
  } elsif (-d $generic) {
    $dir = catfile($generic, $prefix, $version_tag);
  } else {
    croak "neither $main or $generic exist; please write/generate some SQL";
  }

  opendir my($dir_handle), $dir;
  my %path_for =
    map  { $_ => "$dir/$_" }
    grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" }
    readdir $dir_handle;
  closedir $dir_handle;

  # _common entries only apply when the specific dir has no file of
  # the same name
  if (-d $common) {
    opendir my($common_handle), $common;
    for my $basename (grep { /\.(?:sql|pl)$/ && -f catfile($common, $_) } readdir $common_handle) {
      $path_for{$basename} ||= catfile($common, $basename);
    }
    closedir $common_handle;
  }

  return [@path_for{sort keys %path_for}]
}
3c1b5ee8 |
115 | |
fc4b7602 |
# Curried __ddl_consume_with_prefix: preinstall scripts for one version.
method _ddl_preinstall_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall');
}
119 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: initial-deploy scripts for one version.
method _ddl_schema_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix($type, [ $version ], 'schema');
}
123 | |
76d311e7 |
# File an initial schema dump should be written to; creates the
# directory if it does not exist yet.
method _ddl_schema_produce_filename($type, $version) {
  my $target_dir = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($target_dir) unless -d $target_dir;

  return catfile( $target_dir, '001-auto.sql' );
}
130 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: upgrade scripts for a version pair.
method _ddl_schema_up_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'up');
}
134 | |
76d311e7 |
# Curried __ddl_consume_with_prefix: downgrade scripts for a version pair.
method _ddl_schema_down_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'down');
}
138 | |
76d311e7 |
# File that generated upgrade sql for a version pair is written to;
# creates the directory if necessary.
method _ddl_schema_up_produce_filename($type, $versions) {
  my $target_dir =
    catfile( $self->script_directory, $type, 'up', join q(-), @{$versions} );
  mkpath($target_dir) unless -d $target_dir;

  return catfile( $target_dir, '001-auto.sql' );
}
148 | |
76d311e7 |
# File that generated downgrade sql for a version pair is written to.
# NOTE(review): unlike the up variant this takes the base directory as
# an argument rather than reading script_directory — callers pass the
# same value, but the asymmetry is deliberate-looking; confirm before
# unifying.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  my $target_dir = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($target_dir) unless -d $target_dir;

  return catfile( $target_dir, '001-auto.sql' );
}
155 | |
f36afe83 |
# Run an arrayref of SQL statements against the storage.  Blank lines,
# comments, and BEGIN/COMMIT statements are stripped first (txn_wrap
# manages transactions for us).  Returns the executed statements
# joined with newlines.
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  Dlog_trace { "Running SQL $_" } $sql;
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    # the whole reason we do this is so that we can see the line that was run
    try {
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      die "$_ (running line '$line')"
    };
    # ^ the semicolon above is required: Try::Tiny's catch() is an
    # ordinary function call, so without it the _query_end call below
    # would be parsed as an extra argument to catch() and evaluated
    # before the try block ever ran
    $storage->_query_end($line);
  }
  return join "\n", @$sql
}
181 | |
# Read one .sql file and execute its statements; returns the sql run.
method _run_sql($filename) {
  log_debug { "Running SQL from $filename" };
  my $statements = $self->_read_sql_file($filename);
  return $self->_run_sql_array($statements);
}
2e68a8e1 |
186 | |
5d7b27cf |
# Compile and run a migration perl script.  The file must evaluate to
# a coderef, which is called with the schema as its only argument.
method _run_perl($filename) {
  log_debug { "Running Perl from $filename" };
  # slurp the whole script
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  # capture the compile error immediately; the logging call below may
  # clobber the global $@ before we test it
  my $compile_error = $@;
  Dlog_trace { "Running Perl $_" } $fn;

  if ($compile_error) {
    carp "$filename failed to compile: $compile_error";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    # (typo "anonymouse" fixed in this user-facing message)
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
09bc35e3 |
{
  # decoder is cached across calls; built on first use
  my $json;

  # Deserialize a sql file ($type is the serialization format taken
  # from the ".sql-$type" extension) and run the statements it
  # contains.  Only JSON is currently supported.  Returns the sql that
  # ran, like _run_sql, so callers can accumulate it.
  method _run_serialized_sql($filename, $type) {
    if ($type eq 'json') {
      require JSON;
      $json ||= JSON->new->pretty;
      # decode the *contents* of the file, not the filename itself,
      # and actually execute the result (the old code did neither)
      my $sql_array = $json->decode(
        do { local( @ARGV, $/ ) = $filename; <> } # slurp
      );
      return $self->_run_sql_array($sql_array);
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

}
218 | |
# Run a list of migration files in order: .sql as plain SQL,
# .sql-$format as serialized SQL, .pl as perl scripts.  When txn_wrap
# is set the whole run happens inside one transaction.  Returns the
# concatenated sql that was run.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};

  # "my $x = EXPR if COND;" is documented undefined behavior in perl
  # (perlsyn), so declare and assign in two steps
  my $guard;
  $guard = $self->schema->txn_scope_guard if $self->txn_wrap;

  my $sql = '';
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

  $guard->commit if $self->txn_wrap;

  return $sql;
}
240 | |
# Deploy the schema at the requested version (defaults to
# schema_version) by running the schema scripts for the current
# database type.  Returns the sql that was run.
sub deploy {
  my $self    = shift;
  my $args    = shift || {};
  my $version = $args->{version} || $self->schema_version;
  log_info { "deploying version $version" };

  return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
    $version,
  ));
}
251 | |
80ff6f6d |
# Run preinstall perl scripts for a version.  These run before a
# database necessarily exists, so the subs are called with no
# arguments; sql files are skipped for now.
sub preinstall {
  my ($self, $args) = @_;
  my $version = $args->{version} || $self->schema_version;
  log_info { "preinstalling version $version" };
  my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;

  my @scripts = @{$self->_ddl_preinstall_consume_filenames(
    $storage_type,
    $version,
  )};

  for my $filename (@scripts) {
    # We ignore sql for now (till I figure out what to do with it)
    if ( $filename =~ /^(.+)\.pl$/ ) {
      # slurp and compile the script; it should evaluate to a coderef
      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

      no warnings 'redefine';
      my $fn = eval "$filedata";
      use warnings;

      if ($@) {
        carp "$filename failed to compile: $@";
      } elsif (ref $fn eq 'CODE') {
        $fn->()
      } else {
        carp "$filename should define an anonymous sub but it didn't!";
      }
    } else {
      croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
    }
  }
}
285 | |
c8a2f7bd |
# Generate and write initial-deploy DDL for every configured database
# type.  The first argument is a hashref of extra SQL::Translator args
# (merged over sql_translator_args); the second names (or is) the
# method that yields the output filename for each database.
sub _prepare_install {
  my $self      = shift;
  my $sqltargs  = { %{$self->sql_translator_args}, %{shift @_} };
  my $to_file   = shift;
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $version   = $self->schema_version;

  my $sqlt = SQL::Translator->new({
    add_drop_table          => 1,
    ignore_constraint_names => 1,
    ignore_index_names      => 1,
    parser                  => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs}
  });

  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  for my $db (@$databases) {
    # reuse a single translator object, re-targeted per database
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->$to_file($db, $version, $dir);
    if (-e $filename) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $output = $sqlt->translate;
    unless ($output) {
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    open my $fh, q(>), $filename;
    print {$fh} $output;
    close $fh;
  }
}
327 | |
c8a2f7bd |
# Returns a coderef that, given a database type and version, yields
# the filename used to install a single result source (creating the
# target directory as a side effect).
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;
  return sub {
    my ($self, $type, $version) = @_;
    my $target_dir = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($target_dir) unless -d $target_dir;

    return catfile( $target_dir, "001-auto-$source_name.sql" );
  };
}
338 | |
# Deploy the tables for a single result source at a given version.
sub install_resultsource {
  my ($self, $args) = @_;
  my $source  = $args->{result_source};
  my $version = $args->{version};
  log_info { 'installing_resultsource ' . $source->source_name . ", version $version" };

  my $filename_fn =
    $self->_resultsource_install_filename($source->source_name);

  $self->_run_sql_and_perl([
    $self->$filename_fn(
      $self->storage->sqlt_type,
      $version,
    )
  ]);
}
355 | |
# Generate the install sql for a single result source only, by asking
# the DBIC parser to restrict itself to that source.
sub prepare_resultsource_install {
  my ($self, $args) = @_;
  my $source = $args->{result_source};
  log_info { 'preparing install for resultsource ' . $source->source_name };

  my $filename_fn = $self->_resultsource_install_filename($source->source_name);
  $self->_prepare_install(
    { parser_args => { sources => [$source->source_name], } },
    $filename_fn,
  );
}
366 | |
91557c90 |
# Generate the full-schema install sql files for the current schema
# version.
sub prepare_deploy {
  my $self = shift;
  log_info { 'preparing deploy' };
  return $self->_prepare_install({}, '_ddl_schema_produce_filename');
}
372 | |
a41a04e5 |
# Generate the sql needed to upgrade across the given version set.
sub prepare_upgrade {
  my ($self, $args) = @_;
  my ($from, $to, $version_set) =
    @{$args}{qw(from_version to_version version_set)};
  log_info { "preparing upgrade from $from to $to" };
  $self->_prepare_changegrade($from, $to, $version_set, 'up');
}
382 | |
# Generate the sql needed to downgrade across the given version set.
sub prepare_downgrade {
  my ($self, $args) = @_;
  my ($from, $to, $version_set) =
    @{$args}{qw(from_version to_version version_set)};
  log_info { "preparing downgrade from $from to $to" };
  $self->_prepare_changegrade($from, $to, $version_set, 'down');
}
392 | |
# Generate the diff sql to move between $from_version and $to_version
# for every configured database, writing it to the appropriate
# up/down file.  $direction is 'up' or 'down'.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $sqltargs  = $self->sql_translator_args;

  my $schema_version = $self->schema_version;

  $sqltargs = {
    add_drop_table          => 1,
    ignore_constraint_names => 1,
    ignore_index_names      => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  my $sqlt_schema = $sqlt->translate( data => $schema )
    or croak($sqlt->error);

  for my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    # we diff against the previously dumped schema file for $from_version
    my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
    unless (-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }

    my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if (-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    # parse the old schema back out of its sql file
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $out = $t->translate( $prefilename )
        or croak($t->error);

      $source_schema = $t->schema;

      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
    # And we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or croak($t->error);

      my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
      my $out = $t->translate( $filename )
        or croak($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    my $diff = SQL::Translator::Diff::schema_diff(
      $source_schema, $db,
      $dest_schema,   $db,
      $sqltargs
    );
    open my $fh, q(>), $diff_file;
    print {$fh} $diff;
    close $fh;
  }
}
486 | |
334bced5 |
# Read a sql file and return its statements as an arrayref, split on
# ";\n".  Blank lines, "--" comments, and BEGIN/COMMIT statements are
# stripped (txn_wrap manages transactions itself).
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file;
  my @statements = split /;\n/, join '', <$fh>;
  close $fh;

  @statements = grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @statements;

  return \@statements;
}
504 | |
7d2a6974 |
# Run the downgrade scripts for one step of the version set.  Returns
# ['', $sql] to match the interface the version handler expects.
sub downgrade_single_step {
  my ($self, $args) = @_;
  my $version_set = $args->{version_set};
  Dlog_info { "downgrade_single_step'ing $_" } $version_set;

  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  ));

  return ['', $sql];
}
517 | |
7d2a6974 |
# Run the upgrade scripts for one step of the version set.  Returns
# ['', $sql] to match the interface the version handler expects.
sub upgrade_single_step {
  my ($self, $args) = @_;
  my $version_set = $args->{version_set};
  Dlog_info { "upgrade_single_step'ing $_" } $version_set;

  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  ));

  return ['', $sql];
}
529 | |
aabd4237 |
# Moose boilerplate: lock the class down for speed, then return true
# so the module loads successfully.
__PACKAGE__->meta->make_immutable;

1;
e051bb00 |
533 | |
e52174e3 |
534 | # vim: ts=2 sw=2 expandtab |
535 | |
e051bb00 |
536 | __END__ |
537 | |
bcc72297 |
538 | =head1 DESCRIPTION |
539 | |
0824f31f |
540 | This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes |
541 | care of generating serialized sql files representing schemata as well |
542 | as serialized sql files to move from one version of a schema to the next.
543 | One of the hallmark features of this class is that it allows for multiple sql |
544 | files for deploy and upgrade, allowing developers to fine tune deployment. |
545 | In addition it also allows for perl files to be run |
546 | at any stage of the process. |
bcc72297 |
547 | |
548 | For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's |
549 | documented here is extra fun stuff or private methods. |
550 | |
551 | =head1 DIRECTORY LAYOUT |
552 | |
92c34cab |
553 | Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's |
554 | heavily based upon L<DBIx::Migration::Directories>, but has some extensions and |
555 | modifications, so even if you are familiar with it, please read this. I feel |
556 | like the best way to describe the layout is with the following example: |
557 | |
558 | $sql_migration_dir |
559 | |- SQLite |
560 | | |- down |
4f85efc6 |
561 | | | `- 2-1 |
0824f31f |
562 | | | `- 001-auto.sql-json |
92c34cab |
563 | | |- schema |
564 | | | `- 1 |
0824f31f |
565 | | | `- 001-auto.sql-json |
92c34cab |
566 | | `- up |
567 | | |- 1-2 |
0824f31f |
568 | | | `- 001-auto.sql-json |
92c34cab |
569 | | `- 2-3 |
0824f31f |
570 | | `- 001-auto.sql-json |
92c34cab |
571 | |- _common |
572 | | |- down |
4f85efc6 |
573 | | | `- 2-1 |
92c34cab |
574 | | | `- 002-remove-customers.pl |
575 | | `- up |
576 | | `- 1-2 |
577 | | `- 002-generate-customers.pl |
578 | |- _generic |
579 | | |- down |
4f85efc6 |
580 | | | `- 2-1 |
0824f31f |
581 | | | `- 001-auto.sql-json |
92c34cab |
582 | | |- schema |
583 | | | `- 1 |
0824f31f |
584 | | | `- 001-auto.sql-json |
92c34cab |
585 | | `- up |
586 | | `- 1-2 |
0824f31f |
587 | | |- 001-auto.sql-json |
92c34cab |
588 | | `- 002-create-stored-procedures.sql |
589 | `- MySQL |
590 | |- down |
4f85efc6 |
591 | | `- 2-1 |
0824f31f |
592 | | `- 001-auto.sql-json |
80ff6f6d |
593 | |- preinstall |
594 | | `- 1 |
595 | | |- 001-create_database.pl |
596 | | `- 002-create_users_and_permissions.pl |
92c34cab |
597 | |- schema |
598 | | `- 1 |
0824f31f |
599 | | `- 001-auto.sql-json |
92c34cab |
600 | `- up |
601 | `- 1-2 |
0824f31f |
602 | `- 001-auto.sql-json |
92c34cab |
603 | |
604 | So basically, the code |
605 | |
606 | $dm->deploy(1) |
607 | |
608 | on an C<SQLite> database that would simply run |
0824f31f |
609 | C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next, |
92c34cab |
610 | |
611 | $dm->upgrade_single_step([1,2]) |
612 | |
0824f31f |
613 | would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by |
92c34cab |
614 | C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>. |
615 | |
0824f31f |
616 | C<.pl> files don't have to be in the C<_common> directory, but most of the time |
617 | they should be, because perl scripts are generally database independent.
92c34cab |
618 | |
619 | C<_generic> exists for when you for some reason are sure that your SQL is |
620 | generic enough to run on all databases. Good luck with that one. |
621 | |
80ff6f6d |
622 | Note that unlike most steps in the process, C<preinstall> will not run SQL, as |
623 | there may not even be a database at preinstall time. It will run perl scripts
624 | just like the other steps in the process, but nothing is passed to them. |
625 | Until people have used this more it will remain freeform, but a recommended use |
626 | of preinstall is to have it prompt for username and password, and then call the |
627 | appropriate C<< CREATE DATABASE >> commands etc. |
628 | |
0824f31f |
629 | =head1 SERIALIZED SQL |
630 | |
631 | The SQL that this module generates and uses is serialized into an array of |
632 | SQL statements. The reason being that some databases handle multiple |
633 | statements in a single execution differently. Generally you do not need to |
634 | worry about this as these are scripts generated for you. If you find that |
635 | you are editing them on a regular basis something is wrong and you either need |
636 | to submit a bug or consider writing extra serialized SQL or Perl scripts to run |
637 | before or after the automatically generated script. |
638 | |
639 | B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in |
640 | patches that will allow more serialization formats if you want that feature, |
641 | but if you do send me a patch for that realize that I do not want to add YAML |
642 | support or whatever, I would rather add a generic method of adding any |
643 | serialization format. |
644 | |
92c34cab |
645 | =head1 PERL SCRIPTS |
646 | |
7d0b0f2b |
647 | A perl script for this tool is very simple. It merely needs to contain an |
648 | anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
92c34cab |
649 | A very basic perl script might look like: |
650 | |
651 | #!perl |
652 | |
653 | use strict; |
654 | use warnings; |
655 | |
7d0b0f2b |
656 | sub { |
92c34cab |
657 | my $schema = shift; |
658 | |
659 | $schema->resultset('Users')->create({ |
660 | name => 'root', |
661 | password => 'root', |
662 | }) |
663 | } |
bcc72297 |
664 | |
eb28403b |
665 | =attr schema |
a65184c8 |
666 | |
bcc72297 |
667 | The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database |
668 | and generate the DDL. |
669 | |
eb28403b |
670 | =attr storage |
a65184c8 |
671 | |
bcc72297 |
672 | The L<DBIx::Class::Storage> that is I<actually> used to talk to the database |
673 | and generate the DDL. This is automatically created with L</_build_storage>. |
674 | |
02a7b8ac |
675 | =attr sql_translator_args |
cfc9edf9 |
676 | |
02a7b8ac |
677 | The arguments that get passed to L<SQL::Translator> when it's used. |
a65184c8 |
678 | |
91adde75 |
679 | =attr script_directory |
cfc9edf9 |
680 | |
91adde75 |
681 | The directory (default C<'sql'>) that scripts are stored in |
cfc9edf9 |
682 | |
eb28403b |
683 | =attr databases |
cfc9edf9 |
684 | |
685 | The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to |
686 | generate files for |
687 | |
eb28403b |
688 | =attr txn_wrap |
689 | |
bcc72297 |
690 | Set to true (which is the default) to wrap all upgrades and deploys in a single |
691 | transaction. |
692 | |
73caa630 |
693 | =attr schema_version |
694 | |
695 | The version the schema on your harddrive is at. Defaults to |
696 | C<< $self->schema->schema_version >>. |
697 | |
db223aff |
698 | =begin comment |
699 | |
700 | =head2 __ddl_consume_with_prefix |
a65184c8 |
701 | |
bcc72297 |
702 | $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' ) |
703 | |
704 | This is the meat of the multi-file upgrade/deploy stuff. It returns a list of |
705 | files in the order that they should be run for a generic "type" of upgrade. |
706 | You should not be calling this in user code. |
707 | |
db223aff |
708 | =head2 _ddl_schema_consume_filenames |
a65184c8 |
709 | |
bcc72297 |
710 | $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
711 | |
712 | Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
713 | initial deploy. |
714 | |
db223aff |
715 | =head2 _ddl_schema_produce_filename |
a65184c8 |
716 | |
bcc72297 |
717 | $dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] ) |
718 | |
719 | Returns a single file in which an initial schema will be stored. |
720 | |
db223aff |
721 | =head2 _ddl_schema_up_consume_filenames |
a65184c8 |
722 | |
bcc72297 |
723 | $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
724 | |
725 | Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
726 | upgrade. |
727 | |
db223aff |
728 | =head2 _ddl_schema_down_consume_filenames |
a65184c8 |
729 | |
bcc72297 |
730 | $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] ) |
731 | |
732 | Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for a
733 | downgrade. |
734 | |
db223aff |
735 | =head2 _ddl_schema_up_produce_filenames |
a65184c8 |
736 | |
bcc72297 |
737 | $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
738 | |
739 | Returns a single file in which the sql to upgrade from one schema to another |
740 | will be stored. |
741 | |
db223aff |
742 | =head2 _ddl_schema_down_produce_filename |
bcc72297 |
743 | |
744 | $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] ) |
745 | |
746 | Returns a single file in which the sql to downgrade from one schema to another |
747 | will be stored. |
a65184c8 |
748 | |
db223aff |
749 | =head2 _resultsource_install_filename |
a65184c8 |
750 | |
bcc72297 |
751 | my $filename_fn = $dm->_resultsource_install_filename('User'); |
752 | $dm->$filename_fn('SQLite', '1.00') |
753 | |
754 | Returns a function which in turn returns a single filename used to install a |
755 | single resultsource. Weird interface is convenient for me. Deal with it. |
756 | |
db223aff |
757 | =head2 _run_sql_and_perl |
eb28403b |
758 | |
bcc72297 |
759 | $dm->_run_sql_and_perl([qw( list of filenames )]) |
a65184c8 |
760 | |
bcc72297 |
761 | Simply put, this runs the list of files passed to it. If the file ends in |
762 | C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file. |
a65184c8 |
763 | |
bcc72297 |
764 | Depending on L</txn_wrap> all of the files run will be wrapped in a single |
765 | transaction. |
eb28403b |
766 | |
db223aff |
767 | =head2 _prepare_install |
a65184c8 |
768 | |
bcc72297 |
769 | $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' }) |
a65184c8 |
770 | |
bcc72297 |
771 | Generates the sql file for installing the database. First arg is simply |
772 | L<SQL::Translator> args and the second is a coderef that returns the filename |
773 | to store the sql in. |
a65184c8 |
774 | |
db223aff |
775 | =head2 _prepare_changegrade |
bcc72297 |
776 | |
777 | $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up') |
a65184c8 |
778 | |
bcc72297 |
779 | Generates the sql file for migrating from one schema version to another. First |
780 | arg is the version to start from, second is the version to go to, third is the |
781 | L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the |
782 | direction of the changegrade, be it 'up' or 'down'. |
a65184c8 |
783 | |
db223aff |
784 | =head2 _read_sql_file |
a65184c8 |
785 | |
bcc72297 |
786 | $dm->_read_sql_file('foo.sql') |
a65184c8 |
787 | |
bcc72297 |
788 | Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments, |
789 | transactions, and blank lines. |
eb28403b |
790 | |
db223aff |
791 | =end comment |