Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
334bced5 |
use Moose;
use Method::Signatures::Simple;
use Try::Tiny;
use SQL::Translator;
require SQL::Translator::Diff;
require DBIx::Class::Storage;   # loaded for type constraint
use autodie;
use File::Path;
use File::Spec;                 # used throughout for portable path building
2e68a8e1 |
10 | |
7521a845 |
11 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
12 | |
2e68a8e1 |
13 | use Carp 'carp'; |
14 | |
d54b8d69 |
# The schema being deployed; also provides schema_version via delegation.
has schema => (
  is       => 'ro',
  isa      => 'DBIx::Class::Schema',
  required => 1,
  handles  => [qw( schema_version )],
);
21 | |
334bced5 |
# Storage backend; built lazily from the schema (see _build_storage).
has storage => (
  is         => 'ro',
  isa        => 'DBIx::Class::Storage',
  lazy_build => 1,
);
27 | |
2eaf903b |
# Default builder for 'storage': grab the schema's storage and make sure
# its driver has been determined before anything uses it.
method _build_storage {
  my $storage = $self->schema->storage;
  $storage->_determine_driver;
  return $storage;
}
33 | |
334bced5 |
# Extra arguments passed straight through to SQL::Translator.
has sqltargs => (
  is      => 'ro',
  isa     => 'HashRef',
  default => sub { +{} },
);

# Directory DDL files are read from and written to.
has upgrade_directory => (
  is       => 'ro',
  isa      => 'Str',
  required => 1,
  default  => 'sql',
);
45 | |
334bced5 |
# Database types we generate DDL for; the Databases type coerces a plain
# string into a one-element arrayref.
has databases => (
  is      => 'ro',
  isa     => 'DBIx::Class::DeploymentHandler::Databases',
  coerce  => 1,
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);
52 | |
334bced5 |
# Statements from the most recently read SQL file (set by _read_sql_file,
# consumed by _run_upgrade).
has _filedata => (
  is  => 'rw',
  isa => 'ArrayRef[Str]',
);
57 | |
76d311e7 |
# Collect the .sql files to run for a given database $type, version set,
# and $prefix ('schema', 'up', or 'down').  Files in the type-specific
# directory win; files of the same name under _common are merged in only
# when not already present.  Returns an arrayref of paths sorted by
# filename.  Dies when neither the type dir nor _generic exists.
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->upgrade_directory;

  my $main    = File::Spec->catfile( $base_dir, $type );
  my $generic = File::Spec->catfile( $base_dir, '_generic' );
  my $common  = File::Spec->catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );

  my $dir;
  if (-d $main) {
    $dir = File::Spec->catfile($main, $prefix, join q(-), @{$versions})
  } elsif (-d $generic) {
    # BUGFIX: this branch previously built the path from $main, making the
    # _generic fallback a no-op.
    $dir = File::Spec->catfile($generic, $prefix, join q(-), @{$versions})
  } else {
    die 'PREPARE TO SQL'
  }

  opendir my($dh), $dir;
  my %files = map { $_ => "$dir/$_" } grep { /\.sql$/ && -f "$dir/$_" } readdir($dh);
  closedir $dh;

  if (-d $common) {
    opendir my($dh), $common;
    for my $filename (grep { /\.sql$/ && -f "$common/$_" } readdir($dh)) {
      unless ($files{$filename}) {
        # BUGFIX: previously "$common/$_" -- but the loop variable here is
        # $filename, so $_ held a stale value from the earlier map/grep.
        $files{$filename} = "$common/$filename";
      }
    }
    closedir $dh;
  }

  return [@files{sort keys %files}]
}
3c1b5ee8 |
90 | |
76d311e7 |
# Find the install ('schema') DDL files for a single version.
method _ddl_schema_consume_filenames($type, $version) {
  return $self->__ddl_consume_with_prefix($type, [ $version ], 'schema');
}
94 | |
76d311e7 |
# Path at which the install DDL for ($type, $version) should be written,
# creating the directory when needed.  $dir is optional and defaults to
# upgrade_directory.
#
# BUGFIX/generalization: callers (prepare_install, _prepare_changegrade)
# already pass a base directory as a third argument -- previously it was
# silently ignored, defeating the "directory missing, using ./" fallback.
method _ddl_schema_produce_filename($type, $version, $dir) {
  my $base_dir = $dir || $self->upgrade_directory;

  my $dirname = File::Spec->catfile(
    $base_dir, $type, 'schema', $version
  );
  File::Path::mkpath($dirname) unless -d $dirname;

  return File::Spec->catfile(
    $dirname, '001-auto.sql'
  );
}
106 | |
76d311e7 |
# Find the upgrade ('up') DDL files for a version pair.
method _ddl_schema_up_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'up');
}
110 | |
76d311e7 |
# Find the downgrade ('down') DDL files for a version pair.
method _ddl_schema_down_consume_filenames($type, $versions) {
  return $self->__ddl_consume_with_prefix($type, $versions, 'down');
}
114 | |
76d311e7 |
# Path at which the up-diff DDL for ($type, @$versions) should be written,
# creating the directory when needed.  $dir is optional and defaults to
# upgrade_directory -- previously the caller's $dir argument was ignored,
# unlike the _down_ twin which honors it.
method _ddl_schema_up_produce_filename($type, $versions, $dir) {
  $dir ||= $self->upgrade_directory;

  my $dirname = File::Spec->catfile(
    $dir, $type, 'up', join( q(-), @{$versions} )
  );
  File::Path::mkpath($dirname) unless -d $dirname;

  return File::Spec->catfile(
    $dirname, '001-auto.sql'
  );
}
127 | |
76d311e7 |
# Path at which the down-diff DDL for ($type, @$versions) should be
# written, creating the directory when needed.  Robustness: $dir now
# defaults to upgrade_directory when omitted, matching the up twin.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  $dir ||= $self->upgrade_directory;

  my $dirname = File::Spec->catfile(
    $dir, $type, 'down', join( q(-), @{$versions} )
  );
  File::Path::mkpath($dirname) unless -d $dirname;

  return File::Spec->catfile(
    $dirname, '001-auto.sql'
  );
}
138 | |
8a7847f1 |
# Return the deployment SQL as a single string: the first existing
# pre-generated DDL file for the current storage type and schema version,
# or (when none exists) SQL freshly produced by SQL::Translator from the
# schema.  Throws via the schema when translation fails.
method _deployment_statements {
  my $schema   = $self->schema;
  my $type     = $self->storage->sqlt_type;
  # BUGFIX: shallow-copy sqltargs before modifying it below; the previous
  # code deleted the 'sources' key from the shared attribute hashref,
  # mutating state visible to every other caller.
  my $sqltargs = { %{ $self->sqltargs } };
  my $version  = $self->schema_version;

  my @filenames = @{$self->_ddl_schema_consume_filenames($type, $version)};

  for my $filename (@filenames) {
    if(-f $filename) {
      my $file;
      open $file, q(<), $filename
        or carp "Can't open $filename ($!)";
      my @rows = <$file>;
      close $file;
      return join '', @rows;
    }
  }

  # 'sources' needs to be a parser arg, but for simplicity we allow it at
  # the top level coming in.
  $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources}
    if exists $sqltargs->{sources};

  my $tr = SQL::Translator->new(
    producer => "SQL::Translator::Producer::${type}",
    %$sqltargs,
    parser => 'SQL::Translator::Parser::DBIx::Class',
    data => $schema,
  );

  my $ret = $tr->translate;

  $schema->throw_exception( 'Unable to produce deployment statements: ' . $tr->error)
    unless defined $ret;

  return $ret;
}
178 | |
7521a845 |
# Deploy the schema: fetch the deployment statements and run each one via
# the storage layer, warning (not dying) on individual statement failures.
sub _deploy {
  my $self = shift;
  my $storage = $self->storage;

  my $deploy = sub {
    my $line = shift;
    # We filter out comments, BEGIN/COMMIT, and blank lines because some
    # databases choke on them, and because a literal BEGIN in the DDL
    # blows up when deploy itself runs inside txn_do -- you can't begin
    # twice on most databases (SQLite for sure).  The intent (per the
    # original discussion between frew and ribasushi) is that deploy is
    # wrapped in a transaction and the DDL contains no literal
    # transaction statements.
    return if(!$line || $line =~ /^--|^BEGIN TRANSACTION|^COMMIT|^\s+$/);
    $storage->_query_start($line);
    try {
      # do a dbh_do cycle here, as we need some error checking in
      # place (even though we will ignore errors)
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      carp "$_ (running '${line}')"
    };
    # BUGFIX: the semicolon above was missing, which made the following
    # _query_end call parse as an argument to Try::Tiny's catch().
    $storage->_query_end($line);
  };

  my @statements = $self->_deployment_statements();
  if (@statements > 1) {
    foreach my $statement (@statements) {
      $deploy->( $statement );
    }
  }
  elsif (@statements == 1) {
    # single blob of SQL: split on statement terminators
    foreach my $line ( split(";\n", $statements[0])) {
      $deploy->( $line );
    }
  }
}
234 | |
7521a845 |
# Generate install DDL files for every configured database type, writing
# each to _ddl_schema_produce_filename.  Existing files are overwritten
# with a warning; untranslatable databases are skipped with a warning.
sub prepare_install {
  my $self = shift;
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->upgrade_directory;
  my $sqltargs  = $self->sqltargs;
  my $version   = $schema->schema_version;

  unless( -d $dir ) {
    carp "Upgrade directory $dir does not exist, using ./\n";
    $dir = './';
  }

  my $sqlt = SQL::Translator->new({
    add_drop_table          => 1,
    ignore_constraint_names => 1,
    ignore_index_names      => 1,
    parser                  => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs || {}}
  });

  # BUGFIX: was $self->throw_exception, but this class does not provide
  # throw_exception -- the schema does (cf. _deployment_statements).
  my $sqlt_schema = $sqlt->translate({ data => $schema })
    or $schema->throw_exception ($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->_ddl_schema_produce_filename($db, $version, $dir);
    if (-e $filename ) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $output = $sqlt->translate;
    if(!$output) {
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    my $file;
    unless( open $file, q(>), $filename ) {
      # BUGFIX: routed through the schema, as above.  (The 'next' after a
      # throw is unreachable but kept for parity with the original flow.)
      $schema->throw_exception("Can't open $filename for writing ($!)");
      next;
    }
    print {$file} $output;
    close $file;
  }
}
285 | |
a41a04e5 |
# Prepare upgrade DDL between two versions.  All arguments are optional:
# $from_version defaults to the deployed db_version, $to_version to the
# code's schema_version, and $version_set to [$from, $to].
sub prepare_upgrade {
  my ($self, $from_version, $to_version, $version_set) = @_;

  $from_version ||= $self->db_version;
  $to_version   ||= $self->schema_version;

  # for updates prepared automatically (rob's stuff) one would want to
  # explicitly set $version_set to [$to_version]
  $version_set ||= [$from_version, $to_version];

  return $self->_prepare_changegrade($from_version, $to_version, $version_set, 'up');
}
299 | |
# Prepare downgrade DDL between two versions; mirrors prepare_upgrade but
# drives _prepare_changegrade in the 'down' direction.
sub prepare_downgrade {
  my ($self, $from_version, $to_version, $version_set) = @_;

  $from_version ||= $self->db_version;
  $to_version   ||= $self->schema_version;

  # for updates prepared automatically (rob's stuff) one would want to
  # explicitly set $version_set to [$to_version]
  $version_set ||= [$from_version, $to_version];

  return $self->_prepare_changegrade($from_version, $to_version, $version_set, 'down');
}
313 | |
# Produce a diff DDL file ($direction is 'up' or 'down') for each
# configured database type, diffing the stored DDL for $from_version
# against the current schema (or, for old-style producers without
# preprocess_schema, against the parsed DDL for $to_version).
#
# BUGFIX: every error path previously called $self->throw_exception,
# which this class does not provide; errors are now thrown through the
# schema, consistent with _deployment_statements.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->upgrade_directory;
  my $sqltargs  = $self->sqltargs;

  my $schema_version = $schema->schema_version;

  $sqltargs = {
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  my $sqlt_schema = $sqlt->translate({ data => $schema })
    or $schema->throw_exception ($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
    unless(-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }
    my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if(-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    # parse the stored from-version DDL back into an SQLT schema
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or $schema->throw_exception ($t->error);

      my $out = $t->translate( $prefilename )
        or $schema->throw_exception ($t->error);

      $source_schema = $t->schema;

      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema.  Old style ones
    # don't, and we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or $schema->throw_exception ($t->error);

      my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
      my $out = $t->translate( $filename )
        or $schema->throw_exception ($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    my $diff = SQL::Translator::Diff::schema_diff(
      $source_schema, $db,
      $dest_schema, $db,
      $sqltargs
    );
    my $file;
    unless(open $file, q(>), $diff_file) {
      $schema->throw_exception("Can't write to $diff_file ($!)");
      next;
    }
    print {$file} $diff;
    close $file;
  }
}
411 | |
334bced5 |
# Read a SQL file and split it into individual statements, dropping
# comments, BEGIN/COMMIT lines, and empty chunks.  Returns an arrayref of
# statements, or nothing when no filename is given.  NOTE: newlines are
# stripped before splitting, so each statement comes back as one line.
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file or carp("Can't open upgrade file, $file ($!)");
  my $contents = join '', <$fh>;
  close $fh;

  # strip newlines, then split on statement terminators
  my $flat = join '', split /\n/, $contents;
  my @statements = grep {
    $_ &&
    !/^--/ &&
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/m
  } split /;/, $flat;

  return \@statements;
}
428 | |
76d311e7 |
# NOTE: per the original comment, "these are exactly the same for now" --
# this deliberately consumes the *up* filenames, just like
# _upgrade_single_step.
sub _downgrade_single_step {
  my ($self, $versions) = @_;
  my @version_set = @{ $versions };

  my $files = $self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    \@version_set,
  );

  for my $upgrade_file (@{ $files }) {
    unless (-f $upgrade_file) {
      # croak?
      carp "Upgrade not possible, no upgrade file found ($upgrade_file), please create one\n";
      return;
    }

    $self->_filedata($self->_read_sql_file($upgrade_file)); # I don't like this --fREW 2010-02-22
    $self->schema->txn_do(sub { $self->_do_upgrade });
  }
}
449 | |
7521a845 |
# Apply one upgrade step: read each 'up' DDL file for the version set and
# run its statements inside a transaction.
sub _upgrade_single_step {
  my ($self, $versions) = @_;
  my @version_set = @{ $versions };

  my $files = $self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    \@version_set,
  );

  for my $upgrade_file (@{ $files }) {
    unless (-f $upgrade_file) {
      # croak?
      carp "Upgrade not possible, no upgrade file found ($upgrade_file), please create one\n";
      return;
    }

    $self->_filedata($self->_read_sql_file($upgrade_file)); # I don't like this --fREW 2010-02-22
    $self->schema->txn_do(sub { $self->_do_upgrade });
  }
}
469 | |
8a7847f1 |
# Run every statement from the loaded file (match-anything pattern).
method _do_upgrade {
  return $self->_run_upgrade(qr/.*?/);
}
334bced5 |
471 | |
8a7847f1 |
# Run the subset of statements in _filedata matching the pattern $stm,
# logging each through the storage debug object when debugging is on.
# Does nothing when no file has been loaded.
method _run_upgrade($stm) {
  return unless $self->_filedata;
  my @statements = grep { $_ =~ $stm } @{$self->_filedata};

  # NOTE(review): the topicalizing for-loop is load-bearing --
  # _apply_statement currently reads $_ rather than its own parameter.
  for (@statements) {
    $self->storage->debugobj->query_start($_) if $self->storage->debug;
    $self->_apply_statement($_);
    $self->storage->debugobj->query_end($_) if $self->storage->debug;
  }
}
482 | |
8a7847f1 |
# Execute one SQL statement against the storage's dbh, warning (rather
# than dying) when do() reports failure.
method _apply_statement($statement) {
  # croak?
  # BUGFIX: the body previously used $_ instead of the $statement
  # parameter, silently depending on the caller's topic variable.
  $self->storage->dbh->do($statement) or carp "SQL was: $statement"
}
487 | |
2e68a8e1 |
488 | 1; |
e051bb00 |
489 | |
490 | __END__ |
491 | |
2eaf903b |
492 | vim: ts=2 sw=2 expandtab |