Commit | Line | Data |
45d0d9d5 |
1 | package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; |
use Moose;
use Method::Signatures::Simple;
use Try::Tiny;

use SQL::Translator;
require SQL::Translator::Diff;
require DBIx::Class::Storage;   # loaded for type constraint

use autodie;
use File::Path;
use File::Spec;   # File::Spec->catfile is used throughout but was never loaded explicitly
2e68a8e1 |
10 | |
7521a845 |
11 | with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; |
3c1b5ee8 |
12 | |
2e68a8e1 |
13 | use Carp 'carp'; |
14 | |
# The DBIx::Class::Schema this deploy method operates on.  Required;
# schema_version is delegated to it.
has schema => (
  isa => 'DBIx::Class::Schema',
  is => 'ro',
  required => 1,
  handles => [qw( schema_version )],
);
21 | |
# The schema's storage object, built lazily (see _build_storage).
has storage => (
  isa => 'DBIx::Class::Storage',
  is => 'ro',
  lazy_build => 1,
);

method _build_storage {
  my $s = $self->schema->storage;
  # Force the storage to resolve its real driver subclass before we
  # ask it for things like sqlt_type.
  $s->_determine_driver;
  $s
}
33 | |
# Extra arguments passed through to SQL::Translator.
has sqltargs => (
  isa => 'HashRef',
  is => 'ro',
  default => sub { {} },
);

# Directory under which DDL files are read and written ('sql' by default).
has upgrade_directory => (
  isa => 'Str',
  is => 'ro',
  required => 1,
  default => 'sql',
);
45 | |
# Database types to generate DDL for; the coercion lets callers pass a
# single type as a plain string.
has databases => (
  coerce => 1,
  isa => 'DBIx::Class::DeploymentHandler::Databases',
  is => 'ro',
  default => sub { [qw( MySQL SQLite PostgreSQL )] },
);
52 | |
# Statements read from the current upgrade file by _read_sql_file;
# consumed by _run_upgrade.
has _filedata => (
  isa => 'ArrayRef[Str]',
  is => 'rw',
);
57 | |
# Collect the .sql files for a given database type, version (pair) and
# prefix ('schema', 'up' or 'down').  Files from the type-specific (or
# _generic fallback) directory win over same-named files in _common.
# Returns an arrayref of paths, sorted by filename.
method __ddl_in_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->upgrade_directory;

  my $main = File::Spec->catfile( $base_dir, $type );
  my $generic = File::Spec->catfile( $base_dir, '_generic' );
  my $common = File::Spec->catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );

  my $dir;
  if (-d $main) {
    $dir = File::Spec->catfile($main, $prefix, join q(-), @{$versions})
  } elsif (-d $generic) {
    # BUG FIX: this branch previously built the path from $main even
    # though only $generic was known to exist, so the _generic
    # fallback directory was never actually used.
    $dir = File::Spec->catfile($generic, $prefix, join q(-), @{$versions})
  } else {
    die 'PREPARE TO SQL'
  }

  opendir my($dh), $dir;
  my %files = map { $_ => "$dir/$_" } grep { /\.sql$/ && -f "$dir/$_" } readdir($dh);
  closedir $dh;

  if (-d $common) {
    opendir my($dh), $common;
    for my $filename (grep { /\.sql$/ && -f "$common/$_" } readdir($dh)) {
      unless ($files{$filename}) {
        # BUG FIX: was "$common/$_", but $_ is not set here -- the
        # loop variable is $filename, so _common files mapped to a
        # bogus path.
        $files{$filename} = "$common/$filename";
      }
    }
    closedir $dh;
  }

  return [@files{sort keys %files}]
}
3c1b5ee8 |
90 | |
# Filenames of the full-schema DDL files for one version of the given
# database type.
method _ddl_schema_in_filenames($type, $version) {
  $self->__ddl_in_with_prefix($type, [ $version ], 'schema')
}
94 | |
# Path of the auto-generated full-schema DDL file for a database
# type/version, e.g. $dir/$type/schema/$version/001-auto.sql.  The
# containing directory is created on demand.
method _ddl_schema_out_filename($type, $version, $dir) {
  my $out_dir = File::Spec->catfile( $dir, $type, 'schema', $version );
  File::Path::mkpath($out_dir) unless -d $out_dir;

  return File::Spec->catfile( $out_dir, '001-auto.sql' );
}
105 | |
# Filenames of the 'up' migration DDL files for a version pair.
# NOTE(review): $dir is accepted but unused -- the lookup is rooted at
# upgrade_directory inside __ddl_in_with_prefix; confirm whether the
# parameter should be honored or dropped.
method _ddl_schema_up_in_filenames($type, $versions, $dir) {
  $self->__ddl_in_with_prefix($type, $versions, 'up')
}

# Filenames of the 'down' migration DDL files for a version pair.
# NOTE(review): $dir is unused here as well.
method _ddl_schema_down_in_filenames($type, $versions, $dir) {
  $self->__ddl_in_with_prefix($type, $versions, 'down')
}
113 | |
# Path where a generated 'up' diff should be written, e.g.
# $dir/$type/up/1.0-2.0/001-auto.sql.  Creates the directory on demand.
method _ddl_schema_up_out_filename($type, $versions, $dir) {
  my $out_dir = File::Spec->catfile( $dir, $type, 'up', join( q(-), @{$versions} ) );
  File::Path::mkpath($out_dir) unless -d $out_dir;

  return File::Spec->catfile( $out_dir, '001-auto.sql' );
}
124 | |
# Path where a generated 'down' diff should be written, e.g.
# $dir/$type/down/2.0-1.0/001-auto.sql.  Creates the directory on demand.
method _ddl_schema_down_out_filename($type, $versions, $dir) {
  my $out_dir = File::Spec->catfile( $dir, $type, 'down', join( q(-), @{$versions} ) );
  File::Path::mkpath($out_dir) unless -d $out_dir;

  return File::Spec->catfile( $out_dir, '001-auto.sql' );
}
135 | |
# Return the deployment DDL for the current schema version.  If a
# pre-generated DDL file exists for this database type it is slurped
# and returned as a single string; otherwise the DDL is produced on
# the fly via SQL::Translator.  Context matters: _deploy calls this in
# list context to get individual statements for $dbh->do, while file
# generation wants the scalar (single document) form -- so the
# caller's context is honored via wantarray.
method _deployment_statements {
  my $dir = $self->upgrade_directory;
  my $schema = $self->schema;
  my $type = $self->storage->sqlt_type;
  my $sqltargs = $self->sqltargs;
  my $version = $self->schema_version;

  my @filenames = @{$self->_ddl_schema_in_filenames($type, $version)};

  # Prefer an existing DDL file; the first one found wins.
  for my $filename (@filenames) {
    if(-f $filename) {
      my $file;
      # NOTE(review): with 'use autodie' in effect this open dies on
      # failure, so the carp fallback is effectively unreachable.
      open $file, q(<), $filename
        or carp "Can't open $filename ($!)";
      my @rows = <$file>;
      close $file;
      return join '', @rows;
    }
  }

  # sources needs to be a parser arg, but for simplicty allow at top level
  # coming in
  $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources}
    if exists $sqltargs->{sources};

  my $tr = SQL::Translator->new(
    producer => "SQL::Translator::Producer::${type}",
    %$sqltargs,
    parser => 'SQL::Translator::Parser::DBIx::Class',
    data => $schema,
  );

  # translate() returns individual statements in list context and one
  # writable-to-file blob in scalar context, so propagate our own
  # calling context to it.
  my @ret;
  my $wa = wantarray;
  if ($wa) {
    @ret = $tr->translate;
  }
  else {
    $ret[0] = $tr->translate;
  }

  $schema->throw_exception( 'Unable to produce deployment statements: ' . $tr->error)
    unless (@ret && defined $ret[0]);

  return $wa ? @ret : $ret[0];
}
195 | |
# Deploy the schema: fetch the deployment statements and run each one
# through the storage, warning (not dying) on per-statement errors.
sub _deploy {
  my $self = shift;
  my $storage = $self->storage;

  my $deploy = sub {
    my $line = shift;
    # Comments, blank lines and literal transaction statements are
    # filtered out: the upgrade is wrapped in its own transaction at a
    # higher level, and a literal BEGIN inside an outer txn_do would
    # blow up on most databases (you can't BEGIN twice, e.g. SQLite).
    return if(!$line || $line =~ /^--|^BEGIN TRANSACTION|^COMMIT|^\s+$/);
    $storage->_query_start($line);
    try {
      # do a dbh_do cycle here, as we need some error checking in
      # place (even though we will ignore errors)
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      carp "$_ (running '${line}')"
    };
    # BUG FIX: the try/catch statement above previously had no
    # terminating semicolon, so this _query_end call was parsed as an
    # extra argument to catch() and was evaluated *before* the
    # statement actually ran.
    $storage->_query_end($line);
  };
  my @statements = $self->_deployment_statements();
  if (@statements > 1) {
    foreach my $statement (@statements) {
      $deploy->( $statement );
    }
  }
  elsif (@statements == 1) {
    # A single blob of SQL (read from a file) is split into individual
    # statements on ";\n" before being run.
    foreach my $line ( split(";\n", $statements[0])) {
      $deploy->( $line );
    }
  }
}
251 | |
# Generate and write the full-schema DDL file for the current schema
# version, once per configured database type.
sub prepare_install {
  my $self = shift;
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->upgrade_directory;
  my $sqltargs = $self->sqltargs;
  my $version = $schema->schema_version;

  unless( -d $dir ) {
    carp "Upgrade directory $dir does not exist, using ./\n";
    $dir = './';
  }

  my $sqlt = SQL::Translator->new({
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    parser => 'SQL::Translator::Parser::DBIx::Class',
    %{$sqltargs || {}}
  });

  # BUG FIX: was $self->throw_exception, but this class defines no
  # such method -- the rest of the file throws via the schema (see
  # _deployment_statements), so do the same here.
  my $sqlt_schema = $sqlt->translate({ data => $schema })
    or $schema->throw_exception ($sqlt->error);

  foreach my $db (@$databases) {
    # Reuse one translator: reset it and swap in the parsed schema and
    # the producer for this database type.
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $filename = $self->_ddl_schema_out_filename($db, $version, $dir);
    if (-e $filename ) {
      carp "Overwriting existing DDL file - $filename";
      unlink $filename;
    }

    my $output = $sqlt->translate;
    if(!$output) {
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }
    my $file;
    # NOTE(review): with 'use autodie' in effect this open dies on
    # failure, so the unless branch is effectively unreachable.
    unless( open $file, q(>), $filename ) {
      $schema->throw_exception("Can't open $filename for writing ($!)");
      next;
    }
    print {$file} $output;
    close $file;
  }
}
302 | |
# Generate the 'up' diff DDL between two schema versions, once per
# configured database type.  The previous version's full-schema file
# must already exist (see prepare_install).
sub prepare_upgrade {
  my ($self, $from_version, $to_version, $version_set) = @_;

  # NOTE(review): db_version is not defined in this file; presumably
  # supplied by the HandlesDeploy role/consumer -- confirm.
  $from_version ||= $self->db_version;
  $to_version ||= $self->schema_version;

  # for updates prepared automatically (rob's stuff)
  # one would want to explicitly set $version_set to
  # [$to_version]
  $version_set ||= [$from_version, $to_version];
  my $schema = $self->schema;
  my $databases = $self->databases;
  my $dir = $self->upgrade_directory;
  my $sqltargs = $self->sqltargs;

  unless( -d $dir ) {
    carp "Upgrade directory $dir does not exist, using ./\n";
    $dir = "./";
  }

  $sqltargs = {
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    %{$sqltargs}
  };

  my $sqlt = SQL::Translator->new( $sqltargs );

  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
  # BUG FIX (here and below): was $self->throw_exception, but this
  # class defines no such method -- the rest of the file throws via
  # the schema (see _deployment_statements).
  my $sqlt_schema = $sqlt->translate({ data => $schema })
    or $schema->throw_exception ($sqlt->error);

  foreach my $db (@$databases) {
    $sqlt->reset;
    $sqlt->{schema} = $sqlt_schema;
    $sqlt->producer($db);

    my $prefilename = $self->_ddl_schema_out_filename($db, $from_version, $dir);
    unless(-e $prefilename) {
      carp("No previous schema file found ($prefilename)");
      next;
    }

    my $diff_file = $self->_ddl_schema_up_out_filename($db, $version_set, $dir );
    if(-e $diff_file) {
      carp("Overwriting existing up-diff file - $diff_file");
      unlink $diff_file;
    }

    # Parse the previous version's DDL file back into an
    # SQL::Translator schema to diff against.
    my $source_schema;
    {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or $schema->throw_exception ($t->error);

      my $out = $t->translate( $prefilename )
        or $schema->throw_exception ($t->error);

      $source_schema = $t->schema;

      $source_schema->name( $prefilename )
        unless $source_schema->name;
    }

    # The "new" style of producers have sane normalization and can support
    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
    # And we have to diff parsed SQL against parsed SQL.
    my $dest_schema = $sqlt_schema;

    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
      my $t = SQL::Translator->new({
        %{$sqltargs},
        debug => 0,
        trace => 0,
      });

      $t->parser( $db ) # could this really throw an exception?
        or $schema->throw_exception ($t->error);

      my $filename = $self->_ddl_schema_out_filename($db, $to_version, $dir);
      my $out = $t->translate( $filename )
        or $schema->throw_exception ($t->error);

      $dest_schema = $t->schema;

      $dest_schema->name( $filename )
        unless $dest_schema->name;
    }

    my $diff = SQL::Translator::Diff::schema_diff(
      $source_schema, $db,
      $dest_schema, $db,
      $sqltargs
    );
    my $file;
    # NOTE(review): with 'use autodie' in effect this open dies on
    # failure, so the unless branch is effectively unreachable.
    unless(open $file, q(>), $diff_file) {
      $schema->throw_exception("Can't write to $diff_file ($!)");
      next;
    }
    print {$file} $diff;
    close $file;
  }
}
414 | |
# Read an SQL file and split it on ';' into individual statements,
# dropping comments, empty entries and literal transaction statements
# (the upgrade itself runs inside txn_do -- see _upgrade_single_step).
# Returns an arrayref of statements, or nothing if $file is false.
method _read_sql_file($file) {
  return unless $file;

  # NOTE(review): with 'use autodie' in effect this open dies on
  # failure, so the carp fallback is effectively unreachable.
  open my $fh, '<', $file or carp("Can't open upgrade file, $file ($!)");
  my @data = split /\n/, join '', <$fh>;
  close $fh;

  @data = grep {
    $_ &&
    !/^--/ &&
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/m
  } split /;/,
    join '', @data;

  return \@data;
}
431 | |
# Run the 'up' migration for one version pair (e.g. [1, 2]): locate
# the matching DDL files and execute each one inside a transaction.
sub _upgrade_single_step {
  my $self = shift;
  my @version_set = @{ shift @_ };
  my @upgrade_files = @{$self->_ddl_schema_up_in_filenames(
    $self->storage->sqlt_type,
    \@version_set,
  )};

  for my $upgrade_file (@upgrade_files) {
    unless (-f $upgrade_file) {
      # croak?
      carp "Upgrade not possible, no upgrade file found ($upgrade_file), please create one\n";
      return;
    }

    # Statements are passed to _do_upgrade via the _filedata attribute.
    $self->_filedata($self->_read_sql_file($upgrade_file)); # I don't like this --fREW 2010-02-22
    $self->schema->txn_do(sub { $self->_do_upgrade });
  }
}
451 | |
# Run every statement currently loaded in _filedata (the pattern
# matches everything).
method _do_upgrade { $self->_run_upgrade(qr/.*?/) }
334bced5 |
453 | |
# Execute the subset of _filedata statements matching the regex $stm,
# with query debug tracing around each one.
# NOTE(review): the loop iterates via the default variable $_ and also
# passes it as the explicit argument; _apply_statement (as originally
# written) reads $_, so keep $_ aliased to the current statement here.
method _run_upgrade($stm) {
  return unless $self->_filedata;
  my @statements = grep { $_ =~ $stm } @{$self->_filedata};

  for (@statements) {
    $self->storage->debugobj->query_start($_) if $self->storage->debug;
    $self->_apply_statement($_);
    $self->storage->debugobj->query_end($_) if $self->storage->debug;
  }
}
464 | |
# Execute one DDL statement against the storage's dbh, warning (rather
# than dying) if it fails.
method _apply_statement($statement) {
  # BUG FIX: previously used $_, which only happened to be set because
  # the caller iterated with the default loop variable; use the actual
  # argument so the method works regardless of the caller's loop style.
  # croak?
  $self->storage->dbh->do($statement) or carp "SQL was: $statement"
}
469 | |
2e68a8e1 |
470 | 1; |
e051bb00 |
471 | |
472 | __END__ |
473 | |
2eaf903b |
474 | vim: ts=2 sw=2 expandtab |