1 package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator;
4 # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories
7 use Carp qw( carp croak );
8 use Log::Contextual::WarnLogger;
9 use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
10 env_prefix => 'DBICDH'
13 use Method::Signatures::Simple;
17 require SQL::Translator::Diff;
19 require DBIx::Class::Storage; # loaded for type constraint
20 use DBIx::Class::DeploymentHandler::Types;
22 use File::Path 'mkpath';
23 use File::Spec::Functions;
25 with 'DBIx::Class::DeploymentHandler::HandlesDeploy';
28 isa => 'DBIx::Class::Schema',
34 isa => 'DBIx::Class::Storage',
# Builder for the 'storage' attribute: reuses the connected schema's storage.
39 method _build_storage {
40 my $s = $self->schema->storage;
# Force DBIC to resolve the concrete DBD-specific storage subclass now, so
# later calls such as ->sqlt_type return accurate answers.
41 $s->_determine_driver;
# NOTE(review): the builder's tail (presumably "$s;" and the closing brace)
# is elided from this chunk — confirm against the full file.
45 has sql_translator_args => (
48 default => sub { {} },
50 has script_directory => (
59 isa => 'DBIx::Class::DeploymentHandler::Databases',
61 default => sub { [qw( MySQL SQLite PostgreSQL )] },
70 has schema_version => (
76 # this will probably never get called as the DBICDH
77 # will be passing down a schema_version normally, which
78 # is built the same way, but we leave this in place
79 method _build_schema_version { $self->schema->schema_version }
86 sub _build__json { require JSON; JSON->new->pretty }
# Core file-discovery routine: given a database $type (e.g. 'SQLite'), a
# version set arrayref, and a stage $prefix ('schema'/'up'/'down'/
# 'preinstall'), return a sorted arrayref of script paths to run.
# Database-specific files win over _common files with the same basename.
88 method __ddl_consume_with_prefix($type, $versions, $prefix) {
89 my $base_dir = $self->script_directory;
91 my $main = catfile( $base_dir, $type );
92 my $generic = catfile( $base_dir, '_generic' );
# $common dir = <base>/_common/<prefix>/<v1-v2> (assignment head elided).
94 catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );
# Prefer the db-specific dir, fall back to _generic, else give up.
98 $dir = catfile($main, $prefix, join q(-), @{$versions})
99 } elsif (-d $generic) {
100 $dir = catfile($generic, $prefix, join q(-), @{$versions});
102 croak "neither $main or $generic exist; please write/generate some SQL";
# NOTE(review): opendir result is unchecked — a vanished dir would fail
# silently here; confirm intent against the full file.
105 opendir my($dh), $dir;
106 my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
# Merge in _common scripts, but never shadow a db-specific file.
110 opendir my($dh), $common;
111 for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) {
112 unless ($files{$filename}) {
113 $files{$filename} = catfile($common,$filename);
# Deterministic run order: sort by basename, return full paths.
119 return [@files{sort keys %files}]
# Curried __ddl_consume_with_prefix: scripts for the 'preinstall' stage
# of a single $version.
122 method _ddl_preinstall_consume_filenames($type, $version) {
123 $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall')
# Curried __ddl_consume_with_prefix: deploy ('schema') scripts for a
# single $version.
126 method _ddl_schema_consume_filenames($type, $version) {
127 $self->__ddl_consume_with_prefix($type, [ $version ], 'schema')
# Filename (created on demand) where the auto-generated serialized schema
# for $type/$version is written: <dir>/<type>/schema/<version>/001-auto.sql-json
130 method _ddl_schema_produce_filename($type, $version) {
131 my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
132 mkpath($dirname) unless -d $dirname;
134 return catfile( $dirname, '001-auto.sql-json' );
# Curried __ddl_consume_with_prefix: upgrade scripts for a version set.
137 method _ddl_schema_up_consume_filenames($type, $versions) {
138 $self->__ddl_consume_with_prefix($type, $versions, 'up')
# Curried __ddl_consume_with_prefix: downgrade scripts for a version set.
141 method _ddl_schema_down_consume_filenames($type, $versions) {
142 $self->__ddl_consume_with_prefix($type, $versions, 'down')
# Filename (created on demand) for the auto-generated upgrade diff:
# <dir>/<type>/up/<from-to>/001-auto.sql-json
145 method _ddl_schema_up_produce_filename($type, $versions) {
146 my $dir = $self->script_directory;
148 my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
149 mkpath($dirname) unless -d $dirname;
151 return catfile( $dirname, '001-auto.sql-json' );
# Filename (created on demand) for the auto-generated downgrade diff.
# NOTE(review): unlike the 'up' twin above, this takes $dir as a third
# argument instead of reading $self->script_directory — looks like an
# inconsistency worth confirming against callers.
154 method _ddl_schema_down_produce_filename($type, $versions, $dir) {
155 my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
156 mkpath($dirname) unless -d $dirname;
158 return catfile( $dirname, '001-auto.sql-json');
# Execute an arrayref of SQL statements against the storage, after
# filtering out blank lines, transaction control statements, and comment
# lines.  Returns the surviving statements joined with newlines.
161 method _run_sql_array($sql) {
162 my $storage = $self->storage;
# Filter pipeline (grep/map heads elided in this chunk):
165 $_ && # remove blank lines
166 !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
168 s/^\s+//; s/\s+$//; # trim whitespace
169 join '', grep { !/^--/ } split /\n/ # remove comments
172 Dlog_trace { "[DBICDH] Running SQL $_" } $sql;
173 foreach my $line (@{$sql}) {
# Bracket each statement with _query_start/_query_end so DBIC's query
# logging shows exactly which line ran.
174 $storage->_query_start($line);
175 # the whole reason we do this is so that we can see the line that was run
177 $storage->dbh_do (sub { $_[1]->do($line) });
# Re-throw with the offending statement attached (eval/if-$@ elided).
180 die "$_ (running line '$line')"
182 $storage->_query_end($line);
184 return join "\n", @$sql
# Read statements from a plain .sql file and run them via _run_sql_array.
187 method _run_sql($filename) {
188 log_debug { "[DBICDH] Running SQL from $filename" };
189 return $self->_run_sql_array($self->_read_sql_file($filename));
# Run a migration .pl file: slurp it, string-eval it, and expect it to
# yield a coderef which (per the POD) is called with the schema.
192 method _run_perl($filename) {
193 log_debug { "[DBICDH] Running Perl from $filename" };
# Slurp via the @ARGV/$/ localization trick.
194 my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
196 no warnings 'redefine';
# String eval is deliberate here — the file *is* dynamic code — but it
# means migration scripts run with this process's full privileges.
197 my $fn = eval "$filedata";
199 Dlog_trace { "[DBICDH] Running Perl $_" } $fn;
202 carp "$filename failed to compile: $@";
203 } elsif (ref $fn eq 'CODE') {
# NOTE(review): "anonymouse" is a typo in this user-facing message; it is
# a runtime string so it is left untouched in this doc-only pass.
206 carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
# Run a serialized SQL file (.sql-<type>).  Only JSON is supported: the
# file is slurped, decoded to an arrayref of statements, and executed.
210 method _run_serialized_sql($filename, $type) {
211 if (lc $type eq 'json') {
212 return $self->_run_sql_array($self->_json->decode(
213 do { local( @ARGV, $/ ) = $filename; <> } # slurp
# NOTE(review): "serialzed" is a typo in this runtime message; left
# untouched in this doc-only pass.
216 croak "$type is not one of the supported serialzed types"
# Dispatch a list of migration files by extension: .sql -> _run_sql,
# .sql-<fmt> -> _run_serialized_sql, .pl -> _run_perl.  Accumulated SQL
# text is returned (tail elided); optionally wrapped in one transaction.
220 method _run_sql_and_perl($filenames) {
221 my @files = @{$filenames};
# NOTE(review): "my $x = ... if COND" is documented as undefined behavior
# in perlsyn ("my" with a statement modifier) — the guard may persist
# across calls.  Worth restructuring when the full sub is in view.
222 my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
225 for my $filename (@files) {
226 if ($filename =~ /\.sql$/) {
227 $sql .= $self->_run_sql($filename)
228 } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
229 $sql .= $self->_run_serialized_sql($filename, $1)
230 } elsif ( $filename =~ /\.pl$/ ) {
231 $self->_run_perl($filename)
233 croak "A file ($filename) got to deploy that wasn't sql or perl!";
# Commit only when we opened the guard above.
237 $guard->commit if $self->txn_wrap;
# Body fragment of deploy() (sub header elided in this chunk): runs every
# 'schema' stage file for the requested (or current) version against the
# storage's database type.
244 my $version = (shift @_ || {})->{version} || $self->schema_version;
245 log_info { "[DBICDH] deploying version $version" };
247 return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
248 $self->storage->sqlt_type,
# Body fragment of preinstall() (sub header elided): runs only the .pl
# preinstall scripts — SQL files are ignored, and nothing is passed to
# the evaluated subs since there may be no database yet.
256 my $version = $args->{version} || $self->schema_version;
257 log_info { "[DBICDH] preinstalling version $version" };
# storage_type can be overridden (there may be no live connection yet).
258 my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
260 my @files = @{$self->_ddl_preinstall_consume_filenames(
265 for my $filename (@files) {
266 # We ignore sql for now (till I figure out what to do with it)
267 if ( $filename =~ /^(.+)\.pl$/ ) {
268 my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
270 no warnings 'redefine';
# Same string-eval convention as _run_perl.
271 my $fn = eval "$filedata";
275 carp "$filename failed to compile: $@";
276 } elsif (ref $fn eq 'CODE') {
279 carp "$filename should define an anonymous sub but it didn't!";
282 croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
# Generate the serialized deploy DDL for every configured database type.
# First arg: extra SQL::Translator args (merged over sql_translator_args);
# second (elided from view): name of the method that yields the output
# filename.  Existing files are overwritten with a warning.
287 sub _prepare_install {
289 my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
291 my $schema = $self->schema;
292 my $databases = $self->databases;
293 my $dir = $self->script_directory;
294 my $version = $self->schema_version;
296 my $sqlt = SQL::Translator->new({
299 ignore_constraint_names => 1,
300 ignore_index_names => 1,
301 parser => 'SQL::Translator::Parser::DBIx::Class',
# Parse the DBIC schema once; reuse the resulting SQLT schema per-db.
305 my $sqlt_schema = $sqlt->translate( data => $schema )
306 or croak($sqlt->error);
308 foreach my $db (@$databases) {
# Reset translator state, then switch producer to the target db.
310 $sqlt->{schema} = $sqlt_schema;
311 $sqlt->producer($db);
313 my $filename = $self->$to_file($db, $version, $dir);
315 carp "Overwriting existing DDL file - $filename";
319 my $sql = $self->_generate_final_sql($sqlt);
# A failed translation skips this db rather than aborting the loop.
321 carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
# NOTE(review): open is unchecked here — a write failure would be silent.
324 open my $file, q(>), $filename;
# Serialize a translated statement list to pretty JSON — this is the
# on-disk .sql-json format.
330 method _generate_final_sql($sqlt) {
331 my @output = $sqlt->translate;
332 $self->_json->encode(\@output);
# Returns a *coderef* (the "return sub {" wrapper is elided from this
# view) closing over $source_name; the inner sub maps ($type, $version)
# to a per-resultsource install filename, creating the directory first.
335 sub _resultsource_install_filename {
336 my ($self, $source_name) = @_;
# Inner sub's argument unpack:
338 my ($self, $type, $version) = @_;
339 my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
340 mkpath($dirname) unless -d $dirname;
342 return catfile( $dirname, "001-auto-$source_name.sql-json" );
# Deploy a single result source: resolve its install filename via the
# coderef returned by _resultsource_install_filename, then run the file.
346 sub install_resultsource {
347 my ($self, $args) = @_;
348 my $source = $args->{result_source};
349 my $version = $args->{version};
350 log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
351 my $rs_install_file =
352 $self->_resultsource_install_filename($source->source_name);
# $rs_install_file is a coderef used as a method — intentional "weird
# interface" per the POD below.
355 $self->$rs_install_file(
356 $self->storage->sqlt_type,
360 $self->_run_sql_and_perl($files);
# Generate the install DDL for one result source by restricting the
# SQL::Translator parser to that single source.
363 sub prepare_resultsource_install {
365 my $source = (shift @_)->{result_source};
366 log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };
368 my $filename = $self->_resultsource_install_filename($source->source_name);
369 $self->_prepare_install({
370 parser_args => { sources => [$source->source_name], }
# Body fragment of prepare_deploy() (sub header elided): delegates to
# _prepare_install using the standard per-version filename method.
375 log_info { '[DBICDH] preparing deploy' };
377 $self->_prepare_install({}, '_ddl_schema_produce_filename');
# Public entry: generate the upgrade diff file for a version pair.
# Thin wrapper over _prepare_changegrade with direction 'up'.
380 sub prepare_upgrade {
381 my ($self, $args) = @_;
383 '[DBICDH] preparing upgrade ' .
384 "from $args->{from_version} to $args->{to_version}"
386 $self->_prepare_changegrade(
387 $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
# Public entry: generate the downgrade diff file for a version pair.
# Thin wrapper over _prepare_changegrade with direction 'down'.
391 sub prepare_downgrade {
392 my ($self, $args) = @_;
394 '[DBICDH] preparing downgrade ' .
395 "from $args->{from_version} to $args->{to_version}"
397 $self->_prepare_changegrade(
398 $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
# Generate the serialized up/down diff for every configured database:
# parse the from-version DDL file back into an SQLT schema, diff it
# against the to-version schema (re-parsed from SQL for old-style
# producers), and write the JSON-encoded statement list.
402 method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
403 my $schema = $self->schema;
404 my $databases = $self->databases;
405 my $dir = $self->script_directory;
406 my $sqltargs = $self->sql_translator_args;
408 my $schema_version = $self->schema_version;
413 ignore_constraint_names => 1,
414 ignore_index_names => 1,
418 my $sqlt = SQL::Translator->new( $sqltargs );
420 $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
# Parse the live DBIC schema once; reused per-database below.
421 my $sqlt_schema = $sqlt->translate( data => $schema )
422 or croak($sqlt->error);
424 foreach my $db (@$databases) {
426 $sqlt->{schema} = $sqlt_schema;
427 $sqlt->producer($db);
# Locate the previously-generated from-version DDL; skip db if absent.
429 my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
430 unless(-e $prefilename) {
431 carp("No previous schema file found ($prefilename)");
434 my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
435 my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
437 carp("Overwriting existing $direction-diff file - $diff_file");
# Source side: always re-parsed from the stored SQL of $from_version.
443 my $t = SQL::Translator->new({
449 $t->parser( $db ) # could this really throw an exception?
452 my $sql = $self->_default_read_sql_file_as_string($prefilename);
453 my $out = $t->translate( \$sql )
456 $source_schema = $t->schema;
# SQLT requires named schemas for diffing.
458 $source_schema->name( $prefilename )
459 unless $source_schema->name;
462 # The "new" style of producers have sane normalization and can support
463 # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
464 # And we have to diff parsed SQL against parsed SQL.
465 my $dest_schema = $sqlt_schema;
467 unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
468 my $t = SQL::Translator->new({
474 $t->parser( $db ) # could this really throw an exception?
# Old-style producer: re-parse the to-version SQL so both sides of the
# diff went through the same normalization.
477 my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
478 my $sql = $self->_default_read_sql_file_as_string($filename);
479 my $out = $t->translate( \$sql )
482 $dest_schema = $t->schema;
484 $dest_schema->name( $filename )
485 unless $dest_schema->name;
# NOTE(review): open is unchecked — a write failure would be silent.
488 open my $file, q(>), $diff_file;
490 $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
# JSON-encode the statement list produced by SQL::Translator::Diff for
# the two schemas (remaining schema_diff args elided from this view).
495 method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
496 $self->_json->encode([
497 SQL::Translator::Diff::schema_diff(
# Read a plain .sql file and split it into statements on ";\n".
# NOTE(review): open is unchecked; also ";\n" inside string literals
# would mis-split — acceptable for generated files.
505 method _read_sql_file($file) {
508 open my $fh, '<', $file;
509 my @data = split /;\n/, join '', <$fh>;
# Inverse of the serialized format: slurp a .sql-json file, decode the
# statement array, and rejoin into one ";\n"-terminated SQL string so it
# can be fed back through an SQL::Translator parser.
515 method _default_read_sql_file_as_string($file) {
516 return join q(), map "$_;\n", @{$self->_json->decode(
517 do { local( @ARGV, $/ ) = $file; <> } # slurp
# Run one downgrade step: execute every 'down' stage file for the given
# version set against the current database type.
521 sub downgrade_single_step {
523 my $version_set = (shift @_)->{version_set};
524 Dlog_info { qq([DBICDH] downgrade_single_step'ing $_) } $version_set;
526 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
527 $self->storage->sqlt_type,
# Run one upgrade step: execute every 'up' stage file for the given
# version set against the current database type.
534 sub upgrade_single_step {
536 my $version_set = (shift @_)->{version_set};
537 Dlog_info { qq([DBICDH] upgrade_single_step'ing $_) } $version_set;
539 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
540 $self->storage->sqlt_type,
546 __PACKAGE__->meta->make_immutable;
550 # vim: ts=2 sw=2 expandtab
556 This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes
557 care of generating serialized sql files representing schemata as well
558 as serialized sql files to move from one version of a schema to the next.
559 One of the hallmark features of this class is that it allows for multiple sql
560 files for deploy and upgrade, allowing developers to fine tune deployment.
561 In addition it also allows for perl files to be run
562 at any stage of the process.
564 For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
565 documented here is extra fun stuff or private methods.
567 =head1 DIRECTORY LAYOUT
569 Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's
570 heavily based upon L<DBIx::Migration::Directories>, but has some extensions and
571 modifications, so even if you are familiar with it, please read this. I feel
572 like the best way to describe the layout is with the following example:
578 | | `- 001-auto.sql-json
581 | | `- 001-auto.sql-json
584 | | `- 001-auto.sql-json
586 | `- 001-auto.sql-json
590 | | `- 002-remove-customers.pl
593 | `- 002-generate-customers.pl
597 | | `- 001-auto.sql-json
600 | | `- 001-auto.sql-json
603 | |- 001-auto.sql-json
604 | `- 002-create-stored-procedures.sql
608 | `- 001-auto.sql-json
611 | |- 001-create_database.pl
612 | `- 002-create_users_and_permissions.pl
615 | `- 001-auto.sql-json
620 So basically, the code
624 on an C<SQLite> database that would simply run
625 C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next,
627 $dm->upgrade_single_step([1,2])
629 would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
630 C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
632 C<.pl> files don't have to be in the C<_common> directory, but most of the time
633 they should be, because perl scripts are generally database independent.
635 C<_generic> exists for when you for some reason are sure that your SQL is
636 generic enough to run on all databases. Good luck with that one.
638 Note that unlike most steps in the process, C<preinstall> will not run SQL, as
639 there may not even be a database at preinstall time. It will run perl scripts
640 just like the other steps in the process, but nothing is passed to them.
641 Until people have used this more it will remain freeform, but a recommended use
642 of preinstall is to have it prompt for username and password, and then call the
643 appropriate C<< CREATE DATABASE >> commands etc.
645 =head1 SERIALIZED SQL
647 The SQL that this module generates and uses is serialized into an array of
648 SQL statements. The reason being that some databases handle multiple
649 statements in a single execution differently. Generally you do not need to
650 worry about this as these are scripts generated for you. If you find that
651 you are editing them on a regular basis something is wrong and you either need
652 to submit a bug or consider writing extra serialized SQL or Perl scripts to run
653 before or after the automatically generated script.
655 B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in
656 patches that will allow more serialization formats if you want that feature,
657 but if you do send me a patch for that realize that I do not want to add YAML
658 support or whatever, I would rather add a generic method of adding any
659 serialization format.
663 A perl script for this tool is very simple. It merely needs to contain an
664 anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
665 A very basic perl script might look like:
675 $schema->resultset('Users')->create({
683 The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database
684 and generate the DDL.
688 The L<DBIx::Class::Storage> that is I<actually> used to talk to the database
689 and generate the DDL. This is automatically created with L</_build_storage>.
691 =attr sql_translator_args
693 The arguments that get passed to L<SQL::Translator> when it's used.
695 =attr script_directory
697 The directory (default C<'sql'>) that scripts are stored in
701 The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to
706 Set to true (which is the default) to wrap all upgrades and deploys in a single
711 The version the schema on your harddrive is at. Defaults to
712 C<< $self->schema->schema_version >>.
716 =head2 __ddl_consume_with_prefix
718 $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
720 This is the meat of the multi-file upgrade/deploy stuff. It returns a list of
721 files in the order that they should be run for a generic "type" of upgrade.
722 You should not be calling this in user code.
724 =head2 _ddl_schema_consume_filenames
726 $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
728 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
731 =head2 _ddl_schema_produce_filename
733 $dm->_ddl_schema_produce_filename( 'SQLite', '1.00' )
735 Returns a single file in which an initial schema will be stored.
737 =head2 _ddl_schema_up_consume_filenames
739 $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
741 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
744 =head2 _ddl_schema_down_consume_filenames
746 $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
748 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for a
751 =head2 _ddl_schema_up_produce_filename
753 $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
755 Returns a single file in which the sql to upgrade from one schema to another
758 =head2 _ddl_schema_down_produce_filename
760 $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
762 Returns a single file in which the sql to downgrade from one schema to another
765 =head2 _resultsource_install_filename
767 my $filename_fn = $dm->_resultsource_install_filename('User');
768 $dm->$filename_fn('SQLite', '1.00')
770 Returns a function which in turn returns a single filename used to install a
771 single resultsource. Weird interface is convenient for me. Deal with it.
773 =head2 _run_sql_and_perl
775 $dm->_run_sql_and_perl([qw( list of filenames )])
777 Simply put, this runs the list of files passed to it. If the file ends in
778 C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file.
780 Depending on L</txn_wrap> all of the files run will be wrapped in a single
783 =head2 _prepare_install
785 $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
787 Generates the sql file for installing the database. First arg is simply
788 L<SQL::Translator> args and the second is a coderef that returns the filename
791 =head2 _prepare_changegrade
793 $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
795 Generates the sql file for migrating from one schema version to another. First
796 arg is the version to start from, second is the version to go to, third is the
797 L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the
798 direction of the changegrade, be it 'up' or 'down'.
800 =head2 _read_sql_file
802 $dm->_read_sql_file('foo.sql')
804 Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
805 transactions, and blank lines.