1 package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator;
4 # ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories
7 use Carp qw( carp croak );
8 use Log::Contextual::WarnLogger;
9 use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
10 env_prefix => 'DBICDH'
12 use Data::Dumper::Concise;
14 use Method::Signatures::Simple;
18 require SQL::Translator::Diff;
20 require DBIx::Class::Storage; # loaded for type constraint
21 use DBIx::Class::DeploymentHandler::Types;
23 use File::Path 'mkpath';
24 use File::Spec::Functions;
26 with 'DBIx::Class::DeploymentHandler::HandlesDeploy';
29 isa => 'DBIx::Class::Schema',
35 isa => 'DBIx::Class::Storage',
# Builder for the 'storage' attribute: reuses the schema's storage but
# forces driver detection up front so later calls (e.g. ->sqlt_type)
# see the fully-resolved storage subclass.
40 method _build_storage {
41 my $s = $self->schema->storage;
42 $s->_determine_driver;
46 has sql_translator_args => (
49 default => sub { {} },
51 has script_directory => (
60 isa => 'DBIx::Class::DeploymentHandler::Databases',
62 default => sub { [qw( MySQL SQLite PostgreSQL )] },
71 has schema_version => (
77 # this will probably never get called as the DBICDH
78 # will be passing down a schema_version normally, which
79 # is built the same way, but we leave this in place
80 method _build_schema_version { $self->schema->schema_version }
# Lazily-built JSON codec used to (de)serialize the SQL statement
# arrays; pretty-printing keeps the .sql-json files human-diffable.
87 sub _build__json { require JSON; JSON->new->pretty }
# Collect the script files to run for a database $type, version set
# $versions, and stage $prefix (schema/up/down/preinstall).  Files from
# the db-specific directory (or _generic as a fallback) are merged with
# the shared _common directory; a db-specific file shadows a _common
# file of the same name.  Returns an ArrayRef of paths sorted by
# filename, which is why scripts carry numeric prefixes (001-, 002-).
89 method __ddl_consume_with_prefix($type, $versions, $prefix) {
90 my $base_dir = $self->script_directory;
92 my $main = catfile( $base_dir, $type );
93 my $generic = catfile( $base_dir, '_generic' );
95 catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} );
# Prefer the db-specific directory; fall back to _generic; otherwise die.
99 $dir = catfile($main, $prefix, join q(-), @{$versions})
100 } elsif (-d $generic) {
101 $dir = catfile($generic, $prefix, join q(-), @{$versions});
103 croak "neither $main or $generic exist; please write/generate some SQL";
106 opendir my($dh), $dir;
107 my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
# Merge in _common scripts that are not shadowed by a db-specific one.
111 opendir my($dh), $common;
112 for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) {
113 unless ($files{$filename}) {
114 $files{$filename} = catfile($common,$filename);
# Sorted by filename so the numeric prefixes control execution order.
120 return [@files{sort keys %files}]
# Thin curried wrappers around __ddl_consume_with_prefix, one per stage.
123 method _ddl_preinstall_consume_filenames($type, $version) {
124 $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall')
127 method _ddl_schema_consume_filenames($type, $version) {
128 $self->__ddl_consume_with_prefix($type, [ $version ], 'schema')
# Filename where a freshly generated install schema is written; creates
# the target directory on demand.
131 method _ddl_schema_produce_filename($type, $version) {
132 my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
133 mkpath($dirname) unless -d $dirname;
135 return catfile( $dirname, '001-auto.sql-json' );
138 method _ddl_schema_up_consume_filenames($type, $versions) {
139 $self->__ddl_consume_with_prefix($type, $versions, 'up')
142 method _ddl_schema_down_consume_filenames($type, $versions) {
143 $self->__ddl_consume_with_prefix($type, $versions, 'down')
# Target filenames for generated up-/down-grade DDL; directories are
# created on demand.  NOTE(review): the down variant takes $dir as an
# explicit parameter while the up variant reads script_directory itself
# — asymmetric signatures, though both end up producing
# <dir>/<type>/<up|down>/<v1-v2>/001-auto.sql-json.
146 method _ddl_schema_up_produce_filename($type, $versions) {
147 my $dir = $self->script_directory;
149 my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
150 mkpath($dirname) unless -d $dirname;
152 return catfile( $dirname, '001-auto.sql-json' );
155 method _ddl_schema_down_produce_filename($type, $versions, $dir) {
156 my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
157 mkpath($dirname) unless -d $dirname;
159 return catfile( $dirname, '001-auto.sql-json');
# Execute an ArrayRef of SQL statements against $self->storage.  Blank
# lines, "--" comments, and transaction statements (BEGIN/COMMIT) are
# filtered out first — transaction handling is txn_wrap's job, not the
# scripts'.  Failures of individual statements only warn (carp), they
# do not abort the run.  Returns the filtered SQL joined by newlines.
162 method _run_sql_array($sql) {
163 my $storage = $self->storage;
166 $_ && # remove blank lines
167 !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
169 s/^\s+//; s/\s+$//; # trim whitespace
170 join '', grep { !/^--/ } split /\n/ # remove comments
173 log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
174 foreach my $line (@{$sql}) {
175 $storage->_query_start($line);
177 # do a dbh_do cycle here, as we need some error checking in
178 # place (even though we will ignore errors)
179 $storage->dbh_do (sub { $_[1]->do($line) });
182 carp "$_ (running '${line}')"
184 $storage->_query_end($line);
186 return join "\n", @$sql
# Read a plain .sql file and run its statements via _run_sql_array.
189 method _run_sql($filename) {
190 log_debug { "[DBICDH] Running SQL from $filename" };
191 return $self->_run_sql_array($self->_read_sql_file($filename));
# Slurp a .pl migration script and string-eval it; the file is expected
# to return a coderef, which is then invoked with the schema.  Compile
# failures and non-coderef results only warn (carp), they do not abort.
# NOTE(review): the carp message below misspells "anonymous" — it is a
# runtime string, left untouched here; fix in a behavior-change pass.
194 method _run_perl($filename) {
195 log_debug { "[DBICDH] Running Perl from $filename" };
196 my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
198 no warnings 'redefine';
199 my $fn = eval "$filedata";
201 log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };
204 carp "$filename failed to compile: $@";
205 } elsif (ref $fn eq 'CODE') {
208 carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
# Run a serialized SQL file: decode the JSON array of statements and
# hand it to _run_sql_array.  Only the 'json' type is supported;
# anything else croaks (message also has a typo, "serialzed").
212 method _run_serialized_sql($filename, $type) {
213 if (lc $type eq 'json') {
214 return $self->_run_sql_array($self->_json->decode(
215 do { local( @ARGV, $/ ) = $filename; <> } # slurp
218 croak "$type is not one of the supported serialzed types"
# Dispatch a list of migration files by extension: .sql → _run_sql,
# .sql-<type> → _run_serialized_sql, .pl → _run_perl; anything else is
# fatal.  When txn_wrap is true the whole batch runs inside a single
# transaction via a scope guard.  Returns the accumulated SQL text.
# NOTE(review): "my $guard = ... if $self->txn_wrap" — a 'my'
# declaration with a statement modifier has undefined behavior in Perl;
# consider splitting declaration and conditional assignment.
222 method _run_sql_and_perl($filenames) {
223 my @files = @{$filenames};
224 my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
227 for my $filename (@files) {
228 if ($filename =~ /\.sql$/) {
229 $sql .= $self->_run_sql($filename)
230 } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
231 $sql .= $self->_run_serialized_sql($filename, $1)
232 } elsif ( $filename =~ /\.pl$/ ) {
233 $self->_run_perl($filename)
235 croak "A file ($filename) got to deploy that wasn't sql or perl!";
239 $guard->commit if $self->txn_wrap;
# deploy: run the 'schema'-stage scripts for the requested version
# (defaulting to the current schema_version) against the connected
# database type.
246 my $version = (shift @_ || {})->{version} || $self->schema_version;
247 log_info { "[DBICDH] deploying version $version" };
249 return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
250 $self->storage->sqlt_type,
# preinstall: run only the perl scripts from the 'preinstall' stage —
# SQL is skipped here since there may be no database yet (see POD).
# Each .pl file is string-eval'd; a returned coderef is then invoked,
# with nothing passed to it per the documented preinstall contract.
258 my $version = $args->{version} || $self->schema_version;
259 log_info { "[DBICDH] preinstalling version $version" };
260 my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
262 my @files = @{$self->_ddl_preinstall_consume_filenames(
267 for my $filename (@files) {
268 # We ignore sql for now (till I figure out what to do with it)
269 if ( $filename =~ /^(.+)\.pl$/ ) {
270 my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
272 no warnings 'redefine';
273 my $fn = eval "$filedata";
277 carp "$filename failed to compile: $@";
278 } elsif (ref $fn eq 'CODE') {
281 carp "$filename should define an anonymous sub but it didn't!";
284 croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!";
# _prepare_install($sqltargs, $to_file): serialize the current DBIC
# schema to a .sql-json install file for every configured database
# type.  $sqltargs are merged over sql_translator_args; $to_file is the
# name of a method on $self that yields the target filename.
289 sub _prepare_install {
291 my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
293 my $schema = $self->schema;
294 my $databases = $self->databases;
295 my $dir = $self->script_directory;
296 my $version = $self->schema_version;
298 my $sqlt = SQL::Translator->new({
301 ignore_constraint_names => 1,
302 ignore_index_names => 1,
303 parser => 'SQL::Translator::Parser::DBIx::Class',
# Parse the DBIC schema once; the parsed tree is reused per producer.
307 my $sqlt_schema = $sqlt->translate( data => $schema )
308 or croak($sqlt->error);
310 foreach my $db (@$databases) {
312 $sqlt->{schema} = $sqlt_schema;
313 $sqlt->producer($db);
315 my $filename = $self->$to_file($db, $version, $dir);
317 carp "Overwriting existing DDL file - $filename";
# A failed translation for one database only warns and skips it.
321 my $sql = $self->_generate_final_sql($sqlt);
323 carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
# NOTE(review): open is unchecked — a write failure here is silent;
# consider "or croak $!".
326 open my $file, q(>), $filename;
# Serialize the translated statements as a JSON array (see the
# SERIALIZED SQL section of the POD).
332 method _generate_final_sql($sqlt) {
333 my @output = $sqlt->translate;
334 $self->_json->encode(\@output);
# Returns a closure: given ($type, $version) it yields (creating the
# directory on demand) the filename used to install a single result
# source — 001-auto-<source_name>.sql-json under the schema stage dir.
337 sub _resultsource_install_filename {
338 my ($self, $source_name) = @_;
340 my ($self, $type, $version) = @_;
341 my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
342 mkpath($dirname) unless -d $dirname;
344 return catfile( $dirname, "001-auto-$source_name.sql-json" );
# install_resultsource: run the per-resultsource install file (as
# generated by prepare_resultsource_install) for the given version.
348 sub install_resultsource {
349 my ($self, $args) = @_;
350 my $source = $args->{result_source};
351 my $version = $args->{version};
352 log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
# $rs_install_file is the closure returned above; call it as a method
# to resolve the concrete filename for this storage type.
353 my $rs_install_file =
354 $self->_resultsource_install_filename($source->source_name);
357 $self->$rs_install_file(
358 $self->storage->sqlt_type,
362 $self->_run_sql_and_perl($files);
# prepare_resultsource_install: generate the .sql-json install file for
# a single result source by restricting the SQLT parser ('sources') to
# just that source.
365 sub prepare_resultsource_install {
367 my $source = (shift @_)->{result_source};
368 log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };
370 my $filename = $self->_resultsource_install_filename($source->source_name);
371 $self->_prepare_install({
372 parser_args => { sources => [$source->source_name], }
# prepare_deploy: generate the full-schema install files for every
# configured database type.
377 log_info { '[DBICDH] preparing deploy' };
379 $self->_prepare_install({}, '_ddl_schema_produce_filename');
# prepare_upgrade / prepare_downgrade: thin wrappers that delegate to
# _prepare_changegrade with the appropriate direction ('up'/'down').
382 sub prepare_upgrade {
383 my ($self, $args) = @_;
385 '[DBICDH] preparing upgrade ' .
386 "from $args->{from_version} to $args->{to_version}"
388 $self->_prepare_changegrade(
389 $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
393 sub prepare_downgrade {
394 my ($self, $args) = @_;
396 '[DBICDH] preparing downgrade ' .
397 "from $args->{from_version} to $args->{to_version}"
399 $self->_prepare_changegrade(
400 $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
# Generate the serialized diff DDL needed to move between two schema
# versions, in either direction.  For each configured database: load
# the previously generated schema file for $from_version, re-parse it
# with the db-specific parser, diff it against the current schema (or a
# re-parsed $to_version file for old-style producers), and write the
# result as .sql-json into the up/ or down/ dir for $version_set.
404 method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
405 my $schema = $self->schema;
406 my $databases = $self->databases;
407 my $dir = $self->script_directory;
408 my $sqltargs = $self->sql_translator_args;
410 my $schema_version = $self->schema_version;
415 ignore_constraint_names => 1,
416 ignore_index_names => 1,
420 my $sqlt = SQL::Translator->new( $sqltargs );
422 $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
423 my $sqlt_schema = $sqlt->translate( data => $schema )
424 or croak($sqlt->error);
426 foreach my $db (@$databases) {
428 $sqlt->{schema} = $sqlt_schema;
429 $sqlt->producer($db);
# Without the stored $from_version schema file there is nothing to
# diff against, so this database is skipped with a warning.
431 my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
432 unless(-e $prefilename) {
433 carp("No previous schema file found ($prefilename)");
436 my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
437 my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
439 carp("Overwriting existing $direction-diff file - $diff_file");
# Re-parse the stored "from" schema with the db-specific parser so the
# diff compares like with like.
445 my $t = SQL::Translator->new({
451 $t->parser( $db ) # could this really throw an exception?
454 my $sql = $self->_default_read_sql_file_as_string($prefilename);
455 my $out = $t->translate( \$sql )
458 $source_schema = $t->schema;
460 $source_schema->name( $prefilename )
461 unless $source_schema->name;
464 # The "new" style of producers have sane normalization and can support
465 # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
466 # And we have to diff parsed SQL against parsed SQL.
467 my $dest_schema = $sqlt_schema;
469 unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
470 my $t = SQL::Translator->new({
476 $t->parser( $db ) # could this really throw an exception?
479 my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
480 my $sql = $self->_default_read_sql_file_as_string($filename);
481 my $out = $t->translate( \$sql )
484 $dest_schema = $t->schema;
486 $dest_schema->name( $filename )
487 unless $dest_schema->name;
# NOTE(review): unchecked open on the diff file — write failures are
# silent; consider "or croak $!".
490 open my $file, q(>), $diff_file;
492 $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
# Diff two SQLT schemas for $db via SQL::Translator::Diff and serialize
# the resulting statements as a JSON array.
497 method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
498 $self->_json->encode([
499 SQL::Translator::Diff::schema_diff(
# Read a plain .sql file, splitting it into individual statements on
# ";\n" (further filtering happens later in _run_sql_array).
# NOTE(review): open is unchecked here as well.
507 method _read_sql_file($file) {
510 open my $fh, '<', $file;
511 my @data = split /;\n/, join '', <$fh>;
# Inverse of the serializer: decode a .sql-json file and rebuild one
# SQL string with ";\n" after each statement, suitable for feeding back
# into an SQL::Translator parser (see _prepare_changegrade).
517 method _default_read_sql_file_as_string($file) {
518 return join q(), map "$_;\n", @{$self->_json->decode(
519 do { local( @ARGV, $/ ) = $file; <> } # slurp
# Run one step of a down-/up-grade: execute the 'down' or 'up' stage
# scripts for the given version set against the connected db type.
523 sub downgrade_single_step {
525 my $version_set = (shift @_)->{version_set};
526 log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };
528 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
529 $self->storage->sqlt_type,
536 sub upgrade_single_step {
538 my $version_set = (shift @_)->{version_set};
539 log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };
541 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
542 $self->storage->sqlt_type,
548 __PACKAGE__->meta->make_immutable;
552 # vim: ts=2 sw=2 expandtab
558 This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes
559 care of generating serialized sql files representing schemata as well
560 as serialized sql files to move from one version of a schema to the next.
561 One of the hallmark features of this class is that it allows for multiple sql
562 files for deploy and upgrade, allowing developers to fine tune deployment.
563 In addition it also allows for perl files to be run
564 at any stage of the process.
566 For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
567 documented here is extra fun stuff or private methods.
569 =head1 DIRECTORY LAYOUT
571 Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>. It's
572 heavily based upon L<DBIx::Migration::Directories>, but has some extensions and
573 modifications, so even if you are familiar with it, please read this. I feel
574 like the best way to describe the layout is with the following example:
580 | | `- 001-auto.sql-json
583 | | `- 001-auto.sql-json
586 | | `- 001-auto.sql-json
588 | `- 001-auto.sql-json
592 | | `- 002-remove-customers.pl
595 | `- 002-generate-customers.pl
599 | | `- 001-auto.sql-json
602 | | `- 001-auto.sql-json
605 | |- 001-auto.sql-json
606 | `- 002-create-stored-procedures.sql
610 | `- 001-auto.sql-json
613 | |- 001-create_database.pl
614 | `- 002-create_users_and_permissions.pl
617 | `- 001-auto.sql-json
622 So basically, the code
626 on an C<SQLite> database that would simply run
627 C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next,
629 $dm->upgrade_single_step([1,2])
631 would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
632 C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
634 C<.pl> files don't have to be in the C<_common> directory, but most of the time
635 they should be, because perl scripts are generally database independent.
637 C<_generic> exists for when you for some reason are sure that your SQL is
638 generic enough to run on all databases. Good luck with that one.
640 Note that unlike most steps in the process, C<preinstall> will not run SQL, as
641 there may not even be a database at preinstall time. It will run perl scripts
642 just like the other steps in the process, but nothing is passed to them.
643 Until people have used this more it will remain freeform, but a recommended use
644 of preinstall is to have it prompt for username and password, and then call the
645 appropriate C<< CREATE DATABASE >> commands etc.
647 =head1 SERIALIZED SQL
649 The SQL that this module generates and uses is serialized into an array of
650 SQL statements. The reason being that some databases handle multiple
651 statements in a single execution differently. Generally you do not need to
652 worry about this as these are scripts generated for you. If you find that
653 you are editing them on a regular basis something is wrong and you either need
654 to submit a bug or consider writing extra serialized SQL or Perl scripts to run
655 before or after the automatically generated script.
657 B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in
658 patches that will allow more serialization formats if you want that feature,
659 but if you do send me a patch for that realize that I do not want to add YAML
660 support or whatever, I would rather add a generic method of adding any
661 serialization format.
665 A perl script for this tool is very simple. It merely needs to contain an
666 anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
667 A very basic perl script might look like:
677 $schema->resultset('Users')->create({
685 The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database
686 and generate the DDL.
690 The L<DBIx::Class::Storage> that is I<actually> used to talk to the database
691 and generate the DDL. This is automatically created with L</_build_storage>.
693 =attr sql_translator_args
695 The arguments that get passed to L<SQL::Translator> when it's used.
697 =attr script_directory
699 The directory (default C<'sql'>) that scripts are stored in
703 The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to
708 Set to true (which is the default) to wrap all upgrades and deploys in a single
713 The version the schema on your harddrive is at. Defaults to
714 C<< $self->schema->schema_version >>.
718 =head2 __ddl_consume_with_prefix
720 $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
722 This is the meat of the multi-file upgrade/deploy stuff. It returns a list of
723 files in the order that they should be run for a generic "type" of upgrade.
724 You should not be calling this in user code.
726 =head2 _ddl_schema_consume_filenames
728 $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
730 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
733 =head2 _ddl_schema_produce_filename
735 $dm->_ddl_schema_produce_filename( 'SQLite', '1.00' )
737 Returns a single file in which an initial schema will be stored.
739 =head2 _ddl_schema_up_consume_filenames
741 $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
743 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
746 =head2 _ddl_schema_down_consume_filenames
748 $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
750 Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for a
753 =head2 _ddl_schema_up_produce_filename
755 $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
757 Returns a single file in which the sql to upgrade from one schema to another
760 =head2 _ddl_schema_down_produce_filename
762 $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
764 Returns a single file in which the sql to downgrade from one schema to another
767 =head2 _resultsource_install_filename
769 my $filename_fn = $dm->_resultsource_install_filename('User');
770 $dm->$filename_fn('SQLite', '1.00')
772 Returns a function which in turn returns a single filename used to install a
773 single resultsource. Weird interface is convenient for me. Deal with it.
775 =head2 _run_sql_and_perl
777 $dm->_run_sql_and_perl([qw( list of filenames )])
779 Simply put, this runs the list of files passed to it. If the file ends in
780 C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file.
782 Depending on L</txn_wrap> all of the files run will be wrapped in a single
785 =head2 _prepare_install
787 $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
789 Generates the sql file for installing the database. First arg is simply
790 L<SQL::Translator> args and the second is a coderef that returns the filename
793 =head2 _prepare_changegrade
795 $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
797 Generates the sql file for migrating from one schema version to another. First
798 arg is the version to start from, second is the version to go to, third is the
799 L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the
800 direction of the changegrade, be it 'up' or 'down'.
802 =head2 _read_sql_file
804 $dm->_read_sql_file('foo.sql')
806 Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
807 transactions, and blank lines.