X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FDeploymentHandler%2FDeployMethod%2FSQL%2FTranslator.pm;h=b6e78a3a1020552b34b5feb64840c0d9ced2f219;hb=8465e76797613fa0b9c85365c03d26cacbaab31a;hp=4eb5c6f9b53be5ed4e72017466635d21c650c15d;hpb=65622feffe796f5ffe15f705c675df5aa4cb5055;p=dbsrgits%2FDBIx-Class-DeploymentHandler.git

diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
index 4eb5c6f..b6e78a3 100644
--- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
+++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
@@ -5,11 +5,11 @@ use Moose;
 use autodie;
 use Carp qw( carp croak );
 
-use Log::Contextual::WarnLogger;
-use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
-  env_prefix => 'DBICDH'
-});
-use Data::Dumper::Concise;
+use DBIx::Class::DeploymentHandler::Logger;
+use Log::Contextual qw(:log :dlog), -default_logger =>
+  DBIx::Class::DeploymentHandler::Logger->new({
+    env_prefix => 'DBICDH'
+  });
 
 use Method::Signatures::Simple;
 use Try::Tiny;
@@ -79,6 +86,13 @@ has schema_version => (
 # is built the same way, but we leave this in place
 method _build_schema_version { $self->schema->schema_version }
 
+has _json => (
+  is => 'ro',
+  lazy_build => 1,
+);
+
+sub _build__json { require JSON; JSON->new->pretty }
+
 method __ddl_consume_with_prefix($type, $versions, $prefix) {
   my $base_dir = $self->script_directory;
 
@@ -97,7 +104,7 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) {
   }
 
   opendir my($dh), $dir;
-  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
+  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
   closedir $dh;
 
   if (-d $common) {
@@ -125,7 +132,7 @@ method _ddl_schema_produce_filename($type, $version) {
   my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql' );
+  return catfile( $dirname, '001-auto.sql-json' );
 }
 
 method _ddl_schema_up_consume_filenames($type, $versions) {
@@ -142,60 +149,87 @@ method _ddl_schema_up_produce_filename($type, $versions) {
   my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql'
-  );
+  return catfile( $dirname, '001-auto.sql-json' );
 }
 
 method _ddl_schema_down_produce_filename($type, $versions, $dir) {
   my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql');
+  return catfile( $dirname, '001-auto.sql-json');
 }
 
-method _run_sql_and_perl($filenames) {
-  my @files = @{$filenames};
+method _run_sql_array($sql) {
   my $storage = $self->storage;
 
+  $sql = [grep {
+    $_ && # remove blank lines
+    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
+  } map {
+    s/^\s+//; s/\s+$//; # trim whitespace
+    join '', grep { !/^--/ } split /\n/ # remove comments
+  } @$sql];
+
+  Dlog_trace { "[DBICDH] Running SQL $_" } $sql;
+  foreach my $line (@{$sql}) {
+    $storage->_query_start($line);
+    # the whole reason we do this is so that we can see the line that was run
+    try {
+      $storage->dbh_do (sub { $_[1]->do($line) });
+    }
+    catch {
+      die "$_ (running line '$line')"
+    };
+    $storage->_query_end($line);
+  }
+  return join "\n", @$sql
+}
 
-  my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
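+# These helpers split the old monolithic _run_sql_and_perl into one runner
+# per deployment file type; all of them share _run_sql_array above.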
+method _run_sql($filename) {
+  log_debug { "[DBICDH] Running SQL from $filename" };
+  return $self->_run_sql_array($self->_read_sql_file($filename));
+}
 
-  my $sql;
-  for my $filename (@files) {
-    if ($filename =~ /\.sql$/) {
-      log_debug { "[DBICDH] Running SQL from $filename" };
-      my @sql = @{$self->_read_sql_file($filename)};
-      $sql .= join "\n", @sql;
-      log_trace { "[DBICDH] Running SQL $sql" };
-
-      foreach my $line (@sql) {
-        $storage->_query_start($line);
-        try {
-          # do a dbh_do cycle here, as we need some error checking in
-          # place (even though we will ignore errors)
-          $storage->dbh_do (sub { $_[1]->do($line) });
-        }
-        catch {
-          carp "$_ (running '${line}')"
-        }
-        $storage->_query_end($line);
-      }
-    } elsif ( $filename =~ /^(.+)\.pl$/ ) {
-      log_debug { "[DBICDH] Running Perl from $filename" };
-      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
+method _run_perl($filename) {
+  log_debug { "[DBICDH] Running Perl from $filename" };
+  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
 
-      no warnings 'redefine';
-      my $fn = eval "$filedata";
-      use warnings;
-      log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };
+  no warnings 'redefine';
+  my $fn = eval "$filedata";
+  use warnings;
+  Dlog_trace { "[DBICDH] Running Perl $_" } $fn;
 
-      if ($@) {
-        carp "$filename failed to compile: $@";
-      } elsif (ref $fn eq 'CODE') {
-        $fn->($self->schema)
-      } else {
-        carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
-      }
+  if ($@) {
+    carp "$filename failed to compile: $@";
+  } elsif (ref $fn eq 'CODE') {
+    $fn->($self->schema)
+  } else {
+    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
+  }
+}
+
+method _run_serialized_sql($filename, $type) {
+  if (lc $type eq 'json') {
+    return $self->_run_sql_array($self->_json->decode(
+      do { local( @ARGV, $/ ) = $filename; <> } # slurp
+    ))
+  } else {
+    croak "$type is not one of the supported serialized types"
+  }
+}
+
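+# Dispatch each deployment file to the matching runner by extension:
+# plain .sql, serialized .sql-<format> (currently only .sql-json), or .pl.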
")"); next; } open my $file, q(>), $filename; - print {$file} $output; + print {$file} $sql; close $file; } } +method _generate_final_sql($sqlt) { + my @output = $sqlt->translate; + $self->_json->encode(\@output); +} + sub _resultsource_install_filename { my ($self, $source_name) = @_; return sub { @@ -300,7 +340,7 @@ sub _resultsource_install_filename { my $dirname = catfile( $self->script_directory, $type, 'schema', $version ); mkpath($dirname) unless -d $dirname; - return catfile( $dirname, "001-auto-$source_name.sql" ); + return catfile( $dirname, "001-auto-$source_name.sql-json" ); } } @@ -370,6 +410,7 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction $sqltargs = { add_drop_table => 1, + no_comments => 1, ignore_constraint_names => 1, ignore_index_names => 1, %{$sqltargs} @@ -409,7 +450,8 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction $t->parser( $db ) # could this really throw an exception? or croak($t->error); - my $out = $t->translate( $prefilename ) + my $sql = $self->_default_read_sql_file_as_string($prefilename); + my $out = $t->translate( \$sql ) or croak($t->error); $source_schema = $t->schema; @@ -434,7 +476,8 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction or croak($t->error); my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir); - my $out = $t->translate( $filename ) + my $sql = $self->_default_read_sql_file_as_string($filename); + my $out = $t->translate( \$sql ) or croak($t->error); $dest_schema = $t->schema; @@ -443,17 +486,23 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction unless $dest_schema->name; } - my $diff = SQL::Translator::Diff::schema_diff( - $source_schema, $db, - $dest_schema, $db, - $sqltargs - ); open my $file, q(>), $diff_file; - print {$file} $diff; + print {$file} + $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs); close $file; } } +method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) { + $self->_json->encode([ + SQL::Translator::Diff::schema_diff( + $source_schema, $db, + $dest_schema, $db, + $sqltargs + ) + ]) +} + method _read_sql_file($file) { return unless $file; @@ -461,21 +510,19 @@ method _read_sql_file($file) { my @data = split /;\n/, join '', <$fh>; close $fh; - @data = grep { - $_ && # remove blank lines - !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's - } map { - s/^\s+//; s/\s+$//; # trim whitespace - join '', grep { !/^--/ } split /\n/ # remove comments - } @data; - return \@data; } +method _default_read_sql_file_as_string($file) { + return join q(), map "$_;\n", @{$self->_json->decode( + do { local( @ARGV, $/ ) = $file; <> } # slurp + )}; +} + sub downgrade_single_step { my $self = shift; my $version_set = (shift @_)->{version_set}; - log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) }; + Dlog_info { qq([DBICDH] downgrade_single_step'ing $_) } $version_set; my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames( $self->storage->sqlt_type, @@ -488,7 +535,7 @@ sub downgrade_single_step { sub upgrade_single_step { my $self = shift; my $version_set = (shift @_)->{version_set}; - log_info { qq([DBICDH] upgrade_single_step'ing ) . 
+method _default_read_sql_file_as_string($file) {
+  return join q(), map "$_;\n", @{$self->_json->decode(
+    do { local( @ARGV, $/ ) = $file; <> } # slurp
+  )};
+}
+
 sub downgrade_single_step {
   my $self = shift;
   my $version_set = (shift @_)->{version_set};
-  log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };
+  Dlog_info { qq([DBICDH] downgrade_single_step'ing $_) } $version_set;
 
   my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
     $self->storage->sqlt_type,
@@ -488,7 +535,7 @@ sub downgrade_single_step {
 sub upgrade_single_step {
   my $self = shift;
   my $version_set = (shift @_)->{version_set};
-  log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };
+  Dlog_info { qq([DBICDH] upgrade_single_step'ing $_) } $version_set;
 
   my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
     $self->storage->sqlt_type,
@@ -507,12 +554,13 @@ __END__
 
 =head1 DESCRIPTION
 
-This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes care of
-generating sql files representing schemata as well as sql files to move from
-one version of a schema to the rest.  One of the hallmark features of this
-class is that it allows for multiple sql files for deploy and upgrade, allowing
-developers to fine tune deployment.  In addition it also allows for perl files
-to be run at any stage of the process.
+This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes
+care of generating serialized sql files representing schemata as well
+as serialized sql files to move from one version of a schema to the next.
+One of the hallmark features of this class is that it allows for multiple sql
+files for deploy and upgrade, allowing developers to fine tune deployment.
+In addition it also allows for perl files to be run
+at any stage of the process.
 
 For basic usage see L<DBIx::Class::DeploymentHandler>.  What's documented here
 is extra fun stuff or private methods.
@@ -528,15 +576,15 @@ like the best way to describe the layout is with the following example:
  |- SQLite
  |  |- down
  |  |  `- 2-1
- |  |     `- 001-auto.sql
+ |  |     `- 001-auto.sql-json
  |  |- schema
  |  |  `- 1
- |  |     `- 001-auto.sql
+ |  |     `- 001-auto.sql-json
  |  `- up
  |     |- 1-2
- |     |  `- 001-auto.sql
+ |     |  `- 001-auto.sql-json
  |     `- 2-3
- |        `- 001-auto.sql
+ |        `- 001-auto.sql-json
  |- _common
  |  |- down
  |  |  `- 2-1
  |  |     `- 002-remove-customers.pl
  |  `- up
  |     `- 1-2
  |        `- 002-generate-customers.pl
@@ -547,44 +595,43 @@
  |- _generic
  |  |- down
  |  |  `- 2-1
- |  |     `- 001-auto.sql
+ |  |     `- 001-auto.sql-json
  |  |- schema
  |  |  `- 1
- |  |     `- 001-auto.sql
+ |  |     `- 001-auto.sql-json
  |  `- up
  |     `- 1-2
- |        |- 001-auto.sql
+ |        |- 001-auto.sql-json
  |        `- 002-create-stored-procedures.sql
  `- MySQL
     |- down
     |  `- 2-1
-    |     `- 001-auto.sql
+    |     `- 001-auto.sql-json
     |- preinstall
     |  `- 1
     |     |- 001-create_database.pl
     |     `- 002-create_users_and_permissions.pl
     |- schema
     |  `- 1
-    |     `- 001-auto.sql
+    |     `- 001-auto.sql-json
     `- up
        `- 1-2
-          `- 001-auto.sql
+          `- 001-auto.sql-json
 
 So basically, the code
 
  $dm->deploy(1)
 
 on an C<SQLite> database would simply run
-C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>.  Next,
+C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>.  Next,
 
  $dm->upgrade_single_step([1,2])
 
-would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
+would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
 C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
 
-Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
-the time it probably should be, since perl scripts will mostly be database
-independent.
+C<.pl> files don't have to be in the C<_common> directory, but most of the time
+they should be, because perl scripts are generally database independent.
 
 C<_generic> exists for when you for some reason are sure that your SQL is
 generic enough to run on all databases.  Good luck with that one.
@@ -596,6 +643,22 @@ Until people have used this more it will remain
 freeform, but a recommended use of preinstall is to have it prompt for username
 and password, and then call the appropriate C<< CREATE DATABASE >> commands etc.
 
+=head1 SERIALIZED SQL
+
+The SQL that this module generates and uses is serialized into an array of
+SQL statements.  The reason is that some databases handle multiple
+statements in a single execution differently.  Generally you do not need to
+worry about this, as these scripts are generated for you.  If you find that
+you are editing them on a regular basis, something is wrong: you either need
+to submit a bug or consider writing extra serialized SQL or Perl scripts to run
+before or after the automatically generated script.
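+
+For example, a generated C<001-auto.sql-json> for a hypothetical one-table
+schema might look something like:
+
+ [
+    "DROP TABLE artist",
+    "CREATE TABLE artist (artistid INTEGER PRIMARY KEY NOT NULL, name varchar(255))"
+ ]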
+
+B<NOTE:> Currently the SQL is serialized into JSON.  I am willing to merge in
+patches that will allow more serialization formats if you want that feature,
+but if you do send me a patch for that, realize that I do not want to add YAML
+support or whatever; I would rather add a generic method of adding any
+serialization format.
+
 =head1 PERL SCRIPTS
 
 A perl script for this tool is very simple.  It merely needs to contain an
 anonymous sub that takes a schema as its only argument.
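 
 A very basic perl script might look like:
 
  #!perl
 
  use strict;
  use warnings;
 
  sub {
    my $schema = shift;
 
    # do whatever needs doing with the schema here
  }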