diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
index 996a58b..affcf56 100644
--- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
+++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm
@@ -5,6 +5,11 @@ use Moose;
 use autodie;
 use Carp qw( carp croak );
 
+use Log::Contextual::WarnLogger;
+use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
+   env_prefix => 'DBICDH'
+});
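+# with WarnLogger's env_prefix, the log_* statements below are switched on
+# via DBICDH_* environment variables (e.g. DBICDH_TRACE=1, DBICDH_DEBUG=1)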
+use Data::Dumper::Concise;
 use Method::Signatures::Simple;
 use Try::Tiny;
@@ -43,7 +48,7 @@ has sql_translator_args => (
   is => 'ro',
   default => sub { {} },
 );
 
-has upgrade_directory => (
+has script_directory => (
   isa => 'Str',
   is => 'ro',
   required => 1,
@@ -65,13 +70,24 @@ has txn_wrap => (
 
 has schema_version => (
   is => 'ro',
+  isa => 'Str',
   lazy_build => 1,
 );
 
+# this will probably never get called as the DBICDH
+# will be passing down a schema_version normally, which
+# is built the same way, but we leave this in place
 method _build_schema_version { $self->schema->schema_version }
 
+has _json => (
+  is => 'ro',
+  lazy_build => 1,
+);
+
+sub _build__json { require JSON; JSON->new->pretty }
+
 method __ddl_consume_with_prefix($type, $versions, $prefix) {
-  my $base_dir = $self->upgrade_directory;
+  my $base_dir = $self->script_directory;
 
   my $main    = catfile( $base_dir, $type );
   my $generic = catfile( $base_dir, '_generic' );
@@ -88,7 +104,7 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) {
   }
 
   opendir my($dh), $dir;
-  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
+  my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
   closedir $dh;
 
   if (-d $common) {
@@ -113,10 +129,10 @@ method _ddl_schema_consume_filenames($type, $version) {
 }
 
 method _ddl_schema_produce_filename($type, $version) {
-  my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version );
+  my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql' );
+  return catfile( $dirname, '001-auto.sql-json' );
 }
 
 method _ddl_schema_up_consume_filenames($type, $versions) {
@@ -128,62 +144,93 @@ method _ddl_schema_down_consume_filenames($type, $versions) {
 }
 
 method _ddl_schema_up_produce_filename($type, $versions) {
-  my $dir = $self->upgrade_directory;
+  my $dir = $self->script_directory;
 
   my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql'
-  );
+  return catfile( $dirname, '001-auto.sql-json' );
 }
 
 method _ddl_schema_down_produce_filename($type, $versions, $dir) {
   my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
   mkpath($dirname) unless -d $dirname;
 
-  return catfile( $dirname, '001-auto.sql');
+  return catfile( $dirname, '001-auto.sql-json');
 }
 
-method _run_sql_and_perl($filenames) {
-  my @files = @{$filenames};
+method _run_sql_array($sql) {
   my $storage = $self->storage;
 
+  $sql = [grep {
+    $_ && # remove blank lines
+    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txns
+  } map {
+    s/^\s+//; s/\s+$//; # trim whitespace
+    join '', grep { !/^--/ } split /\n/ # remove comments
+  } @$sql];
+
+  log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
+  foreach my $line (@{$sql}) {
+    $storage->_query_start($line);
+    try {
+      # do a dbh_do cycle here, as we need some error checking in
+      # place (even though we will ignore errors)
+      $storage->dbh_do (sub { $_[1]->do($line) });
+    }
+    catch {
+      carp "$_ (running '${line}')"
+    };
+    $storage->_query_end($line);
+  }
+  return join "\n", @$sql;
+}
 
-  my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+method _run_sql($filename) {
+  log_debug { "[DBICDH] Running SQL from $filename" };
+  return $self->_run_sql_array($self->_read_sql_file($filename));
+}
 
-  my $sql;
-  for my $filename (@files) {
-    if ($filename =~ /\.sql$/) {
-      my @sql = @{$self->_read_sql_file($filename)};
-      $sql .= join "\n", @sql;
-
-      foreach my $line (@sql) {
-        $storage->_query_start($line);
-        try {
-          # do a dbh_do cycle here, as we need some error checking in
-          # place (even though we will ignore errors)
-          $storage->dbh_do (sub { $_[1]->do($line) });
-        }
-        catch {
-          carp "$_ (running '${line}')"
-        }
-        $storage->_query_end($line);
-      }
-    } elsif ( $filename =~ /^(.+)\.pl$/ ) {
-      my $package = $1;
-      my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
-      # make the package name more palatable to perl
-      $package =~ s/\W/_/g;
+method _run_perl($filename) {
+  log_debug { "[DBICDH] Running Perl from $filename" };
+  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
 
-      no warnings 'redefine';
-      eval "package $package;\n\n$filedata";
-      use warnings;
+  no warnings 'redefine';
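+  # the script file is expected to end with an anonymous sub;
+  # the string eval hands that sub back as a coderef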
+  my $fn = eval "$filedata";
+  use warnings;
+  log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };
 
-      if (my $fn = $package->can('run')) {
-        $fn->($self->schema);
-      } else {
-        carp "$filename should define a run method that takes a schema but it didn't!";
-      }
+  if ($@) {
+    carp "$filename failed to compile: $@";
+  } elsif (ref $fn eq 'CODE') {
+    $fn->($self->schema)
+  } else {
+    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
+  }
+}
+
+method _run_serialized_sql($filename, $type) {
+  if (lc $type eq 'json') {
+    return $self->_run_sql_array($self->_json->decode(
+      do { local( @ARGV, $/ ) = $filename; <> } # slurp
+    ))
+  } else {
+    croak "$type is not one of the supported serialized types"
+  }
+}
+
+method _run_sql_and_perl($filenames) {
+  my @files = @{$filenames};
+  my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+
+  my $sql = '';
+  for my $filename (@files) {
+    if ($filename =~ /\.sql$/) {
+      $sql .= $self->_run_sql($filename)
+    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
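+      # the captured suffix is the serialization format,
+      # e.g. 'json' for 001-auto.sql-json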
")"); next; } open my $file, q(>), $filename; - print {$file} $output; + print {$file} $sql; close $file; } } +method _generate_final_sql($sqlt) { + my @output = $sqlt->translate; + $self->_json->encode(\@output); +} + sub _resultsource_install_filename { my ($self, $source_name) = @_; return sub { my ($self, $type, $version) = @_; - my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); + my $dirname = catfile( $self->script_directory, $type, 'schema', $version ); mkpath($dirname) unless -d $dirname; - return catfile( $dirname, "001-auto-$source_name.sql" ); + return catfile( $dirname, "001-auto-$source_name.sql-json" ); } } sub install_resultsource { - my ($self, $source, $version) = @_; - + my ($self, $args) = @_; + my $source = $args->{result_source}; + my $version = $args->{version}; + log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" }; my $rs_install_file = $self->_resultsource_install_filename($source->source_name); @@ -305,7 +364,8 @@ sub install_resultsource { sub prepare_resultsource_install { my $self = shift; - my $source = shift; + my $source = (shift @_)->{result_source}; + log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name }; my $filename = $self->_resultsource_install_filename($source->source_name); $self->_prepare_install({ @@ -314,31 +374,44 @@ sub prepare_resultsource_install { } sub prepare_deploy { + log_info { '[DBICDH] preparing deploy' }; my $self = shift; $self->_prepare_install({}, '_ddl_schema_produce_filename'); } sub prepare_upgrade { - my ($self, $from_version, $to_version, $version_set) = @_; - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'up'); + my ($self, $args) = @_; + log_info { + '[DBICDH] preparing upgrade ' . + "from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up' + ); } sub prepare_downgrade { - my ($self, $from_version, $to_version, $version_set) = @_; - - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'down'); + my ($self, $args) = @_; + log_info { + '[DBICDH] preparing downgrade ' . + "from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down' + ); } method _prepare_changegrade($from_version, $to_version, $version_set, $direction) { my $schema = $self->schema; my $databases = $self->databases; - my $dir = $self->upgrade_directory; + my $dir = $self->script_directory; my $sqltargs = $self->sql_translator_args; my $schema_version = $self->schema_version; $sqltargs = { add_drop_table => 1, + no_comments => 1, ignore_constraint_names => 1, ignore_index_names => 1, %{$sqltargs} @@ -378,7 +451,8 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction $t->parser( $db ) # could this really throw an exception? 
+    my $sql = $self->_default_read_sql_file_as_string($prefilename);
+    my $out = $t->translate( \$sql )
       or croak($t->error);
 
     $source_schema = $t->schema;
 
@@ -403,7 +477,8 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction
       or croak($t->error);
 
     my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
-    my $out = $t->translate( $filename )
+    my $sql = $self->_default_read_sql_file_as_string($filename);
+    my $out = $t->translate( \$sql )
       or croak($t->error);
 
     $dest_schema = $t->schema;
 
@@ -412,17 +487,23 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction
     unless $dest_schema->name;
   }
 
-  my $diff = SQL::Translator::Diff::schema_diff(
-     $source_schema, $db,
-     $dest_schema,   $db,
-     $sqltargs
-  );
   open my $file, q(>), $diff_file;
-  print {$file} $diff;
+  print {$file}
+    $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
   close $file;
 }
 
+method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
+  $self->_json->encode([
+    SQL::Translator::Diff::schema_diff(
+      $source_schema, $db,
+      $dest_schema,   $db,
+      $sqltargs
+    )
+  ])
+}
+
 method _read_sql_file($file) {
   return unless $file;
 
@@ -430,20 +511,19 @@ method _read_sql_file($file) {
   my @data = split /;\n/, join '', <$fh>;
   close $fh;
 
-  @data = grep {
-    $_ && # remove blank lines
-    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txns
-  } map {
-    s/^\s+//; s/\s+$//; # trim whitespace
-    join '', grep { !/^--/ } split /\n/ # remove comments
-  } @data;
-
   return \@data;
 }
 
+method _default_read_sql_file_as_string($file) {
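+  # inverse of _generate_final_sql: decode the JSON array and re-terminate
+  # each statement with ';' so SQL::Translator's parser can read it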
+  return join q(), map "$_;\n", @{$self->_json->decode(
+    do { local( @ARGV, $/ ) = $file; <> } # slurp
+  )};
+}
+
 sub downgrade_single_step {
   my $self = shift;
-  my $version_set = shift @_;
+  my $version_set = (shift @_)->{version_set};
+  log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };
 
   my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
     $self->storage->sqlt_type,
@@ -455,7 +535,8 @@ sub downgrade_single_step {
 
 sub upgrade_single_step {
   my $self = shift;
-  my $version_set = shift @_;
+  my $version_set = (shift @_)->{version_set};
+  log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };
 
   my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
     $self->storage->sqlt_type,
@@ -474,12 +555,13 @@ __END__
 
 =head1 DESCRIPTION
 
-This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes care of
-generating sql files representing schemata as well as sql files to move from
-one version of a schema to the next.  One of the hallmark features of this
-class is that it allows for multiple sql files for deploy and upgrade, allowing
-developers to fine tune deployment.  In addition it also allows for perl files
-to be run at any stage of the process.
+This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes
+care of generating serialized sql files representing schemata as well
+as serialized sql files to move from one version of a schema to the next.
+One of the hallmark features of this class is that it allows for multiple sql
+files for deploy and upgrade, allowing developers to fine tune deployment.
+In addition it also allows for perl files to be run
+at any stage of the process.
 
 For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>.  What's
 documented here is extra fun stuff or private methods.
 
@@ -494,68 +576,94 @@
 
  $sql_migration_dir
 |- SQLite
 |  |- down
-|  |  `- 1-2
-|  |     `- 001-auto.sql
+|  |  `- 2-1
+|  |     `- 001-auto.sql-json
 |  |- schema
 |  |  `- 1
-|  |     `- 001-auto.sql
+|  |     `- 001-auto.sql-json
 |  `- up
 |     |- 1-2
-|     |  `- 001-auto.sql
+|     |  `- 001-auto.sql-json
 |     `- 2-3
-|        `- 001-auto.sql
+|        `- 001-auto.sql-json
 |- _common
 |  |- down
-|  |  `- 1-2
+|  |  `- 2-1
 |  |     `- 002-remove-customers.pl
 |  `- up
 |     `- 1-2
 |        `- 002-generate-customers.pl
 |- _generic
 |  |- down
-|  |  `- 1-2
-|  |     `- 001-auto.sql
+|  |  `- 2-1
+|  |     `- 001-auto.sql-json
 |  |- schema
 |  |  `- 1
-|  |     `- 001-auto.sql
+|  |     `- 001-auto.sql-json
 |  `- up
 |     `- 1-2
-|        |- 001-auto.sql
+|        |- 001-auto.sql-json
 |        `- 002-create-stored-procedures.sql
 `- MySQL
    |- down
-   |  `- 1-2
-   |     `- 001-auto.sql
+   |  `- 2-1
+   |     `- 001-auto.sql-json
+   |- preinstall
+   |  `- 1
+   |     |- 001-create_database.pl
+   |     `- 002-create_users_and_permissions.pl
    |- schema
    |  `- 1
-   |     `- 001-auto.sql
+   |     `- 001-auto.sql-json
   `- up
       `- 1-2
-         `- 001-auto.sql
+         `- 001-auto.sql-json
 
 So basically, the code
 
  $dm->deploy(1)
 
 on an C<SQLite> database would simply run
-C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>.  Next,
+C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>.  Next,
 
  $dm->upgrade_single_step([1,2])
 
-would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
+would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
 C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
 
-Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
-the time it probably should be, since perl scripts will mostly be database
-independent.
+C<.pl> files don't have to be in the C<_common> directory, but most of the time
+they should be, because perl scripts are generally database independent.
 
 C<_generic> exists for when you for some reason are sure that your SQL is
 generic enough to run on all databases.  Good luck with that one.
 
+Note that unlike most steps in the process, C<preinstall> will not run SQL, as
+there may not even be a database at preinstall time.  It will run perl scripts
+just like the other steps in the process, but nothing is passed to them.
+Until people have used this more it will remain freeform, but a recommended use
+of preinstall is to have it prompt for username and password, and then call the
+appropriate C<< CREATE DATABASE >> commands etc.
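+
+A sketch of what such a preinstall script might look like, assuming a MySQL
+server (the admin-credentials prompt and the C<my_app> database name are
+purely illustrative):
+
+  #!perl
+
+  use strict;
+  use warnings;
+  use DBI;
+
+  sub {
+    print "database admin user: ";
+    chomp(my $user = <STDIN>);
+    print "database admin password: ";
+    chomp(my $pass = <STDIN>);
+
+    # connect to the server without selecting a database, then create it;
+    # remember that preinstall scripts are passed no arguments
+    my $dbh = DBI->connect('dbi:mysql:', $user, $pass, { RaiseError => 1 });
+    $dbh->do('CREATE DATABASE my_app');
+  };
+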
+=head1 SERIALIZED SQL
+
+The SQL that this module generates and uses is serialized into an array of
+SQL statements.  The reason is that some databases handle multiple
+statements in a single execution differently.  Generally you do not need to
+worry about this, as these scripts are generated for you.  If you find that
+you are editing them on a regular basis, something is wrong and you either
+need to submit a bug or consider writing extra serialized SQL or Perl scripts
+to run before or after the automatically generated script.
+
+B<Note>: Currently the SQL is serialized into JSON.  I am willing to merge in
+patches that will allow more serialization formats if you want that feature,
+but if you do send me a patch for that, realize that I do not want to add YAML
+support or whatever; I would rather add a generic method of adding any
+serialization format.
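+
+For illustration, a generated C<001-auto.sql-json> file for a hypothetical
+C<users> table might look something like:
+
+  [
+     "CREATE TABLE users ( id INTEGER PRIMARY KEY NOT NULL, name varchar(255) NOT NULL )",
+     "CREATE INDEX users_idx_name ON users (name)"
+  ]
+
+that is, a pretty-printed JSON array with one string per SQL statement; the
+deploy methods decode it and run the statements one at a time.
+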
 =head1 PERL SCRIPTS
 
-A perl script for this tool is very simple.  It merely needs to contain a
-sub called C<run> that takes a L<DBIx::Class::Schema> as its only argument.
+A perl script for this tool is very simple.  It merely needs to contain an
+anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
 A very basic perl script might look like:
 
  #!perl
 
  use strict;
  use warnings;
 
- sub run {
+ sub {
    my $schema = shift;
 
    $schema->resultset('Users')->create({
@@ -586,9 +694,9 @@ and generate the DDL.
 
 This is automatically created with L</_build_storage>.
 
 The arguments that get passed to L<SQL::Translator> when it's used.
 
-=attr upgrade_directory
+=attr script_directory
 
-The directory (default C<'sql'>) that upgrades are stored in
+The directory (default C<'sql'>) that scripts are stored in
 
 =attr databases
 
@@ -605,7 +713,9 @@ transaction.
 
 The version the schema on your hard drive is at.  Defaults to
 C<< $self->schema->schema_version >>.
 
-=method __ddl_consume_with_prefix
+=begin comment
+
+=head2 __ddl_consume_with_prefix
 
  $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
 
 This is the meat of the multi-file upgrade/deploy stuff.  It returns a list of
 files in the order that they should be run for a generic "type" of upgrade.
 You should not be calling this in user code.
 
-=method _ddl_schema_consume_filenames
+=head2 _ddl_schema_consume_filenames
 
  $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
 
 Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
 initial deploy.
 
-=method _ddl_schema_produce_filename
+=head2 _ddl_schema_produce_filename
 
  $dm->_ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
 
 Returns a single file in which an initial schema will be stored.
 
-=method _ddl_schema_up_consume_filenames
+=head2 _ddl_schema_up_consume_filenames
 
  $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
 
 Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
 upgrade.
 
-=method _ddl_schema_down_consume_filenames
+=head2 _ddl_schema_down_consume_filenames
 
  $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
 
 Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for a
 downgrade.
 
-=method _ddl_schema_up_produce_filenames
+=head2 _ddl_schema_up_produce_filename
 
  $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
 
 Returns a single file in which the sql to upgrade from one schema to another
 will be stored.
 
-=method _ddl_schema_down_produce_filename
+=head2 _ddl_schema_down_produce_filename
 
 $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
 
 Returns a single file in which the sql to downgrade from one schema to another
 will be stored.
 
-=method _resultsource_install_filename
+=head2 _resultsource_install_filename
 
 my $filename_fn = $dm->_resultsource_install_filename('User');
 $dm->$filename_fn('SQLite', '1.00')
 
 Returns a function which in turn returns a single filename used to install a
 single resultsource.  Weird interface is convenient for me.  Deal with it.
 
-=method _run_sql_and_perl
+=head2 _run_sql_and_perl
 
 $dm->_run_sql_and_perl([qw( list of filenames )])
 
 Runs the given files in order.  If a file ends in
 C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file.
 
 Depending on L</txn_wrap> all of the files run will be wrapped in a single
 transaction.
 
-=method _prepare_install
+=head2 _prepare_install
 
    $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
 
 Generates the sql file for installing the database.  First arg is simply
 L<SQL::Translator> args and the second is a coderef that returns the filename
 to store the sql in.
 
-=method _prepare_changegrade
+=head2 _prepare_changegrade
 
    $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
 
 Generates the sql file for migrating from one schema version to another.  First
 arg is the version to start from, second is the version to go to, third is the
 version set, and last is the direction of the changegrade, be it 'up' or 'down'.
 
-=method _read_sql_file
+=head2 _read_sql_file
 
    $dm->_read_sql_file('foo.sql')
 
 Reads a sql file and returns lines in an C<ArrayRef>.  Strips out comments,
 transactions, and blank lines.
 
+=end comment