X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FDeploymentHandler%2FDeployMethod%2FSQL%2FTranslator.pm;h=32d4a0ea537fe2a26386bdd35a11fd699bd26cb0;hb=8a3edcedddb89028adcd9942b04013967c071900;hp=551c8558a2da1112cc2b3cc57263e3f3e2c66969;hpb=d3d6512cf7768478349c9f2264ba823013831283;p=dbsrgits%2FDBIx-Class-DeploymentHandler.git diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm index 551c855..32d4a0e 100644 --- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm +++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm @@ -31,6 +31,12 @@ has ignore_ddl => ( default => undef, ); +has force_overwrite => ( + isa => 'Bool', + is => 'ro', + default => undef, +); + has schema => ( isa => 'DBIx::Class::Schema', is => 'ro', @@ -54,6 +60,7 @@ has sql_translator_args => ( is => 'ro', default => sub { {} }, ); + has script_directory => ( isa => 'Str', is => 'ro', @@ -61,6 +68,37 @@ has script_directory => ( default => 'sql', ); +has _filesystem_interface => ( + is => 'ro', + lazy_build => 1, + handles => { + _ddl_initialize_consume_filenames => '_ddl_initialize_consume_filenames', + _ddl_schema_consume_filenames => '_ddl_schema_consume_filenames', + _ddl_protoschema_deploy_consume_filenames => '_ddl_protoschema_deploy_consume_filenames', + _ddl_protoschema_upgrade_consume_filenames => '_ddl_protoschema_upgrade_consume_filenames', + _ddl_protoschema_downgrade_consume_filenames => '_ddl_protoschema_downgrade_consume_filenames', + _ddl_protoschema_produce_filename => '_ddl_protoschema_produce_filename', + _ddl_schema_produce_filename => '_ddl_schema_produce_filename', + _ddl_schema_upgrade_consume_filenames => '_ddl_schema_upgrade_consume_filenames', + _ddl_schema_downgrade_consume_filenames => '_ddl_schema_downgrade_consume_filenames', + _ddl_schema_upgrade_produce_filename => '_ddl_schema_upgrade_produce_filename', + _ddl_schema_downgrade_produce_filename => '_ddl_schema_downgrade_produce_filename', + + _read_sql_file => '_read_sql_file', + _coderefs_per_files => '_coderefs_per_files', + _write_data_string => '_write_data_string', + _write_data_list => '_write_data_list', + }, +); + +sub _build__filesystem_interface { + use DBIx::Class::DeploymentHandler::Filesystem; + DBIx::Class::DeploymentHandler::Filesystem->new( + script_directory => $_[0]->script_directory, + ignore_ddl => $_[0]->ignore_ddl + ) +} + has databases => ( coerce => 1, isa => 'DBIx::Class::DeploymentHandler::Databases', @@ -85,122 +123,6 @@ has schema_version => ( # is built the same way, but we leave this in place method _build_schema_version { $self->schema->schema_version } -method __ddl_consume_with_prefix($type, $versions, $prefix) { - my $base_dir = $self->script_directory; - - my $main = catfile( $base_dir, $type ); - my $common = - catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} ); - - my $dir; - if (-d $main) { - $dir = catfile($main, $prefix, join q(-), @{$versions}) - } else { - if ($self->ignore_ddl) { - return [] - } else { - croak "$main does not exist; please write/generate some SQL" - } - } - - my %files; - try { - opendir my($dh), $dir; - %files = - map { $_ => "$dir/$_" } - grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } - readdir $dh; - closedir $dh; - } catch { - die $_ unless $self->ignore_ddl; - }; - if (-d $common) { - opendir my($dh), $common; - for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) { - 
unless ($files{$filename}) { - $files{$filename} = catfile($common,$filename); - } - } - closedir $dh; - } - - return [@files{sort keys %files}] -} - -method _ddl_initialize_consume_filenames($type, $version) { - $self->__ddl_consume_with_prefix($type, [ $version ], 'initialize') -} - -method _ddl_schema_consume_filenames($type, $version) { - $self->__ddl_consume_with_prefix($type, [ $version ], 'deploy') -} - -method _ddl_protoschema_upgrade_consume_filenames($versions) { - my $base_dir = $self->script_directory; - - my $dir = catfile( $base_dir, '_preprocess_schema', 'upgrade', join q(-), @{$versions}); - - return [] unless -d $dir; - - opendir my($dh), $dir; - my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; - closedir $dh; - - return [@files{sort keys %files}] -} - -method _ddl_protoschema_downgrade_consume_filenames($versions) { - my $base_dir = $self->script_directory; - - my $dir = catfile( $base_dir, '_preprocess_schema', 'downgrade', join q(-), @{$versions}); - - return [] unless -d $dir; - - opendir my($dh), $dir; - my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; - closedir $dh; - - return [@files{sort keys %files}] -} - -method _ddl_protoschema_produce_filename($version) { - my $dirname = catfile( $self->script_directory, '_source', 'deploy', $version ); - mkpath($dirname) unless -d $dirname; - - return catfile( $dirname, '001-auto.yml' ); -} - -method _ddl_schema_produce_filename($type, $version) { - my $dirname = catfile( $self->script_directory, $type, 'deploy', $version ); - mkpath($dirname) unless -d $dirname; - - return catfile( $dirname, '001-auto.sql' ); -} - -method _ddl_schema_upgrade_consume_filenames($type, $versions) { - $self->__ddl_consume_with_prefix($type, $versions, 'upgrade') -} - -method _ddl_schema_downgrade_consume_filenames($type, $versions) { - $self->__ddl_consume_with_prefix($type, $versions, 'downgrade') -} - -method _ddl_schema_upgrade_produce_filename($type, $versions) { - my $dir = $self->script_directory; - - my $dirname = catfile( $dir, $type, 'upgrade', join q(-), @{$versions}); - mkpath($dirname) unless -d $dirname; - - return catfile( $dirname, '001-auto.sql' ); -} - -method _ddl_schema_downgrade_produce_filename($type, $versions, $dir) { - my $dirname = catfile( $dir, $type, 'downgrade', join q(-), @{$versions} ); - mkpath($dirname) unless -d $dirname; - - return catfile( $dirname, '001-auto.sql'); -} - method _run_sql_array($sql) { my $storage = $self->storage; @@ -232,7 +154,7 @@ method _run_sql($filename) { return $self->_run_sql_array($self->_read_sql_file($filename)); } -method _run_perl($filename) { +method _run_perl($filename, $versions) { log_debug { "Running Perl from $filename" }; my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; @@ -244,13 +166,13 @@ method _run_perl($filename) { if ($@) { carp "$filename failed to compile: $@"; } elsif (ref $fn eq 'CODE') { - $fn->($self->schema) + $fn->($self->schema, $versions) } else { carp "$filename should define an anonymouse sub that takes a schema but it didn't!"; } } -method _run_sql_and_perl($filenames, $sql_to_run) { +method _run_sql_and_perl($filenames, $sql_to_run, $versions) { my @files = @{$filenames}; my $guard = $self->schema->txn_scope_guard if $self->txn_wrap; @@ -264,7 +186,7 @@ method _run_sql_and_perl($filenames, $sql_to_run) { } elsif ($filename =~ /\.sql$/) { $sql .= $self->_run_sql($filename) } elsif ( $filename =~ /\.pl$/ ) { - $self->_run_perl($filename) + $self->_run_perl($filename, $versions) } 
else { croak "A file ($filename) got to deploy that wasn't sql or perl!"; } @@ -283,13 +205,13 @@ sub deploy { my $sql; if ($self->ignore_ddl) { $sql = $self->_sql_from_yaml({}, - '_ddl_protoschema_produce_filename', $sqlt_type + '_ddl_protoschema_deploy_consume_filenames', $sqlt_type ); } return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames( $sqlt_type, $version, - ), $sql); + ), $sql, [$version]); } sub initialize { @@ -400,19 +322,25 @@ method _sql_from_yaml($sqltargs, $from_file, $db) { my $schema = $self->schema; my $version = $self->schema_version; - my $sqlt = SQL::Translator->new({ - add_drop_table => 0, - parser => 'SQL::Translator::Parser::YAML', - %{$sqltargs}, - producer => $db, - }); + my @sql; - my $yaml_filename = $self->$from_file($version); - - my @sql = $sqlt->translate($yaml_filename); - if(!@sql) { - carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); - return undef; + my $actual_file = $self->$from_file($version); + for my $yaml_filename (@{ + DlogS_trace { "generating SQL from Serialized SQL Files: $_" } + (ref $actual_file?$actual_file:[$actual_file]) + }) { + my $sqlt = SQL::Translator->new({ + add_drop_table => 0, + parser => 'SQL::Translator::Parser::YAML', + %{$sqltargs}, + producer => $db, + }); + + push @sql, $sqlt->translate($yaml_filename); + if(!@sql) { + carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); + return undef; + } } return \@sql; } @@ -431,12 +359,15 @@ sub _prepare_install { my $filename = $self->$to_file($db, $version, $dir); if (-e $filename ) { - carp "Overwriting existing DDL file - $filename"; - unlink $filename; + if ($self->force_overwrite) { + carp "Overwriting existing DDL file - $filename"; + unlink $filename; + } else { + die "Cannot overwrite '$filename', either enable force_overwrite or delete it" + } } - open my $file, q(>), $filename; - print {$file} join ";\n", @$sql; - close $file; + + $self->_write_data_list($filename, $sql); } } @@ -464,8 +395,10 @@ sub _resultsource_protoschema_filename { sub install_resultsource { my ($self, $args) = @_; - my $source = $args->{result_source}; - my $version = $args->{version}; + my $source = $args->{result_source} + or die 'result_source must be passed to install_resultsource'; + my $version = $args->{version} + or die 'version must be passed to install_resultsource'; log_info { 'installing_resultsource ' . $source->source_name . 
", version $version" }; my $rs_install_file = $self->_resultsource_install_filename($source->source_name); @@ -476,7 +409,7 @@ sub install_resultsource { $version, ) ]; - $self->_run_sql_and_perl($files); + $self->_run_sql_and_perl($files, '', [$version]); } sub prepare_resultsource_install { @@ -522,11 +455,6 @@ sub prepare_downgrade { ); } -method _coderefs_per_files($files) { - no warnings 'redefine'; - [map eval do { local( @ARGV, $/ ) = $_; <> }, @$files] -} - method _prepare_changegrade($from_version, $to_version, $version_set, $direction) { my $schema = $self->schema; my $databases = $self->databases; @@ -537,34 +465,20 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction foreach my $db (@$databases) { my $diff_file = $self->$diff_file_method($db, $version_set, $dir ); if(-e $diff_file) { - carp("Overwriting existing $direction-diff file - $diff_file"); - unlink $diff_file; + if ($self->force_overwrite) { + carp("Overwriting existing $direction-diff file - $diff_file"); + unlink $diff_file; + } else { + die "Cannot overwrite '$diff_file', either enable force_overwrite or delete it" + } } - open my $file, q(>), $diff_file; - print {$file} join ";\n", @{$self->_sqldiff_from_yaml($from_version, $to_version, $db, $direction)}; - close $file; + $self->_write_data_list($diff_file, + $self->_sqldiff_from_yaml($from_version, $to_version, $db, $direction) + ); } } -method _read_sql_file($file) { - return unless $file; - - open my $fh, '<', $file; - my @data = split /;\n/, join '', <$fh>; - close $fh; - - @data = grep { - $_ && # remove blank lines - !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's - } map { - s/^\s+//; s/\s+$//; # trim whitespace - join '', grep { !/^--/ } split /\n/ # remove comments - } @data; - - return \@data; -} - sub downgrade_single_step { my $self = shift; my $version_set = (shift @_)->{version_set}; @@ -580,7 +494,7 @@ sub downgrade_single_step { my $sql = $self->_run_sql_and_perl($self->_ddl_schema_downgrade_consume_filenames( $sqlt_type, $version_set, - ), $sql_to_run); + ), $sql_to_run, $version_set); return ['', $sql]; } @@ -600,7 +514,7 @@ sub upgrade_single_step { my $sql = $self->_run_sql_and_perl($self->_ddl_schema_upgrade_consume_filenames( $sqlt_type, $version_set, - ), $sql_to_run); + ), $sql_to_run, $version_set); return ['', $sql]; } @@ -626,13 +540,15 @@ sub prepare_protoschema { unless $yml; if (-e $filename ) { - carp "Overwriting existing DDL-YML file - $filename"; - unlink $filename; + if ($self->force_overwrite) { + carp "Overwriting existing DDL-YML file - $filename"; + unlink $filename; + } else { + die "Cannot overwrite '$filename', either enable force_overwrite or delete it" + } } - open my $file, q(>), $filename; - print {$file} $yml; - close $file; + $self->_write_data_string($filename, $yml); } __PACKAGE__->meta->make_immutable; @@ -690,7 +606,9 @@ the following example: | | `- 002-remove-customers.pl | `- upgrade | `- 1-2 - | `- 002-generate-customers.pl + | | `- 002-generate-customers.pl + | `- _any + | `- 999-bump-action.pl `- MySQL |- downgrade | `- 2-1 @@ -716,7 +634,9 @@ C<$sql_migration_dir/SQLite/deploy/1/001-auto.sql>. Next, $dm->upgrade_single_step([1,2]) would run C<$sql_migration_dir/SQLite/upgrade/1-2/001-auto.sql> followed by -C<$sql_migration_dir/_common/upgrade/1-2/002-generate-customers.pl>. +C<$sql_migration_dir/_common/upgrade/1-2/002-generate-customers.pl>, and +finally punctuated by +C<$sql_migration_dir/_common/upgrade/_any/999-bump-action.pl>. 
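As a rough sketch of what such an C<_any> script might look like (the C<999-bump-action.pl> name is simply the one used in the example tree above, and the C<AuditLog> result source is purely illustrative), it is just an anonymous sub which, with this change, is called with the version set as its second argument:

 #!perl

 use strict;
 use warnings;

 sub {
   # every script is now called with the schema plus the version set;
   # for an upgrade from 1 to 2 the version set is [1, 2]
   my ($schema, $versions) = @_;
   my ($from, $to) = @{$versions};

   # illustrative result source only; record which change just ran
   $schema->resultset('AuditLog')->create({
     message => "bump-action ran while moving from $from to $to",
   });
 }

For a plain deploy the same sub would instead see a single-element version set such as C<[1]>, as the comment added to the basic example under PERL SCRIPTS below points out.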
C<.pl> files don't have to be in the C<_common> directory, but most of the time they should be, because perl scripts are generally database independent. @@ -767,7 +687,7 @@ L objects. your storage type is. If you are not sure what your storage type is, take a look at the producers listed for L. Also note, C<_common> is a special case. C<_common> will get merged into whatever other files you -already have. This directory can containt the following directories itself: +already have. This directory can contain the following directories itself: =over 2 @@ -799,10 +719,17 @@ to L. =back +Note that there can be an C<_any> in the place of any of the versions (like +C<1-2> or C<1>), which means those scripts will be run B time. So if +you have an C<_any> in C<_common/upgrade>, that script will get run for every +upgrade. + =head1 PERL SCRIPTS A perl script for this tool is very simple. It merely needs to contain an -anonymous sub that takes a L as it's only argument. +anonymous sub that takes a L and the version set as it's +arguments. + A very basic perl script might look like: #!perl @@ -813,6 +740,9 @@ A very basic perl script might look like: sub { my $schema = shift; + # [1] for deploy, [1,2] for upgrade or downgrade, probably used with _any + my $versions = shift; + $schema->resultset('Users')->create({ name => 'root', password => 'root', @@ -827,6 +757,12 @@ instead of any pregenerated SQL. If you have a development server this is probably the best plan of action as you will not be putting as many generated files in your version control. Goes well with with C of C<[]>. +=attr force_overwrite + +When this attribute is true generated files will be overwritten when the +methods which create such files are run again. The default is false, in which +case the program will die with a message saying which file needs to be deleted. + =attr schema The L (B) that is used to talk to the database