X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?p=dbsrgits%2FDBIx-Class-DeploymentHandler.git;a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FDeploymentHandler%2FDeployMethod%2FSQL%2FTranslator.pm;h=0b00dba58fd645455430d204ef0428c5105bbdcf;hp=1082ab2a0a8ee0feb273ee46c17908d0685fe492;hb=b6bd39e5d75c994c09f3d0ae0661c572e1c71f05;hpb=bcc722970a94f2ba2debdeb630168bde1f8a6830 diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm index 1082ab2..0b00dba 100644 --- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm +++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm @@ -1,10 +1,13 @@ package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; use Moose; +# ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories + use autodie; use Carp qw( carp croak ); +use DBIx::Class::DeploymentHandler::LogImporter qw(:log :dlog); +use Context::Preserve; -use Method::Signatures::Simple; use Try::Tiny; use SQL::Translator; @@ -13,16 +16,25 @@ require SQL::Translator::Diff; require DBIx::Class::Storage; # loaded for type constraint use DBIx::Class::DeploymentHandler::Types; -use File::Path 'mkpath'; -use File::Spec::Functions; +use Path::Class qw(file dir); with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; +has ignore_ddl => ( + isa => 'Bool', + is => 'ro', + default => undef, +); + +has force_overwrite => ( + isa => 'Bool', + is => 'ro', + default => undef, +); + has schema => ( - isa => 'DBIx::Class::Schema', is => 'ro', required => 1, - handles => [qw( schema_version )], ); has storage => ( @@ -31,18 +43,19 @@ has storage => ( lazy_build => 1, ); -method _build_storage { +sub _build_storage { + my $self = shift; my $s = $self->schema->storage; $s->_determine_driver; $s } -has sqltargs => ( +has sql_translator_args => ( isa => 'HashRef', is => 'ro', default => sub { {} }, ); -has upgrade_directory => ( +has script_directory => ( isa 
=> 'Str', is => 'ro', required => 1, @@ -62,32 +75,59 @@ has txn_wrap => ( default => 1, ); -method __ddl_consume_with_prefix($type, $versions, $prefix) { - my $base_dir = $self->upgrade_directory; +has schema_version => ( + is => 'ro', + isa => 'Str', + lazy_build => 1, +); + +# this will probably never get called as the DBICDH +# will be passing down a schema_version normally, which +# is built the same way, but we leave this in place +sub _build_schema_version { + my $self = shift; + $self->schema->schema_version +} + +sub __ddl_consume_with_prefix { + my ($self, $type, $versions, $prefix) = @_; + my $base_dir = $self->script_directory; - my $main = catfile( $base_dir, $type ); - my $generic = catfile( $base_dir, '_generic' ); + my $main = dir( $base_dir, $type ); my $common = - catfile( $base_dir, '_common', $prefix, join q(-), @{$versions} ); + dir( $base_dir, '_common', $prefix, join q(-), @{$versions} ); + + my $common_any = + dir( $base_dir, '_common', $prefix, '_any' ); my $dir; if (-d $main) { - $dir = catfile($main, $prefix, join q(-), @{$versions}) - } elsif (-d $generic) { - $dir = catfile($generic, $prefix, join q(-), @{$versions}); + $dir = dir($main, $prefix, join q(-), @{$versions}) } else { - croak "neither $main or $generic exist; please write/generate some SQL"; + if ($self->ignore_ddl) { + return [] + } else { + croak "$main does not exist; please write/generate some SQL" + } } - - opendir my($dh), $dir; - my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh; - closedir $dh; - - if (-d $common) { - opendir my($dh), $common; - for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) { + my $dir_any = dir($main, $prefix, '_any'); + + my %files; + try { + opendir my($dh), $dir; + %files = + map { $_ => "$dir/$_" } + grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } + readdir $dh; + closedir $dh; + } catch { + die $_ unless $self->ignore_ddl; + }; + for my $dirname (grep { -d $_ } $common, 
$common_any, $dir_any) { + opendir my($dh), $dirname; + for my $filename (grep { /\.(?:sql|pl)$/ && -f file($dirname,$_) } readdir $dh) { unless ($files{$filename}) { - $files{$filename} = catfile($common,$filename); + $files{$filename} = file($dirname,$filename); } } closedir $dh; @@ -96,139 +136,398 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) { return [@files{sort keys %files}] } -method _ddl_schema_consume_filenames($type, $version) { - $self->__ddl_consume_with_prefix($type, [ $version ], 'schema') +sub _ddl_initialize_consume_filenames { + my ($self, $type, $version) = @_; + $self->__ddl_consume_with_prefix($type, [ $version ], 'initialize') } -method _ddl_schema_produce_filename($type, $version) { - my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); - mkpath($dirname) unless -d $dirname; +sub _ddl_schema_consume_filenames { + my ($self, $type, $version) = @_; + $self->__ddl_consume_with_prefix($type, [ $version ], 'deploy') +} + +sub _ddl_protoschema_deploy_consume_filenames { + my ($self, $version) = @_; + my $base_dir = $self->script_directory; - return catfile( $dirname, '001-auto.sql' ); + my $dir = dir( $base_dir, '_source', 'deploy', $version); + return [] unless -d $dir; + + opendir my($dh), $dir; + my %files = map { $_ => "$dir/$_" } grep { /\.yml$/ && -f "$dir/$_" } readdir $dh; + closedir $dh; + + return [@files{sort keys %files}] } -method _ddl_schema_up_consume_filenames($type, $versions) { - $self->__ddl_consume_with_prefix($type, $versions, 'up') +sub _ddl_protoschema_upgrade_consume_filenames { + my ($self, $versions) = @_; + my $base_dir = $self->script_directory; + + my $dir = dir( $base_dir, '_preprocess_schema', 'upgrade', join q(-), @{$versions}); + + return [] unless -d $dir; + + opendir my($dh), $dir; + my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; + closedir $dh; + + return [@files{sort keys %files}] } -method _ddl_schema_down_consume_filenames($type, 
$versions) { - $self->__ddl_consume_with_prefix($type, $versions, 'down') +sub _ddl_protoschema_downgrade_consume_filenames { + my ($self, $versions) = @_; + my $base_dir = $self->script_directory; + + my $dir = dir( $base_dir, '_preprocess_schema', 'downgrade', join q(-), @{$versions}); + + return [] unless -d $dir; + + opendir my($dh), $dir; + my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; + closedir $dh; + + return [@files{sort keys %files}] } -method _ddl_schema_up_produce_filename($type, $versions) { - my $dir = $self->upgrade_directory; +sub _ddl_protoschema_produce_filename { + my ($self, $version) = @_; + my $dirname = dir( $self->script_directory, '_source', 'deploy', $version ); + $dirname->mkpath unless -d $dirname; - my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions}); - mkpath($dirname) unless -d $dirname; + return "" . file( $dirname, '001-auto.yml' ); +} - return catfile( $dirname, '001-auto.sql' - ); +sub _ddl_schema_produce_filename { + my ($self, $type, $version) = @_; + my $dirname = dir( $self->script_directory, $type, 'deploy', $version ); + $dirname->mkpath unless -d $dirname; + + return "" . file( $dirname, '001-auto.sql' ); +} + +sub _ddl_schema_upgrade_consume_filenames { + my ($self, $type, $versions) = @_; + $self->__ddl_consume_with_prefix($type, $versions, 'upgrade') +} + +sub _ddl_schema_downgrade_consume_filenames { + my ($self, $type, $versions) = @_; + $self->__ddl_consume_with_prefix($type, $versions, 'downgrade') +} + +sub _ddl_schema_upgrade_produce_filename { + my ($self, $type, $versions) = @_; + my $dir = $self->script_directory; + + my $dirname = dir( $dir, $type, 'upgrade', join q(-), @{$versions}); + $dirname->mkpath unless -d $dirname; + + return "" . 
file( $dirname, '001-auto.sql' ); } -method _ddl_schema_down_produce_filename($type, $versions, $dir) { - my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} ); - mkpath($dirname) unless -d $dirname; +sub _ddl_schema_downgrade_produce_filename { + my ($self, $type, $versions, $dir) = @_; + my $dirname = dir( $dir, $type, 'downgrade', join q(-), @{$versions} ); + $dirname->mkpath unless -d $dirname; - return catfile( $dirname, '001-auto.sql'); + return "" . file( $dirname, '001-auto.sql'); } -method _run_sql_and_perl($filenames) { - my @files = @{$filenames}; +sub _run_sql_array { + my ($self, $sql) = @_; my $storage = $self->storage; + $sql = [ _split_sql_chunk( @$sql ) ]; + + Dlog_trace { "Running SQL $_" } $sql; + foreach my $line (@{$sql}) { + $storage->_query_start($line); + # the whole reason we do this is so that we can see the line that was run + try { + $storage->dbh_do (sub { $_[1]->do($line) }); + } + catch { + die "$_ (running line '$line')" + }; + $storage->_query_end($line); + } + return join "\n", @$sql +} - my $guard = $self->schema->txn_scope_guard if $self->txn_wrap; +# split a chunk o' SQL into statements +sub _split_sql_chunk { + my @sql = map { split /;\n/, $_ } @_; + for ( @sql ) { + # strip transactions + s/^(?:BEGIN|BEGIN TRANSACTION|COMMIT).*//mgi; + + # trim whitespaces + s/^\s+//gm; + s/\s+$//gm; + + # remove comments + s/^--.*//gm; + + # remove blank lines + s/^\n//gm; + + # put on single line + s/\n/ /g; + } + + return grep $_, @sql; +} + +sub _run_sql { + my ($self, $filename) = @_; + log_debug { "Running SQL from $filename" }; + return $self->_run_sql_array($self->_read_sql_file($filename)); +} + +sub _load_sandbox { + my $_file = shift; + + my $_package = $_file; + $_package =~ s/([^A-Za-z0-9_])/sprintf("_%2x", ord($1))/eg; + + my $fn = eval sprintf <<'END_EVAL', $_package; +package DBICDH::Sandbox::%s; +{ + our $app; + $app ||= require $_file; + if ( !$app && ( my $error = $@ || $! 
)) { die $error; } + $app; +} +END_EVAL + + croak $@ if $@; + + croak "$_file should define an anonymous sub that takes a schema but it didn't!" + unless ref $fn && ref $fn eq 'CODE'; + + return $fn; +} + +sub _run_perl { + my ($self, $filename, $versions) = @_; + log_debug { "Running Perl from $filename" }; + + my $fn = _load_sandbox($filename); + + Dlog_trace { "Running Perl $_" } $fn; + + $fn->($self->schema, $versions) +} + +sub txn_do { + my ( $self, $code ) = @_; + return $code->() unless $self->txn_wrap; + + my $guard = $self->schema->txn_scope_guard; + + return preserve_context { $code->() } after => sub { $guard->commit }; +} + +sub _run_sql_and_perl { + my ($self, $filenames, $sql_to_run, $versions) = @_; + my @files = @{$filenames}; + $self->txn_do(sub { + $self->_run_sql_array($sql_to_run) if $self->ignore_ddl; + + my $sql = ($sql_to_run)?join ";\n", @$sql_to_run:''; + FILENAME: + for my $filename (map file($_), @files) { + if ($self->ignore_ddl && $filename->basename =~ /^[^-]*-auto.*\.sql$/) { + next FILENAME + } elsif ($filename =~ /\.sql$/) { + $sql .= $self->_run_sql($filename) + } elsif ( $filename =~ /\.pl$/ ) { + $self->_run_perl($filename, $versions) + } else { + croak "A file ($filename) got to deploy that wasn't sql or perl!"; + } + } + + return $sql; + }); +} + +sub deploy { + my $self = shift; + my $version = (shift @_ || {})->{version} || $self->schema_version; + log_info { "deploying version $version" }; + my $sqlt_type = $self->storage->sqlt_type; my $sql; + my $sqltargs = $self->sql_translator_args; + if ($self->ignore_ddl) { + $sql = $self->_sql_from_yaml($sqltargs, + '_ddl_protoschema_deploy_consume_filenames', $sqlt_type + ); + } + return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames( + $sqlt_type, + $version, + ), $sql, [$version]); +} + +sub initialize { + my $self = shift; + my $args = shift; + my $version = $args->{version} || $self->schema_version; + log_info { "initializing version $version" }; + my 
$storage_type = $args->{storage_type} || $self->storage->sqlt_type; + + my @files = @{$self->_ddl_initialize_consume_filenames( + $storage_type, + $version, + )}; + for my $filename (@files) { - if ($filename =~ /\.sql$/) { - my @sql = @{$self->_read_sql_file($filename)}; - $sql .= join "\n", @sql; - - foreach my $line (@sql) { - $storage->_query_start($line); - try { - # do a dbh_do cycle here, as we need some error checking in - # place (even though we will ignore errors) - $storage->dbh_do (sub { $_[1]->do($line) }); - } - catch { - carp "$_ (running '${line}')" - } - $storage->_query_end($line); - } - } elsif ( $filename =~ /^(.+)\.pl$/ ) { - my $package = $1; + # We ignore sql for now (till I figure out what to do with it) + if ( $filename =~ /^(.+)\.pl$/ ) { my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; - # make the package name more palateable to perl - $package =~ s/\W/_/g; no warnings 'redefine'; - eval "package $package;\n\n$filedata"; + my $fn = eval "$filedata"; use warnings; - if (my $fn = $package->can('run')) { - $fn->($self->schema); + if ($@) { + croak "$filename failed to compile: $@"; + } elsif (ref $fn eq 'CODE') { + $fn->() } else { - carp "$filename should define a run method that takes a schema but it didn't!"; + croak "$filename should define an anonymous sub but it didn't!"; } } else { - croak "A file got to deploy that wasn't sql or perl!"; + croak "A file ($filename) got to initialize_scripts that wasn't sql or perl!"; } } +} - $guard->commit if $self->txn_wrap; +sub _sqldiff_from_yaml { + my ($self, $from_version, $to_version, $db, $direction) = @_; + my $dir = $self->script_directory; + my $sqltargs = { + add_drop_table => 1, + ignore_constraint_names => 1, + ignore_index_names => 1, + %{$self->sql_translator_args} + }; - return $sql; -} + my $source_schema; + { + my $prefilename = $self->_ddl_protoschema_produce_filename($from_version, $dir); -sub deploy { - my $self = shift; + # should probably be a croak + carp("No 
previous schema file found ($prefilename)") + unless -e $prefilename; - return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames( - $self->storage->sqlt_type, - $self->schema_version - )); + my $t = SQL::Translator->new({ + %{$sqltargs}, + debug => 0, + trace => 0, + parser => 'SQL::Translator::Parser::YAML', + }); + + my $out = $t->translate( $prefilename ) + or croak($t->error); + + $source_schema = $t->schema; + + $source_schema->name( $prefilename ) + unless $source_schema->name; + } + + my $dest_schema; + { + my $filename = $self->_ddl_protoschema_produce_filename($to_version, $dir); + + # should probably be a croak + carp("No next schema file found ($filename)") + unless -e $filename; + + my $t = SQL::Translator->new({ + %{$sqltargs}, + debug => 0, + trace => 0, + parser => 'SQL::Translator::Parser::YAML', + }); + + my $out = $t->translate( $filename ) + or croak($t->error); + + $dest_schema = $t->schema; + + $dest_schema->name( $filename ) + unless $dest_schema->name; + } + + my $transform_files_method = "_ddl_protoschema_${direction}_consume_filenames"; + my $transforms = $self->_coderefs_per_files( + $self->$transform_files_method([$from_version, $to_version]) + ); + $_->($source_schema, $dest_schema) for @$transforms; + + return [SQL::Translator::Diff::schema_diff( + $source_schema, $db, + $dest_schema, $db, + $sqltargs + )]; +} + +sub _sql_from_yaml { + my ($self, $sqltargs, $from_file, $db) = @_; + my $schema = $self->schema; + my $version = $self->schema_version; + + my @sql; + + my $actual_file = $self->$from_file($version); + for my $yaml_filename (@{( + DlogS_trace { "generating SQL from Serialized SQL Files: $_" } + (ref $actual_file?$actual_file:[$actual_file]) + )}) { + my $sqlt = SQL::Translator->new({ + add_drop_table => 0, + parser => 'SQL::Translator::Parser::YAML', + %{$sqltargs}, + producer => $db, + }); + + push @sql, $sqlt->translate($yaml_filename); + if(!@sql) { + carp("Failed to translate to $db, skipping. (" . 
$sqlt->error . ")"); + return undef; + } + } + return \@sql; } sub _prepare_install { - my $self = shift; - my $sqltargs = { %{$self->sqltargs}, %{shift @_} }; + my $self = shift; + my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; + my $from_file = shift; my $to_file = shift; - my $schema = $self->schema; + my $dir = $self->script_directory; my $databases = $self->databases; - my $dir = $self->upgrade_directory; - my $version = $schema->schema_version; - - my $sqlt = SQL::Translator->new({ - add_drop_table => 1, - ignore_constraint_names => 1, - ignore_index_names => 1, - parser => 'SQL::Translator::Parser::DBIx::Class', - %{$sqltargs} - }); - - my $sqlt_schema = $sqlt->translate( data => $schema ) - or croak($sqlt->error); + my $version = $self->schema_version; foreach my $db (@$databases) { - $sqlt->reset; - $sqlt->{schema} = $sqlt_schema; - $sqlt->producer($db); + my $sql = $self->_sql_from_yaml($sqltargs, $from_file, $db ) or next; my $filename = $self->$to_file($db, $version, $dir); if (-e $filename ) { - carp "Overwriting existing DDL file - $filename"; - unlink $filename; - } - - my $output = $sqlt->translate; - if(!$output) { - carp("Failed to translate to $db, skipping. (" . $sqlt->error . 
")"); - next; + if ($self->force_overwrite) { + carp "Overwriting existing DDL file - $filename"; + unlink $filename; + } else { + die "Cannot overwrite '$filename', either enable force_overwrite or delete it" + } } open my $file, q(>), $filename; - print {$file} $output; + print {$file} join ";\n", @$sql, ''; close $file; } } @@ -237,16 +536,31 @@ sub _resultsource_install_filename { my ($self, $source_name) = @_; return sub { my ($self, $type, $version) = @_; - my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); - mkpath($dirname) unless -d $dirname; + my $dirname = dir( $self->script_directory, $type, 'deploy', $version ); + $dirname->mkpath unless -d $dirname; - return catfile( $dirname, "001-auto-$source_name.sql" ); + return "" . file( $dirname, "001-auto-$source_name.sql" ); } } -sub install_resultsource { - my ($self, $source, $version) = @_; +sub _resultsource_protoschema_filename { + my ($self, $source_name) = @_; + return sub { + my ($self, $version) = @_; + my $dirname = dir( $self->script_directory, '_source', 'deploy', $version ); + $dirname->mkpath unless -d $dirname; + return "" . file( $dirname, "001-auto-$source_name.yml" ); + } +} + +sub install_resultsource { + my ($self, $args) = @_; + my $source = $args->{result_source} + or die 'result_source must be passed to install_resultsource'; + my $version = $args->{version} + or die 'version must be passed to install_resultsource'; + log_info { 'installing_resultsource ' . $source->source_name . ", version $version" }; my $rs_install_file = $self->_resultsource_install_filename($source->source_name); @@ -256,170 +570,166 @@ sub install_resultsource { $version, ) ]; - $self->_run_sql_and_perl($files); + $self->_run_sql_and_perl($files, '', [$version]); } sub prepare_resultsource_install { my $self = shift; - my $source = shift; + my $source = (shift @_)->{result_source}; + log_info { 'preparing install for resultsource ' . 
$source->source_name }; - my $filename = $self->_resultsource_install_filename($source->source_name); - $self->_prepare_install({ + my $install_filename = $self->_resultsource_install_filename($source->source_name); + my $proto_filename = $self->_resultsource_protoschema_filename($source->source_name); + $self->prepare_protoschema({ parser_args => { sources => [$source->source_name], } - }, $filename); + }, $proto_filename); + $self->_prepare_install({}, $proto_filename, $install_filename); } -sub prepare_install { +sub prepare_deploy { + log_info { 'preparing deploy' }; my $self = shift; - $self->_prepare_install({}, '_ddl_schema_produce_filename'); + $self->prepare_protoschema({ + # Exclude __VERSION so that it gets installed separately + parser_args => { sources => [grep { $_ ne '__VERSION' } $self->schema->sources], } + }, '_ddl_protoschema_produce_filename'); + $self->_prepare_install({}, '_ddl_protoschema_produce_filename', '_ddl_schema_produce_filename'); } sub prepare_upgrade { - my ($self, $from_version, $to_version, $version_set) = @_; - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'up'); + my ($self, $args) = @_; + log_info { + "preparing upgrade from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'upgrade' + ); } sub prepare_downgrade { - my ($self, $from_version, $to_version, $version_set) = @_; + my ($self, $args) = @_; + log_info { + "preparing downgrade from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'downgrade' + ); +} - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'down'); +sub _coderefs_per_files { + my ($self, $files) = @_; + no warnings 'redefine'; + [map eval do { local( @ARGV, $/ ) = $_; <> }, @$files] } -method _prepare_changegrade($from_version, $to_version, $version_set, $direction) { 
+sub _prepare_changegrade { + my ($self, $from_version, $to_version, $version_set, $direction) = @_; my $schema = $self->schema; my $databases = $self->databases; - my $dir = $self->upgrade_directory; - my $sqltargs = $self->sqltargs; - - my $schema_version = $schema->schema_version; - - $sqltargs = { - add_drop_table => 1, - ignore_constraint_names => 1, - ignore_index_names => 1, - %{$sqltargs} - }; - - my $sqlt = SQL::Translator->new( $sqltargs ); - - $sqlt->parser('SQL::Translator::Parser::DBIx::Class'); - my $sqlt_schema = $sqlt->translate( data => $schema ) - or croak($sqlt->error); + my $dir = $self->script_directory; + my $schema_version = $self->schema_version; + my $diff_file_method = "_ddl_schema_${direction}_produce_filename"; foreach my $db (@$databases) { - $sqlt->reset; - $sqlt->{schema} = $sqlt_schema; - $sqlt->producer($db); - - my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir); - unless(-e $prefilename) { - carp("No previous schema file found ($prefilename)"); - next; - } - my $diff_file_method = "_ddl_schema_${direction}_produce_filename"; my $diff_file = $self->$diff_file_method($db, $version_set, $dir ); if(-e $diff_file) { - carp("Overwriting existing $direction-diff file - $diff_file"); - unlink $diff_file; - } - - my $source_schema; - { - my $t = SQL::Translator->new({ - %{$sqltargs}, - debug => 0, - trace => 0, - }); - - $t->parser( $db ) # could this really throw an exception? - or croak($t->error); - - my $out = $t->translate( $prefilename ) - or croak($t->error); - - $source_schema = $t->schema; - - $source_schema->name( $prefilename ) - unless $source_schema->name; - } - - # The "new" style of producers have sane normalization and can support - # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't - # And we have to diff parsed SQL against parsed SQL. 
- my $dest_schema = $sqlt_schema; - - unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) { - my $t = SQL::Translator->new({ - %{$sqltargs}, - debug => 0, - trace => 0, - }); - - $t->parser( $db ) # could this really throw an exception? - or croak($t->error); - - my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir); - my $out = $t->translate( $filename ) - or croak($t->error); - - $dest_schema = $t->schema; - - $dest_schema->name( $filename ) - unless $dest_schema->name; + if ($self->force_overwrite) { + carp("Overwriting existing $direction-diff file - $diff_file"); + unlink $diff_file; + } else { + die "Cannot overwrite '$diff_file', either enable force_overwrite or delete it" + } } - my $diff = SQL::Translator::Diff::schema_diff( - $source_schema, $db, - $dest_schema, $db, - $sqltargs - ); open my $file, q(>), $diff_file; - print {$file} $diff; + print {$file} join ";\n", @{$self->_sqldiff_from_yaml($from_version, $to_version, $db, $direction)}; close $file; } } -method _read_sql_file($file) { +sub _read_sql_file { + my ($self, $file) = @_; return unless $file; - open my $fh, '<', $file; - my @data = split /;\n/, join '', <$fh>; - close $fh; - - @data = grep { - $_ && # remove blank lines - !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's - } map { - s/^\s+//; s/\s+$//; # trim whitespace - join '', grep { !/^--/ } split /\n/ # remove comments - } @data; + local $/ = undef; #sluuuuuurp - return \@data; + open my $fh, '<', $file; + return [ _split_sql_chunk( <$fh> ) ]; } sub downgrade_single_step { my $self = shift; - my $version_set = shift @_; - - my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames( - $self->storage->sqlt_type, + my $version_set = (shift @_)->{version_set}; + Dlog_info { "downgrade_single_step'ing $_" } $version_set; + + my $sqlt_type = $self->storage->sqlt_type; + my $sql_to_run; + if ($self->ignore_ddl) { + $sql_to_run = $self->_sqldiff_from_yaml( + $version_set->[0], 
$version_set->[1], $sqlt_type, 'downgrade', + ); + } + my $sql = $self->_run_sql_and_perl($self->_ddl_schema_downgrade_consume_filenames( + $sqlt_type, $version_set, - )); + ), $sql_to_run, $version_set); return ['', $sql]; } sub upgrade_single_step { my $self = shift; - my $version_set = shift @_; - - my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames( - $self->storage->sqlt_type, + my $version_set = (shift @_)->{version_set}; + Dlog_info { "upgrade_single_step'ing $_" } $version_set; + + my $sqlt_type = $self->storage->sqlt_type; + my $sql_to_run; + if ($self->ignore_ddl) { + $sql_to_run = $self->_sqldiff_from_yaml( + $version_set->[0], $version_set->[1], $sqlt_type, 'upgrade', + ); + } + my $sql = $self->_run_sql_and_perl($self->_ddl_schema_upgrade_consume_filenames( + $sqlt_type, $version_set, - )); + ), $sql_to_run, $version_set); return ['', $sql]; } +sub prepare_protoschema { + my $self = shift; + my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; + my $to_file = shift; + my $filename + = $self->$to_file($self->schema_version); + + # we do this because the code that uses this sets parser args, + # so we just need to merge in the package + my $sqlt = SQL::Translator->new({ + parser => 'SQL::Translator::Parser::DBIx::Class', + producer => 'SQL::Translator::Producer::YAML', + %{ $sqltargs }, + }); + + my $yml = $sqlt->translate(data => $self->schema); + + croak("Failed to translate to YAML: " . $sqlt->error) + unless $yml; + + if (-e $filename ) { + if ($self->force_overwrite) { + carp "Overwriting existing DDL-YML file - $filename"; + unlink $filename; + } else { + die "Cannot overwrite '$filename', either enable force_overwrite or delete it" + } + } + + open my $file, q(>), $filename; + print {$file} $yml; + close $file; +} + __PACKAGE__->meta->make_immutable; 1; @@ -430,136 +740,251 @@ __END__ =head1 DESCRIPTION -This class is the meat of L. 
It takes care of -generating sql files representing schemata as well as sql files to move from -one version of a schema to the rest. One of the hallmark features of this -class is that it allows for multiple sql files for deploy and upgrade, allowing -developers to fine tune deployment. In addition it also allows for perl files -to be run at any stage of the process. +This class is the meat of L. It takes care +of generating serialized schemata as well as sql files to move from one +version of a schema to the rest. One of the hallmark features of this class +is that it allows for multiple sql files for deploy and upgrade, allowing +developers to fine tune deployment. In addition it also allows for perl +files to be run at any stage of the process. For basic usage see L. What's documented here is extra fun stuff or private methods. =head1 DIRECTORY LAYOUT -It's heavily based upon L. - -=attr schema - -The L (B) that is used to talk to the database -and generate the DDL. - -=attr storage - -The L that is I used to talk to the database -and generate the DDL. This is automatically created with L. - -=attr sqltargs +Arguably this is the best feature of L. +It's spiritually based upon L, but has a +lot of extensions and modifications, so even if you are familiar with it, +please read this. 
I feel like the best way to describe the layout is with +the following example: + + $sql_migration_dir + |- _source + | |- deploy + | |- 1 + | | `- 001-auto.yml + | |- 2 + | | `- 001-auto.yml + | `- 3 + | `- 001-auto.yml + |- SQLite + | |- downgrade + | | `- 2-1 + | | `- 001-auto.sql + | |- deploy + | | `- 1 + | | `- 001-auto.sql + | `- upgrade + | |- 1-2 + | | `- 001-auto.sql + | `- 2-3 + | `- 001-auto.sql + |- _common + | |- downgrade + | | `- 2-1 + | | `- 002-remove-customers.pl + | `- upgrade + | `- 1-2 + | | `- 002-generate-customers.pl + | `- _any + | `- 999-bump-action.pl + `- MySQL + |- downgrade + | `- 2-1 + | `- 001-auto.sql + |- initialize + | `- 1 + | |- 001-create_database.pl + | `- 002-create_users_and_permissions.pl + |- deploy + | `- 1 + | `- 001-auto.sql + `- upgrade + `- 1-2 + `- 001-auto.sql + +So basically, the code + + $dm->deploy(1) + +on an C database that would simply run +C<$sql_migration_dir/SQLite/deploy/1/001-auto.sql>. Next, + + $dm->upgrade_single_step([1,2]) + +would run C<$sql_migration_dir/SQLite/upgrade/1-2/001-auto.sql> followed by +C<$sql_migration_dir/_common/upgrade/1-2/002-generate-customers.pl>, and +finally punctuated by +C<$sql_migration_dir/_common/upgrade/_any/999-bump-action.pl>. + +C<.pl> files don't have to be in the C<_common> directory, but most of the time +they should be, because perl scripts are generally database independent. + +Note that unlike most steps in the process, C will not run SQL, as +there may not even be an database at initialize time. It will run perl scripts +just like the other steps in the process, but nothing is passed to them. +Until people have used this more it will remain freeform, but a recommended use +of initialize is to have it prompt for username and password, and then call the +appropriate C<< CREATE DATABASE >> commands etc. 
+ +=head2 Directory Specification + +The following subdirectories are recognized by this DeployMethod: + +=over 2 + +=item C<_source> This directory can contain the following directories: + +=over 2 + +=item C This directory merely contains directories named after schema +versions, which in turn contain C files that are serialized versions +of the schema at that version. These files are not for editing by hand. + +=back + +=item C<_preprocess_schema> This directory can contain the following +directories: + +=over 2 + +=item C This directory merely contains directories named after +migrations, which are of the form C<$from_version-$to_version>. Inside of +these directories you may put Perl scripts which are to return a subref +that takes the arguments C<< $from_schema, $to_schema >>, which are +L objects. + +=item C This directory merely contains directories named after +migrations, which are of the form C<$from_version-$to_version>. Inside of +these directories you may put Perl scripts which are to return a subref +that takes the arguments C<< $from_schema, $to_schema >>, which are +L objects. + +=back + +=item C<$storage_type> This is a set of scripts that gets run depending on what +your storage type is. If you are not sure what your storage type is, take a +look at the producers listed for L. Also note, C<_common> +is a special case. C<_common> will get merged into whatever other files you +already have. This directory can contain the following directories itself: + +=over 2 + +=item C Gets run before the C is Ced. Has the +same structure as the C subdirectory as well; that is, it has a +directory for each schema version. Unlike C, C, and C +though, it can only run C<.pl> files, and the coderef in the perl files get +no arguments passed to them. + +=item C Gets run when the schema is Ced. Structure is a +directory per schema version, and then files are merged with C<_common> and run +in filename order. C<.sql> files are merely run, as expected. 
C<.pl> files are +run according to L. + +=item C Gets run when the schema is Cd. Structure is a directory +per upgrade step, (for example, C<1-2> for upgrading from version 1 to version +2,) and then files are merged with C<_common> and run in filename order. +C<.sql> files are merely run, as expected. C<.pl> files are run according +to L. -#rename +=item C Gets run when the schema is Cd. Structure is a directory +per downgrade step, (for example, C<2-1> for downgrading from version 2 to version +1,) and then files are merged with C<_common> and run in filename order. +C<.sql> files are merely run, as expected. C<.pl> files are run according +to L. -=attr upgrade_directory -The directory (default C<'sql'>) that upgrades are stored in +=back -=attr databases - -The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to -generate files for +=back -=attr txn_wrap - -Set to true (which is the default) to wrap all upgrades and deploys in a single -transaction. - -=method __ddl_consume_with_prefix - - $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' ) +Note that there can be an C<_any> in the place of any of the versions (like +C<1-2> or C<1>), which means those scripts will be run B time. So if +you have an C<_any> in C<_common/upgrade>, that script will get run for every +upgrade. -This is the meat of the multi-file upgrade/deploy stuff. It returns a list of -files in the order that they should be run for a generic "type" of upgrade. -You should not be calling this in user code. +=head1 PERL SCRIPTS -=method _ddl_schema_consume_filenames +A perl script for this tool is very simple. It merely needs to contain an +anonymous sub that takes a L and the version set as it's +arguments. - $dm->__ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] ) +A very basic perl script might look like: -Just a curried L. Get's a list of files for an -initial deploy. 
+  #!perl

-=method _ddl_schema_produce_filename
+  use strict;
+  use warnings;

-  $dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
+  use DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator::ScriptHelpers
+     'schema_from_schema_loader';

-Returns a single file in which an initial schema will be stored.
+  schema_from_schema_loader({ naming => 'v4' }, sub {
+    my $schema = shift;

-=method _ddl_schema_up_consume_filenames
+    # [1] for deploy, [1,2] for upgrade or downgrade, probably used with _any
+    my $versions = shift;

-  $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
+    $schema->resultset('Users')->create({
+      name => 'root',
+      password => 'root',
+    })
+  })

-Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for an
-upgrade.
+Note that the above uses
+L<DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator::ScriptHelpers/schema_from_schema_loader>.
+Using a raw coderef is strongly discouraged as it is likely to break as you
+modify your schema.

-=method _ddl_schema_down_consume_filenames
+=attr ignore_ddl

-  $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
+This attribute will, when set to true (default is false), cause the DM to use
+L<SQL::Translator> to use the C<_source>'s serialized SQL::Translator::Schema
+instead of any pregenerated SQL. If you have a development server this is
+probably the best plan of action as you will not be putting as many generated
+files in your version control. Goes well with C<databases> of C<[]>.

-Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for a
-downgrade.
+=attr force_overwrite

-=method _ddl_schema_up_produce_filenames
+When this attribute is true generated files will be overwritten when the
+methods which create such files are run again. The default is false, in which
+case the program will die with a message saying which file needs to be deleted.

-  $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
+=attr schema

-Returns a single file in which the sql to upgrade from one schema to another
-will be stored.
+The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database
+and generate the DDL.
-=method _ddl_schema_down_produce_filename
+=attr storage

-  $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
+The L<DBIx::Class::Storage> that is I<actually> used to talk to the database
+and generate the DDL. This is automatically created with L</_build_storage>.

-Returns a single file in which the sql to downgrade from one schema to another
-will be stored.
+=attr sql_translator_args

-=method _resultsource_install_filename
+The arguments that get passed to L<SQL::Translator> when it's used.

-  my $filename_fn = $dm->_resultsource_install_filename('User');
-  $dm->$filename_fn('SQLite', '1.00')
+=attr script_directory

-Returns a function which in turn returns a single filename used to install a
-single resultsource. Weird interface is convenient for me. Deal with it.
+The directory (default C<'sql'>) that scripts are stored in

-=method _run_sql_and_perl
+=attr databases

-  $dm->_run_sql_and_perl([qw( list of filenames )])
+The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to
+generate files for

-Simply put, this runs the list of files passed to it. If the file ends in
-C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file.
+=attr txn_wrap

-Depending on L</txn_wrap> all of the files run will be wrapped in a single
+Set to true (which is the default) to wrap all upgrades and deploys in a single
 transaction.

-=method _prepare_install
-
-  $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
-
-Generates the sql file for installing the database. First arg is simply
-L<SQL::Translator> args and the second is a coderef that returns the filename
-to store the sql in.
-
-=method _prepare_changegrade
-
-  $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
-
-Generates the sql file for migrating from one schema version to another. First
-arg is the version to start from, second is the version to go to, third is the
-L</version_set>, and last is the
-direction of the changegrade, be it 'up' or 'down'.
-
-=method _read_sql_file
+=attr schema_version

-  $dm->_read_sql_file('foo.sql')
+The version the schema on your harddrive is at. Defaults to
+C<< $self->schema->schema_version >>.

-Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
-transactions, and blank lines.
+=head1 SEE ALSO

+This class is an implementation of
+L<DBIx::Class::DeploymentHandler::HandlesDeploy>. Pretty much all the
+documentation is there.