diff --git a/lib/DBIx/Class/DeploymentHandler.pm b/lib/DBIx/Class/DeploymentHandler.pm
index 6bbde02..e78d07f 100644
--- a/lib/DBIx/Class/DeploymentHandler.pm
+++ b/lib/DBIx/Class/DeploymentHandler.pm
@@ -6,19 +6,31 @@ require DBIx::Class::Schema; # loaded for type constraint
 require DBIx::Class::Storage; # loaded for type constraint
 require DBIx::Class::ResultSet; # loaded for type constraint
 use Carp::Clan '^DBIx::Class::DeploymentHandler';
+use SQL::Translator;
+
+BEGIN {
+  use Moose::Util::TypeConstraints;
+  subtype 'DBIx::Class::DeploymentHandler::Databases'
+    => as 'ArrayRef[Str]';
+
+  coerce 'DBIx::Class::DeploymentHandler::Databases'
+    => from 'Str'
+    => via { [$_] };
+  no Moose::Util::TypeConstraints;
+}
 
 has schema => (
   isa => 'DBIx::Class::Schema',
   is => 'ro',
   required => 1,
-  handles => [qw( schema_version )],
+  handles => [qw( ddl_filename schema_version )],
 );
 
 has upgrade_directory => (
   isa => 'Str',
   is => 'ro',
   required => 1,
-  default => 'upgrades',
+  default => 'sql',
 );
 
 has backup_directory => (
@@ -39,7 +51,7 @@ method _build_storage {
 }
 
 has _filedata => (
-  isa => 'Str',
+  isa => 'ArrayRef[Str]',
   is => 'rw',
 );
 
@@ -62,11 +74,25 @@ has version_rs => (
   handles => [qw( is_installed db_version )],
 );
 
-method _build_version_rs { $self->schema->resultset('VersionResult') }
+has databases => (
+  coerce => 1,
+  isa => 'DBIx::Class::DeploymentHandler::Databases',
+  is => 'ro',
+  default => sub { [qw( MySQL SQLite PostgreSQL )] },
+);
 
-method backup { $self->storage->backup($self->backup_directory) }
+has sqltargs => (
+  isa => 'HashRef',
+  is => 'ro',
+  default => sub { {} },
+);
 
-method create_ddl_dir { $self->storage->create_ddl_dir( $self->schema, @_ ) }
+method _build_version_rs {
+  $self->schema->set_us_up_the_bomb;
+  $self->schema->resultset('__VERSION')
+}
+
+method backup { $self->storage->backup($self->backup_directory) }
 
 method install($new_version) {
   carp 'Install not possible as versions table already exists in database'
@@ -87,7 +113,7 @@ method install($new_version) {
 
 method create_upgrade_path { }
 
-method ordered_schema_versions { }
+method ordered_schema_versions { undef }
 
 method upgrade {
   my $db_version = $self->db_version;
@@ -163,6 +189,131 @@ method upgrade_single_step($db_version, $target_version) {
   });
 }
 
+method create_ddl_dir($version, $preversion) {
+  my $schema = $self->schema;
+  my $databases = $self->databases;
+  my $dir = $self->upgrade_directory;
+  my $sqltargs = $self->sqltargs;
+  unless( -d $dir ) {
+    carp "Upgrade directory $dir does not exist, using ./\n";
+    $dir = "./";
+  }
+
+  my $schema_version = $schema->schema_version || '1.x';
+  $version ||= $schema_version;
+
+  $sqltargs = {
+    add_drop_table => 1,
+    ignore_constraint_names => 1,
+    ignore_index_names => 1,
+    %{$sqltargs || {}}
+  };
+
+  my $sqlt = SQL::Translator->new( $sqltargs );
+
+  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
+  my $sqlt_schema = $sqlt->translate({ data => $schema })
+    or $self->throw_exception ($sqlt->error);
+
+  foreach my $db (@$databases) {
+    $sqlt->reset;
+    $sqlt->{schema} = $sqlt_schema;
+    $sqlt->producer($db);
+
+    my $filename = $self->ddl_filename($db, $version, $dir);
+    if (-e $filename && ($version eq $schema_version )) {
+      # if we are dumping the current version, overwrite the DDL
+      carp "Overwriting existing DDL file - $filename";
+      unlink $filename;
+    }
+
+    my $output = $sqlt->translate;
+    if(!$output) {
+      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
+      next;
+    }
+    my $file;
+    unless( open $file, q(>), $filename ) {
+      $self->throw_exception("Can't open $filename for writing ($!)");
+      next;
+    }
+    print {$file} $output;
+    close $file;
+
+    next unless $preversion;
+
+    require SQL::Translator::Diff;
+
+    my $prefilename = $self->ddl_filename($db, $preversion, $dir);
+    unless(-e $prefilename) {
+      carp("No previous schema file found ($prefilename)");
+      next;
+    }
+
+    my $diff_file = $self->ddl_filename($db, $version, $dir, $preversion);
+    if(-e $diff_file) {
+      carp("Overwriting existing diff file - $diff_file");
+      unlink $diff_file;
+    }
+
+    my $source_schema;
+    {
+      my $t = SQL::Translator->new({
+        %{$sqltargs},
+        debug => 0,
+        trace => 0,
+      });
+
+      $t->parser( $db ) # could this really throw an exception?
+        or $self->throw_exception ($t->error);
+
+      my $out = $t->translate( $prefilename )
+        or $self->throw_exception ($t->error);
+
+      $source_schema = $t->schema;
+
+      $source_schema->name( $prefilename )
+        unless $source_schema->name;
+    }
+
+    # The "new" style of producers have sane normalization and can support
+    # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
+    # And we have to diff parsed SQL against parsed SQL.
+    my $dest_schema = $sqlt_schema;
+
+    unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
+      my $t = SQL::Translator->new({
+        %{$sqltargs},
+        debug => 0,
+        trace => 0,
+      });
+
+      $t->parser( $db ) # could this really throw an exception?
+        or $self->throw_exception ($t->error);
+
+      my $out = $t->translate( $filename )
+        or $self->throw_exception ($t->error);
+
+      $dest_schema = $t->schema;
+
+      $dest_schema->name( $filename )
+        unless $dest_schema->name;
+    }
+
+    my $diff = SQL::Translator::Diff::schema_diff(
+      $source_schema, $db,
+      $dest_schema, $db,
+      $sqltargs
+    );
+    unless(open $file, q(>), $diff_file) {
+      $self->throw_exception("Can't write to $diff_file ($!)");
+      next;
+    }
+    print {$file} $diff;
+    close $file;
+  }
+}
+
 method do_upgrade { $self->run_upgrade(qr/.*?/) }
 
 method run_upgrade($stm) {
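
For reference, the attributes and the create_ddl_dir method introduced above could be driven from calling code roughly as follows. My::Schema, the connect string, and the version numbers are placeholders, so treat this as a sketch of the intended call pattern rather than code from this repository:

  use DBIx::Class::DeploymentHandler;
  use My::Schema; # hypothetical schema class

  my $schema = My::Schema->connect('dbi:SQLite:dbname=app.db');

  my $dh = DBIx::Class::DeploymentHandler->new(
    schema            => $schema,
    upgrade_directory => 'sql',                    # the new default
    databases         => ['SQLite', 'PostgreSQL'], # a single Str also coerces to an arrayref
    sqltargs          => { add_drop_table => 0 },  # merged into the SQL::Translator options
  );

  # write DDL files for the current schema version, plus a diff from version 1
  $dh->create_ddl_dir($schema->schema_version, 1);

  # install on a fresh database, or walk the upgrade path on an existing one
  $dh->is_installed ? $dh->upgrade : $dh->install($schema->schema_version);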