X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FDeploymentHandler%2FDeployMethod%2FSQL%2FTranslator.pm;h=84340c5b06ab6999aace06daea62a58eabf8a6a0;hb=4f85efc6aee3c4bbb15b6bd3e6d44837fa97f360;hp=f8377bfe008a6b4ed40b4a359f4c4b0d03f436c7;hpb=7d2a697450860554093a1021d3fe4405bac6328e;p=dbsrgits%2FDBIx-Class-DeploymentHandler.git diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm index f8377bf..84340c5 100644 --- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm +++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm @@ -1,6 +1,8 @@ package DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator; use Moose; +# ABSTRACT: Manage your SQL and Perl migrations in nicely laid out directories + use autodie; use Carp qw( carp croak ); @@ -22,7 +24,6 @@ has schema => ( isa => 'DBIx::Class::Schema', is => 'ro', required => 1, - handles => [qw( schema_version )], ); has storage => ( @@ -37,7 +38,7 @@ method _build_storage { $s } -has sqltargs => ( +has sql_translator_args => ( isa => 'HashRef', is => 'ro', default => sub { {} }, @@ -56,18 +57,19 @@ has databases => ( default => sub { [qw( MySQL SQLite PostgreSQL )] }, ); -has _filedata => ( - isa => 'ArrayRef[Str]', - is => 'rw', - default => sub { [] }, -); - has txn_wrap => ( is => 'ro', isa => 'Bool', default => 1, ); +has schema_version => ( + is => 'ro', + lazy_build => 1, +); + +method _build_schema_version { $self->schema->schema_version } + method __ddl_consume_with_prefix($type, $versions, $prefix) { my $base_dir = $self->upgrade_directory; @@ -86,12 +88,12 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) { } opendir my($dh), $dir; - my %files = map { $_ => "$dir/$_" } grep { /\.sql$/ && -f "$dir/$_" } readdir $dh; + my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh; closedir $dh; if (-d $common) { opendir my($dh), $common; - for my $filename (grep { /\.sql$/ && -f catfile($common,$_) } readdir $dh) { + for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) { unless ($files{$filename}) { $files{$filename} = catfile($common,$filename); } @@ -102,6 +104,10 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) { return [@files{sort keys %files}] } +method _ddl_preinstall_consume_filenames($type, $version) { + $self->__ddl_consume_with_prefix($type, [ $version ], 'preinstall') +} + method _ddl_schema_consume_filenames($type, $version) { $self->__ddl_consume_with_prefix($type, [ $version ], 'schema') } @@ -138,41 +144,104 @@ method _ddl_schema_down_produce_filename($type, $versions, $dir) { return catfile( $dirname, '001-auto.sql'); } -sub deploy { - my $self = shift; - my $storage = $self->storage; +method _run_sql_and_perl($filenames) { + my @files = @{$filenames}; + my $storage = $self->storage; + my $guard = $self->schema->txn_scope_guard if $self->txn_wrap; - my @sql = map @{$self->_read_sql_file($_)}, @{$self->_ddl_schema_consume_filenames( - $self->storage->sqlt_type, - $self->schema_version - )}; - - foreach my $line (@sql) { - $storage->_query_start($line); - try { - # do a dbh_do cycle here, as we need some error checking in - # place (even though we will ignore errors) - $storage->dbh_do (sub { $_[1]->do($line) }); - } - catch { - carp "$_ (running '${line}')" + my $sql; + for my $filename (@files) { + if ($filename =~ /\.sql$/) { + my @sql = @{$self->_read_sql_file($filename)}; + 
$sql .= join "\n", @sql; + + foreach my $line (@sql) { + $storage->_query_start($line); + try { + # do a dbh_do cycle here, as we need some error checking in + # place (even though we will ignore errors) + $storage->dbh_do (sub { $_[1]->do($line) }); + } + catch { + carp "$_ (running '${line}')" + } + $storage->_query_end($line); + } + } elsif ( $filename =~ /^(.+)\.pl$/ ) { + my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; + + no warnings 'redefine'; + my $fn = eval "$filedata"; + use warnings; + + if ($@) { + carp "$filename failed to compile: $@"; + } elsif (ref $fn eq 'CODE') { + $fn->($self->schema) + } else { + carp "$filename should define an anonymouse sub that takes a schema but it didn't!"; + } + } else { + croak "A file ($filename) got to deploy that wasn't sql or perl!"; } - $storage->_query_end($line); } $guard->commit if $self->txn_wrap; - return join "\n", @sql; + + return $sql; +} + +sub deploy { + my $self = shift; + my $version = shift || $self->schema_version; + + return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames( + $self->storage->sqlt_type, + $version, + )); } -sub prepare_install { +sub preinstall_scripts { my $self = shift; + my $version = shift || $self->schema_version; + + my @files = @{$self->_ddl_preinstall_consume_filenames( + $self->storage->sqlt_type, + $version, + )}; + + for my $filename (@files) { + # We ignore sql for now (till I figure out what to do with it) + if ( $filename =~ /^(.+)\.pl$/ ) { + my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; + + no warnings 'redefine'; + my $fn = eval "$filedata"; + use warnings; + + if ($@) { + carp "$filename failed to compile: $@"; + } elsif (ref $fn eq 'CODE') { + $fn->() + } else { + carp "$filename should define an anonymous sub but it didn't!"; + } + } else { + croak "A file ($filename) got to preinstall_scripts that wasn't sql or perl!"; + } + } +} + +sub _prepare_install { + my $self = shift; + my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; + my $to_file = shift; my $schema = $self->schema; my $databases = $self->databases; my $dir = $self->upgrade_directory; - my $sqltargs = $self->sqltargs; - my $version = $schema->schema_version; + my $version = $self->schema_version; my $sqlt = SQL::Translator->new({ add_drop_table => 1, @@ -190,7 +259,7 @@ sub prepare_install { $sqlt->{schema} = $sqlt_schema; $sqlt->producer($db); - my $filename = $self->_ddl_schema_produce_filename($db, $version, $dir); + my $filename = $self->$to_file($db, $version, $dir); if (-e $filename ) { carp "Overwriting existing DDL file - $filename"; unlink $filename; @@ -207,31 +276,55 @@ sub prepare_install { } } -sub prepare_upgrade { - my ($self, $from_version, $to_version, $version_set) = @_; +sub _resultsource_install_filename { + my ($self, $source_name) = @_; + return sub { + my ($self, $type, $version) = @_; + my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); + mkpath($dirname) unless -d $dirname; + + return catfile( $dirname, "001-auto-$source_name.sql" ); + } +} - $from_version ||= '1.0'; #$self->database_version; - $to_version ||= $self->schema_version; +sub install_resultsource { + my ($self, $source, $version) = @_; - # for updates prepared automatically (rob's stuff) - # one would want to explicitly set $version_set to - # [$to_version] - $version_set ||= [$from_version, $to_version]; + my $rs_install_file = + $self->_resultsource_install_filename($source->source_name); + my $files = [ + $self->$rs_install_file( + $self->storage->sqlt_type, + 
      $version,
+    )
+  ];
+  $self->_run_sql_and_perl($files);
+}
+
+sub prepare_resultsource_install {
+  my $self = shift;
+  my $source = shift;
+
+  my $filename = $self->_resultsource_install_filename($source->source_name);
+  $self->_prepare_install({
+      parser_args => { sources => [$source->source_name], }
+  }, $filename);
+}
+
+sub prepare_deploy {
+  my $self = shift;
+  $self->_prepare_install({}, '_ddl_schema_produce_filename');
+}
+
+sub prepare_upgrade {
+  my ($self, $from_version, $to_version, $version_set) = @_;
   $self->_prepare_changegrade($from_version, $to_version, $version_set, 'up');
 }
 
 sub prepare_downgrade {
   my ($self, $from_version, $to_version, $version_set) = @_;
-  $from_version ||= $self->db_version;
-  $to_version ||= $self->schema_version;
-
-  # for updates prepared automatically (rob's stuff)
-  # one would want to explicitly set $version_set to
-  # [$to_version]
-  $version_set ||= [$from_version, $to_version];
-
   $self->_prepare_changegrade($from_version, $to_version, $version_set, 'down');
 }
 
@@ -239,9 +332,9 @@ method _prepare_changegrade($from_version, $to_version, $version_set, $direction
   my $schema    = $self->schema;
   my $databases = $self->databases;
   my $dir       = $self->upgrade_directory;
-  my $sqltargs  = $self->sqltargs;
+  my $sqltargs  = $self->sql_translator_args;
 
-  my $schema_version = $schema->schema_version;
+  my $schema_version = $self->schema_version;
 
   $sqltargs = {
     add_drop_table => 1,
@@ -349,62 +442,256 @@ method _read_sql_file($file) {
 
 sub downgrade_single_step {
   my $self = shift;
-  my @version_set = @{ shift @_ };
-  my @downgrade_files = @{$self->_ddl_schema_down_consume_filenames(
-    $self->storage->sqlt_type,
-    \@version_set,
-  )};
+  my $version_set = shift @_;
 
-  for my $downgrade_file (@downgrade_files) {
-    $self->_filedata($self->_read_sql_file($downgrade_file)); # I don't like this --fREW 2010-02-22
+  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
+    $self->storage->sqlt_type,
+    $version_set,
+  ));
 
-    my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
-    $self->_do_upgrade;
-    $guard->commit if $self->txn_wrap;
-  }
+  return ['', $sql];
 }
 
 sub upgrade_single_step {
   my $self = shift;
-  my @version_set = @{ shift @_ };
-  my @upgrade_files = @{$self->_ddl_schema_up_consume_filenames(
-    $self->storage->sqlt_type,
-    \@version_set,
-  )};
+  my $version_set = shift @_;
 
-  my $upgrade_sql;
-  for my $upgrade_file (@upgrade_files) {
-    my $up = $self->_read_sql_file($upgrade_file);
-    $upgrade_sql .= $up;
-    $self->_filedata($up); # I don't like this --fREW 2010-02-22
-    my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
-    $self->_do_upgrade;
-    $guard->commit if $self->txn_wrap;
-  }
-  return ['', $upgrade_sql];
+  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
+    $self->storage->sqlt_type,
+    $version_set,
+  ));
+  return ['', $sql];
 }
 
-method _do_upgrade { $self->_run_upgrade(qr/.*?/) }
+__PACKAGE__->meta->make_immutable;
 
-method _run_upgrade($stm) {
-  my @statements = grep { $_ =~ $stm } @{$self->_filedata};
+1;
 
-  for (@statements) {
-    $self->storage->debugobj->query_start($_) if $self->storage->debug;
-    $self->_apply_statement($_);
-    $self->storage->debugobj->query_end($_) if $self->storage->debug;
-  }
-}
+# vim: ts=2 sw=2 expandtab
 
-method _apply_statement($statement) {
-  # croak?
-  $self->storage->dbh->do($_) or carp "SQL was: $_"
-}
+__END__
 
-__PACKAGE__->meta->make_immutable;
+=head1 DESCRIPTION
 
-1;
+This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes care of
+generating sql files representing schemata as well as sql files to move from
+one version of a schema to the next.  One of the hallmark features of this
+class is that it allows for multiple sql files for deploy and upgrade, allowing
+developers to fine-tune deployment.  In addition it also allows for perl files
+to be run at any stage of the process.
+
+For basic usage see L<DBIx::Class::DeploymentHandler>.  What's documented here
+is extra fun stuff or private methods.
+
+=head1 DIRECTORY LAYOUT
+
+Arguably this is the best feature of L<DBIx::Class::DeploymentHandler>.  It's
+heavily based upon L<DBIx::Class::Schema::Versioned>, but has some extensions
+and modifications, so even if you are familiar with it, please read this.  I
+feel like the best way to describe the layout is with the following example:
+
+ $sql_migration_dir
+ |- SQLite
+ |  |- down
+ |  |  `- 2-1
+ |  |     `- 001-auto.sql
+ |  |- schema
+ |  |  `- 1
+ |  |     `- 001-auto.sql
+ |  `- up
+ |     |- 1-2
+ |     |  `- 001-auto.sql
+ |     `- 2-3
+ |        `- 001-auto.sql
+ |- _common
+ |  |- down
+ |  |  `- 2-1
+ |  |     `- 002-remove-customers.pl
+ |  `- up
+ |     `- 1-2
+ |        `- 002-generate-customers.pl
+ |- _generic
+ |  |- down
+ |  |  `- 2-1
+ |  |     `- 001-auto.sql
+ |  |- schema
+ |  |  `- 1
+ |  |     `- 001-auto.sql
+ |  `- up
+ |     `- 1-2
+ |        |- 001-auto.sql
+ |        `- 002-create-stored-procedures.sql
+ `- MySQL
+    |- down
+    |  `- 2-1
+    |     `- 001-auto.sql
+    |- schema
+    |  `- 1
+    |     `- 001-auto.sql
+    `- up
+       `- 1-2
+          `- 001-auto.sql
+
+So basically, the code
+
+ $dm->deploy(1)
+
+on an C<SQLite> database would simply run
+C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>.  Next,
+
+ $dm->upgrade_single_step([1,2])
+
+would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
+C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
+
+Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
+the time it probably should be, since perl scripts will mostly be database
+independent.
+
+C<_generic> exists for when you for some reason are sure that your SQL is
+generic enough to run on all databases.  Good luck with that one.
+
+=head1 PERL SCRIPTS
+
+A perl script for this tool is very simple.  It merely needs to contain an
+anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
+A very basic perl script might look like:
+
+ #!perl
+
+ use strict;
+ use warnings;
+
+ sub {
+   my $schema = shift;
+
+   $schema->resultset('Users')->create({
+     name => 'root',
+     password => 'root',
+   })
+ }
+
+=attr schema
+
+The L<DBIx::Class::Schema> (B<required>) that is used to talk to the database
+and generate the DDL.
+
+=attr storage
+
+The L<DBIx::Class::Storage> that is I<actually> used to talk to the database
+and generate the DDL.  This is automatically created with L</_build_storage>.
+
+=attr sql_translator_args
+
+The arguments that get passed to L<SQL::Translator> when it's used.
+
+=attr upgrade_directory
+
+The directory (default C<'sql'>) that upgrades are stored in.
+
+=attr databases
+
+The types of databases (default C<< [qw( MySQL SQLite PostgreSQL )] >>) to
+generate files for.
+
+=attr txn_wrap
+
+Set to true (which is the default) to wrap all upgrades and deploys in a single
+transaction.
+
+=attr schema_version
+
+The version the schema on your hard drive is at.  Defaults to
+C<< $self->schema->schema_version >>.
+
+=method __ddl_consume_with_prefix
+
+ $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
+
+This is the meat of the multi-file upgrade/deploy stuff.  It returns a list of
+files in the order that they should be run for a generic "type" of upgrade.
+You should not be calling this in user code.
+
+=method _ddl_schema_consume_filenames
+
+ $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
+
+Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
+initial deploy.
+
+=method _ddl_schema_produce_filename
+
+ $dm->_ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
+
+Returns a single file in which an initial schema will be stored.
+
+=method _ddl_schema_up_consume_filenames
+
+ $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
+
+Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
+upgrade.
+
+=method _ddl_schema_down_consume_filenames
+
+ $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
+
+Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for a
+downgrade.
+
+=method _ddl_schema_up_produce_filename
+
+ $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
+
+Returns a single file in which the sql to upgrade from one schema to another
+will be stored.
+
+=method _ddl_schema_down_produce_filename
+
+ $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
+
+Returns a single file in which the sql to downgrade from one schema to another
+will be stored.
+
+=method _resultsource_install_filename
+
+ my $filename_fn = $dm->_resultsource_install_filename('User');
+ $dm->$filename_fn('SQLite', '1.00')
+
+Returns a function which in turn returns a single filename used to install a
+single resultsource.  Weird interface is convenient for me.  Deal with it.
+
+=method _run_sql_and_perl
+
+ $dm->_run_sql_and_perl([qw( list of filenames )])
+
+Simply put, this runs the list of files passed to it.  If the file ends in
+C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file.
+
+Depending on L</txn_wrap>, all of the files run will be wrapped in a single
+transaction.
+
+=method _prepare_install
+
+ $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
+
+Generates the sql file for installing the database.  First arg is simply
+L<SQL::Translator> args and the second is a coderef that returns the filename
+to store the sql in.
+
+=method _prepare_changegrade
+
+ $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01 )], 'up')
+
+Generates the sql file for migrating from one schema version to another.  First
+arg is the version to start from, second is the version to go to, third is the
+version set, and last is the direction of the changegrade, be it 'up' or
+'down'.
+
+=method _read_sql_file
+
+ $dm->_read_sql_file('foo.sql')
+
+Reads a sql file and returns lines in an C<ArrayRef>.  Strips out comments,
+transactions, and blank lines.
 
-vim: ts=2 sw=2 expandtab
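To tie the documented pieces together, here is a hedged sketch of driving this
deploy method directly.  The schema class (My::Schema), the connection string,
and the specific attribute values are assumptions made for illustration; in
normal use DBIx::Class::DeploymentHandler constructs and drives this object for
you.

  #!perl
  use strict;
  use warnings;

  use DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator;
  use My::Schema;   # hypothetical application schema class

  my $schema = My::Schema->connect('dbi:SQLite:dbname=my.db');

  my $dm = DBIx::Class::DeploymentHandler::DeployMethod::SQL::Translator->new({
    schema              => $schema,                    # required
    databases           => [qw( SQLite PostgreSQL )],
    upgrade_directory   => 'sql',
    sql_translator_args => { add_drop_table => 0 },
    txn_wrap            => 1,
  });

  # write sql/<database>/schema/<version>/001-auto.sql for each listed database
  $dm->prepare_deploy;

  # run the deploy files (plus any matching _common perl scripts) against the
  # connected database, wrapped in one transaction because txn_wrap is set
  $dm->deploy;

Given such an object and the example tree from DIRECTORY LAYOUT, the merged
file list consumed for an SQLite 1-2 upgrade would look roughly like the sketch
below.  The paths assume upgrade_directory is left at its default of 'sql', and
the call is shown only to illustrate ordering, since this method is not meant
to be called from user code.

  my $files = $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1 2 )], 'up' );

  # $files is an ArrayRef along the lines of:
  # [
  #   'sql/SQLite/up/1-2/001-auto.sql',
  #   'sql/_common/up/1-2/002-generate-customers.pl',
  # ]
  #
  # Database-specific files shadow _common files with the same name, and the
  # merged list is sorted by filename, which produces the 001-, 002- run order.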