use autodie;
use Carp qw( carp croak );
-use Log::Contextual::WarnLogger;
-use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
- env_prefix => 'DBICDH'
-});
-use Data::Dumper::Concise;
+use DBIx::Class::DeploymentHandler::Logger;
+use Log::Contextual qw(:log :dlog), -default_logger =>
+ DBIx::Class::DeploymentHandler::Logger->new({
+ env_prefix => 'DBICDH'
+ });
use Method::Signatures::Simple;
use Try::Tiny;
# is built the same way, but we leave this in place
# Default builder for the schema_version attribute: ask the connected
# schema object for its declared version.
method _build_schema_version { $self->schema->schema_version }
-has _json => (
- is => 'ro',
- lazy_build => 1,
-);
-
-sub _build__json { require JSON; JSON->new->pretty }
-
# Locate version-specific DDL files under script_directory for a given
# database $type and $prefix subdirectory.
#
# NOTE(review): this hunk looks truncated by the surrounding diff — the
# visible body ignores $versions/$prefix and calls itself with an
# undeclared $version, which cannot be the complete implementation.
# Recover the full method from upstream before relying on this view.
method __ddl_consume_with_prefix($type, $versions, $prefix) {
  my $base_dir = $self->script_directory;
  $self->__ddl_consume_with_prefix($type, [ $version ], 'schema')
}
# Return the path of the YAML "protoschema" file for schema $version,
# creating <script_directory>/_protoschema/<version> on first use.
method _ddl_protoschema_produce_filename($version) {
  my $dirname = catfile( $self->script_directory, '_protoschema', $version );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.yml' );
}
+
# Return the path of the generated deploy DDL file for database $type at
# schema $version, creating <script_directory>/<type>/schema/<version>
# on demand.
method _ddl_schema_produce_filename($type, $version) {
  my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($dirname) unless -d $dirname;
  return catfile( $dirname, '001-auto.sql' );
}
# Return the path of the upgrade DDL file for database $type covering the
# version pair in $versions (e.g. [1, 2] -> "1-2"), creating the directory
# on demand.
#
# NOTE(review): the original hunk used $dir without declaring it; sibling
# methods root their paths at $self->script_directory, so we do the same
# here — confirm against the unelided upstream source.
method _ddl_schema_up_consume_filenames($type, $versions) {
  my $dir = $self->script_directory;
  my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
  mkpath($dirname) unless -d $dirname;
  return catfile( $dirname, '001-auto.sql' );
}
# Return the path of the downgrade DDL file for database $type covering the
# version pair in $versions, rooted at $dir, creating the directory on
# demand.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($dirname) unless -d $dirname;
  return catfile( $dirname, '001-auto.sql');
}
# Execute each statement in the arrayref $sql against the storage, after
# stripping comments, blank lines, and transaction wrappers (transactions
# are managed by the caller).  Statements run one at a time via dbh_do so
# a failure reports exactly which line broke.  Returns the cleaned SQL as
# one string so callers can log/accumulate it.
#
# NOTE(review): parts of this method were elided by the surrounding diff;
# the filter head, the $storage initialization, the ';' terminating the
# Try::Tiny try/catch, and the final return were reconstructed from the
# visible fragment and its callers — confirm against upstream.
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  Dlog_trace { "Running SQL $_" } $sql;
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    # the whole reason we do this is so that we can see the line that was run
    try {
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      die "$_ (running line '$line')"
    };
    $storage->_query_end($line);
  }
  return join "\n", @$sql;
}
# Read the SQL statements from $filename and execute them, returning the
# cleaned SQL text that was run (see _run_sql_array).
method _run_sql($filename) {
  log_debug { "Running SQL from $filename" };
  return $self->_run_sql_array($self->_read_sql_file($filename));
}
# Compile the Perl migration script in $filename.  The file is slurped and
# string-eval'd; a compile failure is reported via carp rather than being
# fatal.
#
# NOTE(review): in this diff-elided view the eval'd value $fn is logged but
# never invoked — the invocation branch was presumably cut by the diff;
# confirm against upstream.
method _run_perl($filename) {
  log_debug { "Running Perl from $filename" };
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

  # scripts commonly redefine helper subs; silence that deliberately
  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  Dlog_trace { "Running Perl $_" } $fn;

  if ($@) {
    carp "$filename failed to compile: $@";
  }
}
-method _run_serialized_sql($filename, $type) {
- if (lc $type eq 'json') {
- return $self->_run_sql_array($self->_json->decode(
- do { local( @ARGV, $/ ) = $filename; <> } # slurp
- ))
- } else {
- croak "$type is not one of the supported serialzed types"
- }
-}
-
# Dispatch each migration file to the matching runner by extension:
# *.sql -> _run_sql, *.pl -> _run_perl.  When txn_wrap is set the whole
# batch runs inside a single transaction scope guard.
#
# NOTE(review): this hunk is truncated by the surrounding diff — $sql is
# used without a visible declaration and the trailing else branch is cut
# off; the "-" lines are the removed sql-json dispatch arm.  Also note
# "my $guard = ... if ..." (conditional my) is an anti-idiom worth fixing
# once the full sub is visible.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};
  my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
-    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
-      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
# Deploy the schema at the requested version (or the schema's current
# version) by running the per-database-type deploy files.
#
# NOTE(review): truncated by the surrounding diff — the argument list to
# _ddl_schema_consume_filenames and the sub's closing brace are cut off.
sub deploy {
  my $self = shift;
  my $version = (shift @_ || {})->{version} || $self->schema_version;
- log_info { "[DBICDH] deploying version $version" };
+ log_info { "deploying version $version" };
  return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
# NOTE(review): fragment — the enclosing "sub preinstall {" header was
# elided by the surrounding diff; only the prologue is visible.  It
# resolves the target version and storage type, then collects the
# preinstall script filenames.
my $self = shift;
my $args = shift;
my $version = $args->{version} || $self->schema_version;
- log_info { "[DBICDH] preinstalling version $version" };
+ log_info { "preinstalling version $version" };
my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
my @files = @{$self->_ddl_preinstall_consume_filenames(
# Generate per-database DDL files for the current schema version by
# translating the YAML protoschema.
#
# args: overrides-hashref (merged over sql_translator_args),
#       $from_file — name of the method yielding the source YAML filename,
#       $to_file   — name of the method yielding the target DDL filename.
#
# NOTE(review): portions of this sub were elided by the surrounding diff;
# the visible post-image had `next` outside the foreach and used $dir
# undeclared.  The translate/write steps were moved inside the
# per-database loop and the file write restored to mirror
# _generate_protoschema — confirm against upstream.
sub _prepare_install {
  my $self      = shift;
  my $sqltargs  = { %{$self->sql_translator_args}, %{shift @_} };
  my $from_file = shift;
  my $to_file   = shift;
  my $schema    = $self->schema;
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $version   = $self->schema_version;

  my $sqlt = SQL::Translator->new({
    add_drop_table => 1,
    parser         => 'SQL::Translator::Parser::YAML',
    %{$sqltargs}
  });

  my $yaml_filename = $self->$from_file($version);

  foreach my $db (@$databases) {
    $sqlt->reset;
    $sqlt->producer($db);

    my $filename = $self->$to_file($db, $version, $dir);
    unlink $filename;

    my $sql = $sqlt->translate($yaml_filename);
    if(!$sql) {
      carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
      next;
    }

    open my $file, q(>), $filename;
    print {$file} $sql;
    close $file;
  }
}
-method _generate_final_sql($sqlt) {
- my @output = $sqlt->translate;
- $self->_json->encode(\@output);
-}
-
# Return a closure that computes (and creates the directory for) the
# install DDL filename of a single result source for a database $type and
# schema $version.
#
# NOTE(review): the closure's argument unpacking was elided by the diff —
# $type/$version were used undeclared; restored to mirror
# _resultsource_protoschema_filename.  Confirm against upstream.
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;
  return sub {
    my ($self, $type, $version) = @_;
    my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($dirname) unless -d $dirname;

    return catfile( $dirname, "001-auto-$source_name.sql" );
  }
}
+
# Return a closure that computes (and creates the directory for) the YAML
# protoschema filename of a single result source at a given $version.
sub _resultsource_protoschema_filename {
  my ($self, $source_name) = @_;
  return sub {
    my ($self, $version) = @_;
    my $dirname = catfile( $self->script_directory, '_protoschema', $version );
    mkpath($dirname) unless -d $dirname;

    return catfile( $dirname, "001-auto-$source_name.yml" );
  }
}
# NOTE(review): fragment — the enclosing "sub install_resultsource {"
# header was elided by the surrounding diff; only the prologue is visible.
# It unpacks the result source and version, then resolves the per-source
# install file.
my ($self, $args) = @_;
my $source = $args->{result_source};
my $version = $args->{version};
- log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
+ log_info { 'installing_resultsource ' . $source->source_name . ", version $version" };
my $rs_install_file =
  $self->_resultsource_install_filename($source->source_name);
# Prepare install files for a single result source: dump its protoschema
# (YAML) restricted to that source, then translate the YAML into the
# per-database install DDL.
sub prepare_resultsource_install {
  my $self = shift;
  my $source = (shift @_)->{result_source};
  log_info { 'preparing install for resultsource ' . $source->source_name };

  my $install_filename = $self->_resultsource_install_filename($source->source_name);
  my $proto_filename = $self->_resultsource_protoschema_filename($source->source_name);

  # restrict the dumped schema to just this result source
  $self->_generate_protoschema({
    parser_args => { sources => [$source->source_name], }
  }, $proto_filename);
  $self->_prepare_install({}, $proto_filename, $install_filename);
}
# Prepare a full deploy: dump the current schema to the YAML protoschema,
# then translate it into deploy DDL for each configured database.
sub prepare_deploy {
  log_info { 'preparing deploy' };
  my $self = shift;
  $self->_generate_protoschema({}, '_ddl_protoschema_produce_filename');
  $self->_prepare_install({}, '_ddl_protoschema_produce_filename', '_ddl_schema_produce_filename');
}
# Prepare the upgrade scripts moving from one schema version to another.
#
# NOTE(review): the close of the _prepare_changegrade call was elided by
# the surrounding diff; restored to mirror prepare_downgrade.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing upgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
  );
}
# Prepare the downgrade scripts moving from one schema version back to
# another.
#
# NOTE(review): the close of the _prepare_changegrade call was elided by
# the surrounding diff; restored to mirror prepare_upgrade.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing downgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
  );
}
# Build the SQL diff files that move a schema between $from_version and
# $to_version in the given $direction ('up' or 'down'), one file per
# configured database.  Both endpoint schemata are read from their YAML
# protoschema files and diffed via SQL::Translator::Diff.
#
# NOTE(review): the sub header and variable prologue were elided by the
# surrounding diff; the signature is reconstructed from the call sites in
# prepare_upgrade/prepare_downgrade, and $databases/$dir/$sqltargs from
# the accessors used by sibling subs — confirm against upstream.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
  my $databases = $self->databases;
  my $dir       = $self->script_directory;
  my $sqltargs  = $self->sql_translator_args;

  $sqltargs = {
    add_drop_table => 1,
    ignore_constraint_names => 1,
    ignore_index_names => 1,
    %{$sqltargs}
  };

  my $diff_file_method = "_ddl_schema_${direction}_produce_filename";

  # parse the "from" endpoint out of its protoschema YAML
  my $source_schema;
  {
    my $prefilename = $self->_ddl_protoschema_produce_filename($from_version, $dir);

    # should probably be a croak
    carp("No previous schema file found ($prefilename)")
      unless -e $prefilename;

    my $t = SQL::Translator->new({
      %{$sqltargs},
      debug => 0,
      trace => 0,
      parser => 'SQL::Translator::Parser::YAML',
    });

    my $out = $t->translate( $prefilename )
      or croak($t->error);

    $source_schema = $t->schema;

    $source_schema->name( $prefilename )
      unless $source_schema->name;
  }

  # parse the "to" endpoint out of its protoschema YAML
  my $dest_schema;
  {
    my $filename = $self->_ddl_protoschema_produce_filename($to_version, $dir);

    # should probably be a croak
    carp("No next schema file found ($filename)")
      unless -e $filename;

    my $t = SQL::Translator->new({
      %{$sqltargs},
      debug => 0,
      trace => 0,
      parser => 'SQL::Translator::Parser::YAML',
    });

    my $out = $t->translate( $filename )
      or croak($t->error);

    $dest_schema = $t->schema;

    $dest_schema->name( $filename )
      unless $dest_schema->name;
  }

  # emit one diff file per database type
  foreach my $db (@$databases) {
    my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
    if(-e $diff_file) {
      carp("Overwriting existing $direction-diff file - $diff_file");
      unlink $diff_file;
    }

    my $diff = SQL::Translator::Diff::schema_diff(
      $source_schema, $db,
      $dest_schema, $db,
      $sqltargs
    );
    open my $file, q(>), $diff_file;
    print {$file} $diff;
    close $file;
  }
}
-method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
- $self->_json->encode([
- SQL::Translator::Diff::schema_diff(
- $source_schema, $db,
- $dest_schema, $db,
- $sqltargs
- )
- ])
-}
-
# Read $file and split it into individual SQL statements (on ";\n"),
# trimming whitespace and dropping comment lines, blank entries, and
# transaction wrappers (transactions are managed by this module).
# Returns an arrayref of statements, or nothing when no file is given.
#
# NOTE(review): the open() of $fh was elided by the surrounding diff;
# restored as a plain three-arg open — `use autodie` at the top of the
# file makes it fatal on failure.
method _read_sql_file($file) {
  return unless $file;

  open my $fh, '<', $file;
  my @data = split /;\n/, join '', <$fh>;
  close $fh;

  @data = grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @data;

  return \@data;
}
# Run the downgrade scripts for a single step of $version_set (e.g. [2, 1]).
#
# NOTE(review): the tail of this sub was elided by the surrounding diff;
# the argument-list close and return were restored to mirror
# upgrade_single_step — confirm against upstream.
sub downgrade_single_step {
  my $self = shift;
  my $version_set = (shift @_)->{version_set};
  Dlog_info { "downgrade_single_step'ing $_" } $version_set;

  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  ));

  return ['', $sql];
}
# Run the upgrade scripts for a single step of $version_set (e.g. [1, 2]).
#
# NOTE(review): the close of the consume-filenames argument list was
# elided by the surrounding diff; restored from the visible return —
# confirm against upstream.
sub upgrade_single_step {
  my $self = shift;
  my $version_set = (shift @_)->{version_set};
  Dlog_info { "upgrade_single_step'ing $_" } $version_set;

  my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
    $self->storage->sqlt_type,
    $version_set,
  ));

  return ['', $sql];
}
# Dump the current schema to a YAML "protoschema" file, which later steps
# translate into per-database DDL and diffs.
#
# args: overrides-hashref (merged over sql_translator_args; callers use it
#       to restrict parser_args to single sources),
#       $to_file — name of the method yielding the target filename.
sub _generate_protoschema {
  my $self = shift;
  my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
  my $to_file = shift;
  my $filename
    = $self->$to_file($self->schema_version);

  # we do this because the code that uses this sets parser args,
  # so we just need to merge in the package
  $sqltargs->{parser_args}{package} = $self->schema;
  my $sqlt = SQL::Translator->new({
    parser => 'SQL::Translator::Parser::DBIx::Class',
    producer => 'SQL::Translator::Producer::YAML',
    %{ $sqltargs },
  });

  my $yml = $sqlt->translate;

  croak("Failed to translate to YAML: " . $sqlt->error)
    unless $yml;

  if (-e $filename ) {
    carp "Overwriting existing DDL-YML file - $filename";
    unlink $filename;
  }

  open my $file, q(>), $filename;
  print {$file} $yml;
  close $file;
}
+
__PACKAGE__->meta->make_immutable;
1;
=head1 DESCRIPTION
This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes care
of generating serialized schemata as well as sql files to move from one
version of a schema to the next. One of the hallmark features of this class
is that it allows for multiple sql files for deploy and upgrade, allowing
developers to fine tune deployment. In addition it also allows for perl
files to be run at any stage of the process.
For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
documented here is extra fun stuff or private methods.
|- SQLite
| |- down
| | `- 2-1
| | `- 001-auto.sql
| |- schema
| | `- 1
| | `- 001-auto.sql
| `- up
| |- 1-2
| | `- 001-auto.sql
| `- 2-3
| `- 001-auto.sql
|- _common
| |- down
| | `- 2-1
|- _generic
| |- down
| | `- 2-1
| | `- 001-auto.sql
| |- schema
| | `- 1
| | `- 001-auto.sql
| `- up
| `- 1-2
| |- 001-auto.sql
| `- 002-create-stored-procedures.sql
`- MySQL
|- down
| `- 2-1
| `- 001-auto.sql
|- preinstall
| `- 1
| |- 001-create_database.pl
| `- 002-create_users_and_permissions.pl
|- schema
| `- 1
| `- 001-auto.sql
`- up
`- 1-2
`- 001-auto.sql
So basically, the code
$dm->deploy(1)
on an C<SQLite> database that would simply run
C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>. Next,
$dm->upgrade_single_step([1,2])
would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
C<.pl> files don't have to be in the C<_common> directory, but most of the time
of preinstall is to have it prompt for username and password, and then call the
appropriate C<< CREATE DATABASE >> commands etc.
-=head1 SERIALIZED SQL
-
-The SQL that this module generates and uses is serialized into an array of
-SQL statements. The reason being that some databases handle multiple
-statements in a single execution differently. Generally you do not need to
-worry about this as these are scripts generated for you. If you find that
-you are editing them on a regular basis something is wrong and you either need
-to submit a bug or consider writing extra serialized SQL or Perl scripts to run
-before or after the automatically generated script.
-
-B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in
-patches that will allow more serialization formats if you want that feature,
-but if you do send me a patch for that realize that I do not want to add YAML
-support or whatever, I would rather add a generic method of adding any
-serialization format.
-
=head1 PERL SCRIPTS
A perl script for this tool is very simple. It merely needs to contain an