use autodie;
use Carp qw( carp croak );
+use DBIx::Class::DeploymentHandler::Logger;
+use Log::Contextual qw(:log :dlog), -default_logger =>
+ DBIx::Class::DeploymentHandler::Logger->new({
+ env_prefix => 'DBICDH'
+ });
use Method::Signatures::Simple;
use Try::Tiny;
is => 'ro',
default => sub { {} },
);
-has upgrade_directory => (
+has script_directory => (
isa => 'Str',
is => 'ro',
required => 1,
has schema_version => (
is => 'ro',
+ isa => 'Str',
lazy_build => 1,
);
+# this will probably never get called as the DBICDH
+# will be passing down a schema_version normally, which
+# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }
+# Private JSON codec used to (de)serialize the SQL statement arrays
+# stored in the .sql-json files this deploy method reads and writes.
+has _json => (
+ is => 'ro',
+ lazy_build => 1,
+);
+
+# Lazy builder: load JSON only on first use, and enable pretty-printing
+# so the generated files stay human-readable (and hand-editable).
+sub _build__json { require JSON; JSON->new->pretty }
+
method __ddl_consume_with_prefix($type, $versions, $prefix) {
- my $base_dir = $self->upgrade_directory;
+ my $base_dir = $self->script_directory;
my $main = catfile( $base_dir, $type );
my $generic = catfile( $base_dir, '_generic' );
}
opendir my($dh), $dir;
- my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
+ my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
closedir $dh;
if (-d $common) {
}
+# Returns (creating the directory first if needed) the path of the file
+# that will hold the serialized initial-deploy DDL for $type at $version.
method _ddl_schema_produce_filename($type, $version) {
- my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version );
+ my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql' );
+ return catfile( $dirname, '001-auto.sql-json' );
}
method _ddl_schema_up_consume_filenames($type, $versions) {
}
+# Returns (creating the directory first if needed) the path of the file
+# that will hold the serialized upgrade DDL for $type across $versions
+# (a two-element from/to version set, joined with '-').
method _ddl_schema_up_produce_filename($type, $versions) {
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql'
- );
+ return catfile( $dirname, '001-auto.sql-json' );
}
+# Downgrade counterpart of _ddl_schema_up_produce_filename; note this one
+# takes the base $dir as an argument rather than reading the attribute.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql');
+ return catfile( $dirname, '001-auto.sql-json');
}
-method _run_sql_and_perl($filenames) {
- my @files = @{$filenames};
+# Normalize an arrayref of SQL statements (trim whitespace, drop '--'
+# comment lines, blank entries, and BEGIN/COMMIT since txn handling is
+# done by txn_wrap), then execute each statement against the storage.
+# Dies on the first failing statement, with that statement appended to
+# the error; returns the normalized statements joined with newlines so
+# callers can accumulate the SQL that was run.
+method _run_sql_array($sql) {
my $storage = $self->storage;
+ $sql = [grep {
+ $_ && # remove blank lines
+ !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
+ } map {
+ s/^\s+//; s/\s+$//; # trim whitespace
+ join '', grep { !/^--/ } split /\n/ # remove comments
+ } @$sql];
+
+ Dlog_trace { "[DBICDH] Running SQL $_" } $sql;
+ foreach my $line (@{$sql}) {
+ $storage->_query_start($line);
+ # the whole reason we do this is so that we can see the line that was run
+ try {
+ $storage->dbh_do (sub { $_[1]->do($line) });
+ }
+ catch {
+ die "$_ (running line '$line')"
+ }
+ $storage->_query_end($line);
+ }
+ return join "\n", @$sql
+}
- my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+# Run a plain .sql file: read and split it into statements via
+# _read_sql_file, then delegate execution to _run_sql_array.
method _run_sql($filename) {
+ log_debug { "[DBICDH] Running SQL from $filename" };
+ return $self->_run_sql_array($self->_read_sql_file($filename));
+}
- my $sql;
- for my $filename (@files) {
- if ($filename =~ /\.sql$/) {
- my @sql = @{$self->_read_sql_file($filename)};
- $sql .= join "\n", @sql;
-
- foreach my $line (@sql) {
- $storage->_query_start($line);
- try {
- # do a dbh_do cycle here, as we need some error checking in
- # place (even though we will ignore errors)
- $storage->dbh_do (sub { $_[1]->do($line) });
- }
- catch {
- carp "$_ (running '${line}')"
- }
- $storage->_query_end($line);
- }
- } elsif ( $filename =~ /^(.+)\.pl$/ ) {
- my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
+# Run a .pl migration script: slurp the file, string-eval it (the file
+# is expected to define an anonymous sub), and call the resulting
+# coderef with the schema.  Compile failures and non-coderef results
+# only warn (carp) rather than die, so a bad script does not abort the
+# whole run.
+method _run_perl($filename) {
+ log_debug { "[DBICDH] Running Perl from $filename" };
+ my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
- no warnings 'redefine';
- my $fn = eval "$filedata";
- use warnings;
+ no warnings 'redefine';
+ # NOTE(review): string eval of an on-disk script — these files are
+ # trusted deployment artifacts by design, not external input.
+ my $fn = eval "$filedata";
+ use warnings;
+ Dlog_trace { "[DBICDH] Running Perl $_" } $fn;
- if ($@) {
- carp "$filename failed to compile: $@";
- } elsif (ref $fn eq 'CODE') {
- $fn->($self->schema)
- } else {
- carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
- }
+ if ($@) {
+ carp "$filename failed to compile: $@";
+ } elsif (ref $fn eq 'CODE') {
+ $fn->($self->schema)
+ } else {
+ carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
+ }
+}
+
+# Run a serialized SQL file (the ".sql-<type>" suffix supplies $type).
+# Currently only JSON is supported: the file is slurped, decoded into
+# an arrayref of statements, and handed to _run_sql_array; any other
+# $type croaks.
+method _run_serialized_sql($filename, $type) {
+ if (lc $type eq 'json') {
+ return $self->_run_sql_array($self->_json->decode(
+ do { local( @ARGV, $/ ) = $filename; <> } # slurp
+ ))
+ } else {
+ croak "$type is not one of the supported serialzed types"
+ }
+}
+
+method _run_sql_and_perl($filenames) {
+ my @files = @{$filenames};
+ my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+
+ my $sql = '';
+ for my $filename (@files) {
+ if ($filename =~ /\.sql$/) {
+ $sql .= $self->_run_sql($filename)
+ } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
+ $sql .= $self->_run_serialized_sql($filename, $1)
+ } elsif ( $filename =~ /\.pl$/ ) {
+ $self->_run_perl($filename)
} else {
croak "A file ($filename) got to deploy that wasn't sql or perl!";
}
sub deploy {
my $self = shift;
my $version = (shift @_ || {})->{version} || $self->schema_version;
+ log_info { "[DBICDH] deploying version $version" };
return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
$self->storage->sqlt_type,
my $self = shift;
my $args = shift;
my $version = $args->{version} || $self->schema_version;
+ log_info { "[DBICDH] preinstalling version $version" };
my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
my @files = @{$self->_ddl_preinstall_consume_filenames(
my $to_file = shift;
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $version = $self->schema_version;
my $sqlt = SQL::Translator->new({
+ no_comments => 1,
add_drop_table => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
unlink $filename;
}
- my $output = $sqlt->translate;
- if(!$output) {
+ my $sql = $self->_generate_final_sql($sqlt);
+ if(!$sql) {
carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
next;
}
open my $file, q(>), $filename;
- print {$file} $output;
+ print {$file} $sql;
close $file;
}
}
+# Translate the schema with the prepared SQL::Translator object in list
+# context (presumably yielding one statement per element — confirm
+# against the producer in use) and serialize the list as a JSON array.
+method _generate_final_sql($sqlt) {
+ my @output = $sqlt->translate;
+ $self->_json->encode(\@output);
+}
+
sub _resultsource_install_filename {
my ($self, $source_name) = @_;
return sub {
my ($self, $type, $version) = @_;
- my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version );
+ my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, "001-auto-$source_name.sql" );
+ return catfile( $dirname, "001-auto-$source_name.sql-json" );
}
}
my ($self, $args) = @_;
my $source = $args->{result_source};
my $version = $args->{version};
+ log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
my $rs_install_file =
$self->_resultsource_install_filename($source->source_name);
sub prepare_resultsource_install {
my $self = shift;
my $source = (shift @_)->{result_source};
+ log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };
my $filename = $self->_resultsource_install_filename($source->source_name);
$self->_prepare_install({
}
sub prepare_deploy {
+ log_info { '[DBICDH] preparing deploy' };
my $self = shift;
$self->_prepare_install({}, '_ddl_schema_produce_filename');
}
sub prepare_upgrade {
my ($self, $args) = @_;
+ log_info {
+ '[DBICDH] preparing upgrade ' .
+ "from $args->{from_version} to $args->{to_version}"
+ };
$self->_prepare_changegrade(
$args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
);
sub prepare_downgrade {
my ($self, $args) = @_;
+ log_info {
+ '[DBICDH] preparing downgrade ' .
+ "from $args->{from_version} to $args->{to_version}"
+ };
$self->_prepare_changegrade(
$args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
);
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $sqltargs = $self->sql_translator_args;
my $schema_version = $self->schema_version;
$sqltargs = {
add_drop_table => 1,
+ no_comments => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
%{$sqltargs}
$t->parser( $db ) # could this really throw an exception?
or croak($t->error);
- my $out = $t->translate( $prefilename )
+ my $sql = $self->_default_read_sql_file_as_string($prefilename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$source_schema = $t->schema;
or croak($t->error);
my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
- my $out = $t->translate( $filename )
+ my $sql = $self->_default_read_sql_file_as_string($filename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$dest_schema = $t->schema;
unless $dest_schema->name;
}
- my $diff = SQL::Translator::Diff::schema_diff(
- $source_schema, $db,
- $dest_schema, $db,
- $sqltargs
- );
open my $file, q(>), $diff_file;
- print {$file} $diff;
+ print {$file}
+ $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
close $file;
}
}
+# Compute the changegrade SQL between two schema versions via
+# SQL::Translator::Diff and serialize the resulting statement list as a
+# JSON array, ready to be written to a .sql-json file.
+method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
+ $self->_json->encode([
+ SQL::Translator::Diff::schema_diff(
+ $source_schema, $db,
+ $dest_schema, $db,
+ $sqltargs
+ )
+ ])
+}
+
method _read_sql_file($file) {
return unless $file;
my @data = split /;\n/, join '', <$fh>;
close $fh;
- @data = grep {
- $_ && # remove blank lines
- !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
- } map {
- s/^\s+//; s/\s+$//; # trim whitespace
- join '', grep { !/^--/ } split /\n/ # remove comments
- } @data;
-
return \@data;
}
+# Slurp a serialized (.sql-json) file and rebuild it as a single string
+# of ";\n"-terminated statements — the plain-SQL form that
+# SQL::Translator's parser expects when re-reading a generated file.
+method _default_read_sql_file_as_string($file) {
+ return join q(), map "$_;\n", @{$self->_json->decode(
+ do { local( @ARGV, $/ ) = $file; <> } # slurp
+ )};
+}
+
sub downgrade_single_step {
my $self = shift;
my $version_set = (shift @_)->{version_set};
+ Dlog_info { qq([DBICDH] downgrade_single_step'ing $_) } $version_set;
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
$self->storage->sqlt_type,
sub upgrade_single_step {
my $self = shift;
my $version_set = (shift @_)->{version_set};
+ Dlog_info { qq([DBICDH] upgrade_single_step'ing $_) } $version_set;
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
$self->storage->sqlt_type,
=head1 DESCRIPTION
-This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes care of
-generating sql files representing schemata as well as sql files to move from
-one version of a schema to the rest. One of the hallmark features of this
-class is that it allows for multiple sql files for deploy and upgrade, allowing
-developers to fine tune deployment. In addition it also allows for perl files
-to be run at any stage of the process.
+This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes
+care of generating serialized sql files representing schemata as well
+as serialized sql files to move from one version of a schema to the next.
+One of the hallmark features of this class is that it allows for multiple sql
+files for deploy and upgrade, allowing developers to fine tune deployment.
+In addition it also allows for perl files to be run
+at any stage of the process.
For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
documented here is extra fun stuff or private methods.
|- SQLite
| |- down
| | `- 2-1
- | | `- 001-auto.sql
+ | | `- 001-auto.sql-json
| |- schema
| | `- 1
- | | `- 001-auto.sql
+ | | `- 001-auto.sql-json
| `- up
| |- 1-2
- | | `- 001-auto.sql
+ | | `- 001-auto.sql-json
| `- 2-3
- | `- 001-auto.sql
+ | `- 001-auto.sql-json
|- _common
| |- down
| | `- 2-1
|- _generic
| |- down
| | `- 2-1
- | | `- 001-auto.sql
+ | | `- 001-auto.sql-json
| |- schema
| | `- 1
- | | `- 001-auto.sql
+ | | `- 001-auto.sql-json
| `- up
| `- 1-2
- | |- 001-auto.sql
+ | |- 001-auto.sql-json
| `- 002-create-stored-procedures.sql
`- MySQL
|- down
| `- 2-1
- | `- 001-auto.sql
+ | `- 001-auto.sql-json
|- preinstall
| `- 1
| |- 001-create_database.pl
| `- 002-create_users_and_permissions.pl
|- schema
| `- 1
- | `- 001-auto.sql
+ | `- 001-auto.sql-json
`- up
`- 1-2
- `- 001-auto.sql
+ `- 001-auto.sql-json
So basically, the code
$dm->deploy(1)
on an C<SQLite> database that would simply run
-C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>. Next,
+C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>. Next,
$dm->upgrade_single_step([1,2])
-would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
+would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
-Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
-the time it probably should be, since perl scripts will mostly be database
-independent.
+C<.pl> files don't have to be in the C<_common> directory, but most of the time
+they should be, because perl scripts are generally database independent.
C<_generic> exists for when you for some reason are sure that your SQL is
generic enough to run on all databases. Good luck with that one.
of preinstall is to have it prompt for username and password, and then call the
appropriate C<< CREATE DATABASE >> commands etc.
+=head1 SERIALIZED SQL
+
+The SQL that this module generates and uses is serialized into an array of
+SQL statements. The reason is that some databases handle multiple
+statements in a single execution differently. Generally you do not need to
+worry about this, as these scripts are generated for you. If you find that
+you are editing them on a regular basis something is wrong and you either need
+to submit a bug or consider writing extra serialized SQL or Perl scripts to run
+before or after the automatically generated script.
+
+B<NOTE:> Currently the SQL is serialized into JSON. I am willing to merge in
+patches that will allow more serialization formats if you want that feature,
+but if you do send me a patch for that, realize that I do not want to add YAML
+support or whatever, I would rather add a generic method of adding any
+serialization format.
+
=head1 PERL SCRIPTS
A perl script for this tool is very simple. It merely needs to contain an
The arguments that get passed to L<SQL::Translator> when it's used.
-=attr upgrade_directory
+=attr script_directory
-The directory (default C<'sql'>) that upgrades are stored in
+The directory (default C<'sql'>) that scripts are stored in
=attr databases
The version the schema on your harddrive is at. Defaults to
C<< $self->schema->schema_version >>.
-=method __ddl_consume_with_prefix
+=begin comment
+
+=head2 __ddl_consume_with_prefix
$dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
files in the order that they should be run for a generic "type" of upgrade.
You should not be calling this in user code.
-=method _ddl_schema_consume_filenames
+=head2 _ddl_schema_consume_filenames
$dm->__ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for an
initial deploy.
-=method _ddl_schema_produce_filename
+=head2 _ddl_schema_produce_filename
$dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
Returns a single file in which an initial schema will be stored.
-=method _ddl_schema_up_consume_filenames
+=head2 _ddl_schema_up_consume_filenames
$dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for an
upgrade.
-=method _ddl_schema_down_consume_filenames
+=head2 _ddl_schema_down_consume_filenames
$dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for a
downgrade.
-=method _ddl_schema_up_produce_filenames
+=head2 _ddl_schema_up_produce_filenames
$dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to upgrade from one schema to another
will be stored.
-=method _ddl_schema_down_produce_filename
+=head2 _ddl_schema_down_produce_filename
$dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to downgrade from one schema to another
will be stored.
-=method _resultsource_install_filename
+=head2 _resultsource_install_filename
my $filename_fn = $dm->_resultsource_install_filename('User');
$dm->$filename_fn('SQLite', '1.00')
Returns a function which in turn returns a single filename used to install a
single resultsource. Weird interface is convenient for me. Deal with it.
-=method _run_sql_and_perl
+=head2 _run_sql_and_perl
$dm->_run_sql_and_perl([qw( list of filenames )])
Depending on L</txn_wrap> all of the files run will be wrapped in a single
transaction.
-=method _prepare_install
+=head2 _prepare_install
$dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
L<SQL::Translator> args and the second is a coderef that returns the filename
to store the sql in.
-=method _prepare_changegrade
+=head2 _prepare_changegrade
$dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the
direction of the changegrade, be it 'up' or 'down'.
-=method _read_sql_file
+=head2 _read_sql_file
$dm->_read_sql_file('foo.sql')
Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
transactions, and blank lines.
+=end comment