use autodie;
use Carp qw( carp croak );
+use Log::Contextual::WarnLogger;
+use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
+ env_prefix => 'DBICDH'
+});
+use Data::Dumper::Concise;
use Method::Signatures::Simple;
use Try::Tiny;
is => 'ro',
default => sub { {} },
);
-has upgrade_directory => (
+has script_directory => (
isa => 'Str',
is => 'ro',
required => 1,
# Version of the schema being deployed; defaults to the version the
# schema class itself reports (see _build_schema_version).
has schema_version => (
  is         => 'ro',
  isa        => 'Str',
  lazy_build => 1,
);
# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
# is built the same way, but we leave this in place
method _build_schema_version { return $self->schema->schema_version }
# Lazily-built JSON codec used for (de)serializing DDL statement
# arrays; loaded on demand so JSON is only required when needed.
has _json => (
  is         => 'ro',
  lazy_build => 1,
);

sub _build__json { require JSON; return JSON->new->pretty }
+
method __ddl_consume_with_prefix($type, $versions, $prefix) {
- my $base_dir = $self->upgrade_directory;
+ my $base_dir = $self->script_directory;
my $main = catfile( $base_dir, $type );
my $generic = catfile( $base_dir, '_generic' );
}
opendir my($dh), $dir;
- my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
+ my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
closedir $dh;
if (-d $common) {
}
# Filename into which the serialized (JSON) schema DDL for an initial
# deploy is written; creates the containing directory if needed.
method _ddl_schema_produce_filename($type, $version) {
  my $schema_dir = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($schema_dir) unless -d $schema_dir;
  return catfile( $schema_dir, '001-auto.sql-json' );
}
method _ddl_schema_up_consume_filenames($type, $versions) {
}
# Filename into which the serialized (JSON) DDL for an upgrade across
# the given version set is written; creates the directory if needed.
method _ddl_schema_up_produce_filename($type, $versions) {
  my $up_dir = catfile(
    $self->script_directory, $type, 'up', join q(-), @{$versions}
  );
  mkpath($up_dir) unless -d $up_dir;
  return catfile( $up_dir, '001-auto.sql-json' );
}
# Filename into which the serialized (JSON) DDL for a downgrade across
# the given version set is written, under the supplied base directory.
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
  my $down_dir = catfile( $dir, $type, 'down', join q(-), @{$versions} );
  mkpath($down_dir) unless -d $down_dir;
  return catfile( $down_dir, '001-auto.sql-json' );
}
-method _run_sql_and_perl($filenames) {
- my @files = @{$filenames};
# Normalize and execute an arrayref of SQL statements against the
# schema's storage.  Blank lines, "--" comments, and explicit
# transaction statements (BEGIN/COMMIT) are stripped first, since
# transaction handling is governed by txn_wrap.  Failures of
# individual statements are warned about but deliberately not fatal.
# Returns the SQL that was run, joined with newlines.
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ && # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
  } map {
    s/^\s+//; s/\s+$//; # trim whitespace
    join '', grep { !/^--/ } split /\n/ # remove comments
  } @$sql];

  log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    try {
      # do a dbh_do cycle here, as we need some error checking in
      # place (even though we will ignore errors)
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      carp "$_ (running '${line}')"
    }; # semicolon required: Try::Tiny's try/catch is an ordinary function call
    $storage->_query_end($line);
  }
  return join "\n", @$sql;
}
- my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
# Read a plain .sql file and execute each of its statements; returns
# the SQL that was run (see _run_sql_array).
method _run_sql($filename) {
  log_debug { "[DBICDH] Running SQL from $filename" };
  my $statements = $self->_read_sql_file($filename);
  return $self->_run_sql_array($statements);
}
- my $sql;
- for my $filename (@files) {
- if ($filename =~ /\.sql$/) {
- my @sql = @{$self->_read_sql_file($filename)};
- $sql .= join "\n", @sql;
-
- foreach my $line (@sql) {
- $storage->_query_start($line);
- try {
- # do a dbh_do cycle here, as we need some error checking in
- # place (even though we will ignore errors)
- $storage->dbh_do (sub { $_[1]->do($line) });
- }
- catch {
- carp "$_ (running '${line}')"
- }
- $storage->_query_end($line);
- }
- } elsif ( $filename =~ /^(.+)\.pl$/ ) {
- my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
# Load and run a perl migration script.  The file's last expression
# must be a coderef taking the schema; compilation failures and
# non-coderef return values are warned about rather than fatal.
# NOTE: this string-evals file contents by design — migration scripts
# are trusted code supplied by the deploying user.
method _run_perl($filename) {
  log_debug { "[DBICDH] Running Perl from $filename" };
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; # slurp

  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };

  if ($@) {
    carp "$filename failed to compile: $@";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    # fixed typo: "anonymouse" -> "anonymous"
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
+
# Deserialize a .sql-$type file and run the statements it contains.
# Only JSON serialization is currently supported; anything else croaks.
method _run_serialized_sql($filename, $type) {
  if (lc $type eq 'json') {
    my $serialized = do { local( @ARGV, $/ ) = $filename; <> }; # slurp
    return $self->_run_sql_array($self->_json->decode($serialized));
  } else {
    # fixed typo: "serialzed" -> "serialized"
    croak "$type is not one of the supported serialized types";
  }
}
+
+method _run_sql_and_perl($filenames) {
+ my @files = @{$filenames};
+ my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+
+ my $sql = '';
+ for my $filename (@files) {
+ if ($filename =~ /\.sql$/) {
+ $sql .= $self->_run_sql($filename)
+ } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
+ $sql .= $self->_run_serialized_sql($filename, $1)
+ } elsif ( $filename =~ /\.pl$/ ) {
+ $self->_run_perl($filename)
} else {
croak "A file ($filename) got to deploy that wasn't sql or perl!";
}
sub deploy {
my $self = shift;
- my $version = shift || $self->schema_version;
+ my $version = (shift @_ || {})->{version} || $self->schema_version;
+ log_info { "[DBICDH] deploying version $version" };
return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
$self->storage->sqlt_type,
}
sub preinstall {
- my $self = shift;
- my $version = shift || $self->schema_version;
+ my $self = shift;
+ my $args = shift;
+ my $version = $args->{version} || $self->schema_version;
+ log_info { "[DBICDH] preinstalling version $version" };
+ my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
my @files = @{$self->_ddl_preinstall_consume_filenames(
- $self->storage->sqlt_type,
+ $storage_type,
$version,
)};
if ( $filename =~ /^(.+)\.pl$/ ) {
my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
- no warnings 'redefine';
+ no warnings 'redefine';
my $fn = eval "$filedata";
use warnings;
- if ($@) {
+ if ($@) {
carp "$filename failed to compile: $@";
- } elsif (ref $fn eq 'CODE') {
+ } elsif (ref $fn eq 'CODE') {
$fn->()
} else {
carp "$filename should define an anonymous sub but it didn't!";
my $to_file = shift;
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $version = $self->schema_version;
my $sqlt = SQL::Translator->new({
+ no_comments => 1,
add_drop_table => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
unlink $filename;
}
- my $output = $sqlt->translate;
- if(!$output) {
+ my $sql = $self->_generate_final_sql($sqlt);
+ if(!$sql) {
carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
next;
}
open my $file, q(>), $filename;
- print {$file} $output;
+ print {$file} $sql;
close $file;
}
}
# Translate the schema with SQL::Translator and encode the resulting
# list of statements as pretty-printed JSON for on-disk storage.
method _generate_final_sql($sqlt) {
  my @statements = $sqlt->translate;
  return $self->_json->encode(\@statements);
}
+
# Curried filename generator for a single result source's install DDL:
# returns a coderef ($self, $type, $version) -> filename, so the
# type/version decision can be deferred by the caller.
sub _resultsource_install_filename {
  my ($self, $source_name) = @_;

  return sub {
    my ($self, $type, $version) = @_;
    my $install_dir = catfile( $self->script_directory, $type, 'schema', $version );
    mkpath($install_dir) unless -d $install_dir;
    return catfile( $install_dir, "001-auto-$source_name.sql-json" );
  }
}
sub install_resultsource {
- my ($self, $source, $version) = @_;
-
+ my ($self, $args) = @_;
+ my $source = $args->{result_source};
+ my $version = $args->{version};
+ log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
my $rs_install_file =
$self->_resultsource_install_filename($source->source_name);
sub prepare_resultsource_install {
my $self = shift;
- my $source = shift;
+ my $source = (shift @_)->{result_source};
+ log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };
my $filename = $self->_resultsource_install_filename($source->source_name);
$self->_prepare_install({
}
# Public API: write out the serialized DDL for an initial deploy of the
# current schema (thin wrapper around _prepare_install).
sub prepare_deploy {
  log_info { '[DBICDH] preparing deploy' };
  my ($self) = @_;
  $self->_prepare_install({}, '_ddl_schema_produce_filename');
}
# Public API: write out the DDL needed to move the database up across
# $args->{version_set}.  Delegates to _prepare_changegrade.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    "[DBICDH] preparing upgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    @{$args}{qw(from_version to_version version_set)}, 'up'
  );
}
# Public API: write out the DDL needed to move the database down across
# $args->{version_set}.  Delegates to _prepare_changegrade.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    "[DBICDH] preparing downgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    @{$args}{qw(from_version to_version version_set)}, 'down'
  );
}
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $sqltargs = $self->sql_translator_args;
my $schema_version = $self->schema_version;
$sqltargs = {
add_drop_table => 1,
+ no_comments => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
%{$sqltargs}
$t->parser( $db ) # could this really throw an exception?
or croak($t->error);
- my $out = $t->translate( $prefilename )
+ my $sql = $self->_default_read_sql_file_as_string($prefilename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$source_schema = $t->schema;
or croak($t->error);
my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
- my $out = $t->translate( $filename )
+ my $sql = $self->_default_read_sql_file_as_string($filename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$dest_schema = $t->schema;
unless $dest_schema->name;
}
- my $diff = SQL::Translator::Diff::schema_diff(
- $source_schema, $db,
- $dest_schema, $db,
- $sqltargs
- );
open my $file, q(>), $diff_file;
- print {$file} $diff;
+ print {$file}
+ $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
close $file;
}
}
# Produce the serialized (JSON) diff between two SQL::Translator
# schemata for the given database type.
method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
  my @diff = SQL::Translator::Diff::schema_diff(
    $source_schema, $db,
    $dest_schema, $db,
    $sqltargs
  );
  return $self->_json->encode(\@diff);
}
+
method _read_sql_file($file) {
return unless $file;
my @data = split /;\n/, join '', <$fh>;
close $fh;
- @data = grep {
- $_ && # remove blank lines
- !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
- } map {
- s/^\s+//; s/\s+$//; # trim whitespace
- join '', grep { !/^--/ } split /\n/ # remove comments
- } @data;
-
return \@data;
}
# Slurp a serialized (JSON) DDL file and rebuild it as a single SQL
# string, one statement per line, each terminated by ';'.
method _default_read_sql_file_as_string($file) {
  my $statements = $self->_json->decode(
    do { local( @ARGV, $/ ) = $file; <> } # slurp
  );
  return join q(), map { "$_;\n" } @$statements;
}
+
sub downgrade_single_step {
my $self = shift;
- my $version_set = shift @_;
+ my $version_set = (shift @_)->{version_set};
+ log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
$self->storage->sqlt_type,
sub upgrade_single_step {
my $self = shift;
- my $version_set = shift @_;
+ my $version_set = (shift @_)->{version_set};
+ log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
$self->storage->sqlt_type,
The arguments that get passed to L<SQL::Translator> when it's used.
-=attr upgrade_directory
+=attr script_directory
-The directory (default C<'sql'>) that upgrades are stored in
+The directory (default C<'sql'>) that scripts are stored in
=attr databases
The version the schema on your harddrive is at. Defaults to
C<< $self->schema->schema_version >>.
-=method __ddl_consume_with_prefix
+=begin comment
+
+=head2 __ddl_consume_with_prefix
$dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
files in the order that they should be run for a generic "type" of upgrade.
You should not be calling this in user code.
-=method _ddl_schema_consume_filenames
+=head2 _ddl_schema_consume_filenames
$dm->__ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
initial deploy.
-=method _ddl_schema_produce_filename
+=head2 _ddl_schema_produce_filename
$dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
Returns a single file in which an initial schema will be stored.
-=method _ddl_schema_up_consume_filenames
+=head2 _ddl_schema_up_consume_filenames
$dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for an
upgrade.
-=method _ddl_schema_down_consume_filenames
+=head2 _ddl_schema_down_consume_filenames
$dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Gets a list of files for a
downgrade.
-=method _ddl_schema_up_produce_filenames
+=head2 _ddl_schema_up_produce_filenames
$dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to upgrade from one schema to another
will be stored.
-=method _ddl_schema_down_produce_filename
+=head2 _ddl_schema_down_produce_filename
$dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to downgrade from one schema to another
will be stored.
-=method _resultsource_install_filename
+=head2 _resultsource_install_filename
my $filename_fn = $dm->_resultsource_install_filename('User');
$dm->$filename_fn('SQLite', '1.00')
Returns a function which in turn returns a single filename used to install a
single resultsource. Weird interface is convenient for me. Deal with it.
-=method _run_sql_and_perl
+=head2 _run_sql_and_perl
$dm->_run_sql_and_perl([qw( list of filenames )])
Depending on L</txn_wrap> all of the files run will be wrapped in a single
transaction.
-=method _prepare_install
+=head2 _prepare_install
$dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
L<SQL::Translator> args and the second is a coderef that returns the filename
to store the sql in.
-=method _prepare_changegrade
+=head2 _prepare_changegrade
$dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the
direction of the changegrade, be it 'up' or 'down'.
-=method _read_sql_file
+=head2 _read_sql_file
$dm->_read_sql_file('foo.sql')
Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
transactions, and blank lines.
+=end comment