# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }
+has _json => (
+ is => 'ro',
+ lazy_build => 1,
+);
+
+sub _build__json { require JSON; JSON->new->pretty }
+
method __ddl_consume_with_prefix($type, $versions, $prefix) {
my $base_dir = $self->script_directory;
my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql' );
+ return catfile( $dirname, '001-auto.sql-json' );
}
method _ddl_schema_up_consume_filenames($type, $versions) {
my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql'
- );
+ return catfile( $dirname, '001-auto.sql-json' );
}
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
my $dirname = catfile( $dir, $type, 'down', join q(-), @{$versions} );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql');
+ return catfile( $dirname, '001-auto.sql-json');
}
method _run_sql_array($sql) {
my $storage = $self->storage;
- my $ret = join "\n", @$sql;
- log_trace { "[DBICDH] Running SQL $sql" };
+ $sql = [grep {
+ $_ && # remove blank lines
+ !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
+ } map {
+ s/^\s+//; s/\s+$//; # trim whitespace
+ join '', grep { !/^--/ } split /\n/ # remove comments
+ } @$sql];
+
+ log_trace { '[DBICDH] Running SQL ' . Dumper($sql) };
foreach my $line (@{$sql}) {
$storage->_query_start($line);
try {
}
$storage->_query_end($line);
}
- return $ret
+ return join "\n", @$sql
}
method _run_sql($filename) {
carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
}
}
-{
- my $json;
-
- method _run_serialized_sql($filename, $type) {
- if ($type eq 'json') {
- require JSON;
- $json ||= JSON->new->pretty;
- my @sql = @{$json->decode($filename)};
- } else {
- croak "A file ($filename) got to deploy that wasn't sql or perl!";
- }
- }
+method _run_serialized_sql($filename, $type) {
+ if (lc $type eq 'json') {
+ return $self->_run_sql_array($self->_json->decode(
+ do { local( @ARGV, $/ ) = $filename; <> } # slurp
+ ))
+ } else {
+    croak "$type is not one of the supported serialized types"
+ }
}
method _run_sql_and_perl($filenames) {
my $version = $self->schema_version;
my $sqlt = SQL::Translator->new({
+ no_comments => 1,
add_drop_table => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
unlink $filename;
}
- my $output = $sqlt->translate;
- if(!$output) {
+ my $sql = $self->_generate_final_sql($sqlt);
+ if(!$sql) {
carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
next;
}
open my $file, q(>), $filename;
- print {$file} $output;
+ print {$file} $sql;
close $file;
}
}
+method _generate_final_sql($sqlt) {
+ my @output = $sqlt->translate;
+ $self->_json->encode(\@output);
+}
+
sub _resultsource_install_filename {
my ($self, $source_name) = @_;
return sub {
my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, "001-auto-$source_name.sql" );
+ return catfile( $dirname, "001-auto-$source_name.sql-json" );
}
}
$sqltargs = {
add_drop_table => 1,
+ no_comments => 1,
ignore_constraint_names => 1,
ignore_index_names => 1,
%{$sqltargs}
$t->parser( $db ) # could this really throw an exception?
or croak($t->error);
- my $out = $t->translate( $prefilename )
+ my $sql = $self->_default_read_sql_file_as_string($prefilename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$source_schema = $t->schema;
or croak($t->error);
my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
- my $out = $t->translate( $filename )
+ my $sql = $self->_default_read_sql_file_as_string($filename);
+ my $out = $t->translate( \$sql )
or croak($t->error);
$dest_schema = $t->schema;
unless $dest_schema->name;
}
- my $diff = SQL::Translator::Diff::schema_diff(
- $source_schema, $db,
- $dest_schema, $db,
- $sqltargs
- );
open my $file, q(>), $diff_file;
- print {$file} $diff;
+ print {$file}
+ $self->_generate_final_diff($source_schema, $dest_schema, $db, $sqltargs);
close $file;
}
}
+method _generate_final_diff($source_schema, $dest_schema, $db, $sqltargs) {
+ $self->_json->encode([
+ SQL::Translator::Diff::schema_diff(
+ $source_schema, $db,
+ $dest_schema, $db,
+ $sqltargs
+ )
+ ])
+}
+
method _read_sql_file($file) {
return unless $file;
my @data = split /;\n/, join '', <$fh>;
close $fh;
- @data = grep {
- $_ && # remove blank lines
- !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
- } map {
- s/^\s+//; s/\s+$//; # trim whitespace
- join '', grep { !/^--/ } split /\n/ # remove comments
- } @data;
-
return \@data;
}
+method _default_read_sql_file_as_string($file) {
+ return join q(), map "$_;\n", @{$self->_json->decode(
+ do { local( @ARGV, $/ ) = $file; <> } # slurp
+ )};
+}
+
sub downgrade_single_step {
my $self = shift;
my $version_set = (shift @_)->{version_set};