use autodie;
use Carp qw( carp croak );
-use Log::Contextual::WarnLogger;
-use Log::Contextual qw(:log :dlog), -default_logger => Log::Contextual::WarnLogger->new({
- env_prefix => 'DBICDH'
-});
-use Data::Dumper::Concise;
+use DBIx::Class::DeploymentHandler::Logger;
+use Log::Contextual qw(:log :dlog), -default_logger =>
+ DBIx::Class::DeploymentHandler::Logger->new({
+ env_prefix => 'DBICDH'
+ });
use Method::Signatures::Simple;
use Try::Tiny;
with 'DBIx::Class::DeploymentHandler::HandlesDeploy';
+# When true, on-disk DDL files are skipped/tolerated and SQL is generated
+# directly from the stored YAML protoschema instead (see deploy and the
+# *_single_step methods).
+has ignore_ddl => (
+ isa => 'Bool',
+ is => 'ro',
+ default => undef,
+);
+
has schema => (
isa => 'DBIx::Class::Schema',
is => 'ro',
# this will probably never get called as the DBICDH
# will be passing down a schema_version normally, which
-# is built the same way
+# is built the same way, but we leave this in place
method _build_schema_version { $self->schema->schema_version }
method __ddl_consume_with_prefix($type, $versions, $prefix) {
croak "neither $main or $generic exist; please write/generate some SQL";
}
- opendir my($dh), $dir;
- my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
- closedir $dh;
-
+ my %files;
+ try {
+ opendir my($dh), $dir;
+ %files =
+ map { $_ => "$dir/$_" }
+ grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" }
+ readdir $dh;
+ closedir $dh;
+ } catch {
+ die $_ unless $self->ignore_ddl;
+ };
if (-d $common) {
opendir my($dh), $common;
for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) {
$self->__ddl_consume_with_prefix($type, [ $version ], 'schema')
}
+# Sorted list of Perl transform scripts to run when upgrading the
+# protoschema across the given version pair (e.g. ['1','2'] -> dir "1-2").
+method _ddl_protoschema_up_consume_filenames($versions) {
+ my $base_dir = $self->script_directory;
+
+ # scripts live under $base_dir/_protoschema/up/$from-$to
+ my $dir = catfile( $base_dir, '_protoschema', 'up', join q(-), @{$versions});
+
+ # no directory means no transforms for this step
+ return [] unless -d $dir;
+
+ opendir my($dh), $dir;
+ my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh;
+ closedir $dh;
+
+ # full paths, ordered by filename
+ return [@files{sort keys %files}]
+}
+
+# Sorted list of Perl transform scripts to run when downgrading the
+# protoschema across the given version pair; mirrors the ..._up_ variant.
+method _ddl_protoschema_down_consume_filenames($versions) {
+ my $base_dir = $self->script_directory;
+
+ # scripts live under $base_dir/_protoschema/down/$from-$to
+ my $dir = catfile( $base_dir, '_protoschema', 'down', join q(-), @{$versions});
+
+ # no directory means no transforms for this step
+ return [] unless -d $dir;
+
+ opendir my($dh), $dir;
+ my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh;
+ closedir $dh;
+
+ # full paths, ordered by filename
+ return [@files{sort keys %files}]
+}
+
+# Path where the serialized (YAML) schema for $version is written;
+# creates the directory if it does not yet exist.
+method _ddl_protoschema_produce_filename($version) {
+ my $dirname = catfile( $self->script_directory, '_protoschema', 'schema', $version );
+ mkpath($dirname) unless -d $dirname;
+
+ return catfile( $dirname, '001-auto.yml' );
+}
+
method _ddl_schema_produce_filename($type, $version) {
my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
mkpath($dirname) unless -d $dirname;
- return catfile( $dirname, '001-auto.sql'
- );
+ return catfile( $dirname, '001-auto.sql' );
}
method _ddl_schema_down_produce_filename($type, $versions, $dir) {
return catfile( $dirname, '001-auto.sql');
}
-method _run_sql_and_perl($filenames) {
- my @files = @{$filenames};
+# Normalize an arrayref of SQL statements (strip blanks, comments and
+# transaction wrappers) and execute them one by one via the storage's
+# dbh, dying with the offending line on failure.  Returns the SQL run,
+# joined with newlines.
+method _run_sql_array($sql) {
 my $storage = $self->storage;
+ $sql = [grep {
+ $_ && # remove blank lines
+ !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's
+ } map {
+ s/^\s+//; s/\s+$//; # trim whitespace
+ join '', grep { !/^--/ } split /\n/ # remove comments
+ } @$sql];
+
+ Dlog_trace { "Running SQL $_" } $sql;
+ foreach my $line (@{$sql}) {
+ $storage->_query_start($line);
+ # the whole reason we do this is so that we can see the line that was run
+ try {
+ $storage->dbh_do (sub { $_[1]->do($line) });
+ }
+ catch {
+ die "$_ (running line '$line')"
+ }
+ $storage->_query_end($line);
+ }
+ return join "\n", @$sql
+}
- my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+# Read statements from a .sql file and execute them via _run_sql_array;
+# returns the SQL that was run.
+method _run_sql($filename) {
+ log_debug { "Running SQL from $filename" };
+ return $self->_run_sql_array($self->_read_sql_file($filename));
+}
- my $sql;
- for my $filename (@files) {
- if ($filename =~ /\.sql$/) {
- log_debug { "[DBICDH] Running SQL from $filename" };
- my @sql = @{$self->_read_sql_file($filename)};
- $sql .= join "\n", @sql;
- log_trace { "[DBICDH] Running SQL $sql" };
-
- foreach my $line (@sql) {
- $storage->_query_start($line);
- try {
- # do a dbh_do cycle here, as we need some error checking in
- # place (even though we will ignore errors)
- $storage->dbh_do (sub { $_[1]->do($line) });
- }
- catch {
- carp "$_ (running '${line}')"
- }
- $storage->_query_end($line);
- }
- } elsif ( $filename =~ /^(.+)\.pl$/ ) {
- log_debug { "[DBICDH] Running Perl from $filename" };
- my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
+# Slurp a .pl migration file, eval it, and call the coderef it returns,
+# passing the schema as the only argument.  Compile/shape failures warn
+# (carp) rather than die.
+method _run_perl($filename) {
+ log_debug { "Running Perl from $filename" };
+ my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
- no warnings 'redefine';
- my $fn = eval "$filedata";
- use warnings;
- log_trace { '[DBICDH] Running Perl ' . Dumper($fn) };
+ no warnings 'redefine';
+ my $fn = eval "$filedata";
+ use warnings;
+ Dlog_trace { "Running Perl $_" } $fn;
- if ($@) {
- carp "$filename failed to compile: $@";
- } elsif (ref $fn eq 'CODE') {
- $fn->($self->schema)
- } else {
- carp "$filename should define an anonymouse sub that takes a schema but it didn't!";
- }
+ if ($@) {
+ carp "$filename failed to compile: $@";
+ } elsif (ref $fn eq 'CODE') {
+ $fn->($self->schema)
+ } else {
+ # typo fixed in the added line: "anonymouse" -> "anonymous"
+ carp "$filename should define an anonymous sub that takes a schema but it didn't!";
+ }
+}
+
+method _run_sql_and_perl($filenames, $sql_to_run) {
+ my @files = @{$filenames};
+ my $guard = $self->schema->txn_scope_guard if $self->txn_wrap;
+
+ $self->_run_sql_array($sql_to_run) if $self->ignore_ddl;
+
+ my $sql = ($sql_to_run)?join ";\n", @$sql_to_run:'';
+ FILENAME:
+ for my $filename (@files) {
+ if ($self->ignore_ddl && $filename =~ /^[^_]*-auto.*\.sql$/) {
+ next FILENAME
+ } elsif ($filename =~ /\.sql$/) {
+ $sql .= $self->_run_sql($filename)
+ } elsif ( $filename =~ /\.pl$/ ) {
+ $self->_run_perl($filename)
} else {
croak "A file ($filename) got to deploy that wasn't sql or perl!";
}
+# Deploy the schema at the requested (or current schema) version by
+# running the per-database deploy scripts via _run_sql_and_perl.
sub deploy {
 my $self = shift;
 my $version = (shift @_ || {})->{version} || $self->schema_version;
- log_info { "[DBICDH] deploying version $version" };
-
+ log_info { "deploying version $version" };
+ my $sqlt_type = $self->storage->sqlt_type;
+ my $sql;
+ # with ignore_ddl, generate SQL straight from the YAML protoschema
+ if ($self->ignore_ddl) {
+ $sql = $self->_sql_from_yaml({},
+ '_ddl_protoschema_produce_filename', $sqlt_type
+ );
+ }
 return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
- $self->storage->sqlt_type,
+ $sqlt_type,
 $version,
- ));
+ ), $sql);
}
sub preinstall {
my $self = shift;
my $args = shift;
my $version = $args->{version} || $self->schema_version;
- log_info { "[DBICDH] preinstalling version $version" };
+ log_info { "preinstalling version $version" };
my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
my @files = @{$self->_ddl_preinstall_consume_filenames(
}
}
-sub _prepare_install {
- my $self = shift;
- my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
- my $to_file = shift;
- my $schema = $self->schema;
- my $databases = $self->databases;
+# Produce the SQL (arrayref of statements) that migrates $db from
+# $from_version to $to_version by parsing the two stored YAML
+# protoschemata and diffing them with SQL::Translator::Diff, after
+# applying any user transform scripts for this $direction.
+method _sqldiff_from_yaml($from_version, $to_version, $db, $direction) {
 my $dir = $self->script_directory;
+ my $sqltargs = {
+ add_drop_table => 1,
+ ignore_constraint_names => 1,
+ ignore_index_names => 1,
+ %{$self->sql_translator_args}
+ };
+
+ # parse the "from" protoschema
+ my $source_schema;
+ {
+ my $prefilename = $self->_ddl_protoschema_produce_filename($from_version, $dir);
+
+ # should probably be a croak
+ carp("No previous schema file found ($prefilename)")
+ unless -e $prefilename;
+
+ my $t = SQL::Translator->new({
+ %{$sqltargs},
+ debug => 0,
+ trace => 0,
+ parser => 'SQL::Translator::Parser::YAML',
+ });
+
+ my $out = $t->translate( $prefilename )
+ or croak($t->error);
+
+ $source_schema = $t->schema;
+
+ $source_schema->name( $prefilename )
+ unless $source_schema->name;
+ }
+
+ # parse the "to" protoschema
+ my $dest_schema;
+ {
+ my $filename = $self->_ddl_protoschema_produce_filename($to_version, $dir);
+
+ # should probably be a croak
+ carp("No next schema file found ($filename)")
+ unless -e $filename;
+
+ my $t = SQL::Translator->new({
+ %{$sqltargs},
+ debug => 0,
+ trace => 0,
+ parser => 'SQL::Translator::Parser::YAML',
+ });
+
+ my $out = $t->translate( $filename )
+ or croak($t->error);
+
+ $dest_schema = $t->schema;
+
+ $dest_schema->name( $filename )
+ unless $dest_schema->name;
+ }
+
+ # let user-supplied scripts rewrite the parsed schemas before diffing
+ my $transform_files_method = "_ddl_protoschema_${direction}_consume_filenames";
+ my $transforms = $self->_coderefs_per_files(
+ $self->$transform_files_method([$from_version, $to_version])
+ );
+ $_->($source_schema, $dest_schema) for @$transforms;
+
+ return [SQL::Translator::Diff::schema_diff(
+ $source_schema, $db,
+ $dest_schema, $db,
+ $sqltargs
+ )];
+}
+
+# Translate the YAML protoschema (filename obtained by calling the
+# $from_file method at the current schema version) into SQL for $db.
+# Returns an arrayref of statements, or undef (with a warning) when
+# translation fails.
+# NOTE(review): $schema is assigned but appears unused in this hunk --
+# candidate for removal; confirm against the full file.
+method _sql_from_yaml($sqltargs, $from_file, $db) {
+ my $schema = $self->schema;
 my $version = $self->schema_version;
 my $sqlt = SQL::Translator->new({
- add_drop_table => 1,
- ignore_constraint_names => 1,
- ignore_index_names => 1,
- parser => 'SQL::Translator::Parser::DBIx::Class',
- %{$sqltargs}
+ add_drop_table => 0,
+ parser => 'SQL::Translator::Parser::YAML',
+ %{$sqltargs},
+ producer => $db,
 });
- my $sqlt_schema = $sqlt->translate( data => $schema )
- or croak($sqlt->error);
+ my $yaml_filename = $self->$from_file($version);
+
+ my @sql = $sqlt->translate($yaml_filename);
+ # translation failure is non-fatal; callers skip this $db
+ if(!@sql) {
+ carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
+ return undef;
+ }
+ return \@sql;
+}
+
+# For each configured database, generate DDL from the YAML protoschema
+# (via the $from_file filename method) and write it to the path returned
+# by the $to_file filename method, overwriting any existing file.
+sub _prepare_install {
+ my $self = shift;
+ # caller-supplied args override the configured sql_translator_args
+ my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
+ my $from_file = shift;
+ my $to_file = shift;
+ my $dir = $self->script_directory;
+ my $databases = $self->databases;
+ my $version = $self->schema_version;
 foreach my $db (@$databases) {
- $sqlt->reset;
- $sqlt->{schema} = $sqlt_schema;
- $sqlt->producer($db);
+ my $sql = $self->_sql_from_yaml($sqltargs, $from_file, $db ) or next;
 my $filename = $self->$to_file($db, $version, $dir);
 if (-e $filename ) {
 carp "Overwriting existing DDL file - $filename";
 unlink $filename;
 }
-
- my $output = $sqlt->translate;
- if(!$output) {
- carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")");
- next;
- }
 open my $file, q(>), $filename;
- print {$file} $output;
+ print {$file} join ";\n", @$sql;
 close $file;
 }
}
}
}
+# Returns a closure that, given a version, yields the protoschema YAML
+# path for a single result source (001-auto-$source_name.yml), creating
+# the directory if needed.  Analogous to _resultsource_install_filename.
+sub _resultsource_protoschema_filename {
+ my ($self, $source_name) = @_;
+ return sub {
+ my ($self, $version) = @_;
+ my $dirname = catfile( $self->script_directory, '_protoschema', $version );
+ mkpath($dirname) unless -d $dirname;
+
+ return catfile( $dirname, "001-auto-$source_name.yml" );
+ }
+}
+
sub install_resultsource {
my ($self, $args) = @_;
my $source = $args->{result_source};
my $version = $args->{version};
- log_info { '[DBICDH] installing_resultsource ' . $source->source_name . ", version $version" };
+ log_info { 'installing_resultsource ' . $source->source_name . ", version $version" };
my $rs_install_file =
$self->_resultsource_install_filename($source->source_name);
+# Serialize a single result source to its protoschema YAML file, then
+# generate that source's install DDL from the YAML.
sub prepare_resultsource_install {
 my $self = shift;
 my $source = (shift @_)->{result_source};
- log_info { '[DBICDH] preparing install for resultsource ' . $source->source_name };
+ log_info { 'preparing install for resultsource ' . $source->source_name };
- my $filename = $self->_resultsource_install_filename($source->source_name);
- $self->_prepare_install({
+ my $install_filename = $self->_resultsource_install_filename($source->source_name);
+ my $proto_filename = $self->_resultsource_protoschema_filename($source->source_name);
+ $self->prepare_protoschema({
 parser_args => { sources => [$source->source_name], }
- }, $filename);
+ }, $proto_filename);
+ $self->_prepare_install({}, $proto_filename, $install_filename);
}
+# Serialize the full schema to YAML, then generate per-database deploy
+# DDL from that protoschema.
sub prepare_deploy {
- log_info { '[DBICDH] preparing deploy' };
+ log_info { 'preparing deploy' };
 my $self = shift;
- $self->_prepare_install({}, '_ddl_schema_produce_filename');
+ $self->prepare_protoschema({}, '_ddl_protoschema_produce_filename');
+ $self->_prepare_install({}, '_ddl_protoschema_produce_filename', '_ddl_schema_produce_filename');
}
sub prepare_upgrade {
my ($self, $args) = @_;
log_info {
- '[DBICDH] preparing upgrade ' .
- "from $args->{from_version} to $args->{to_version}"
+ "preparing upgrade from $args->{from_version} to $args->{to_version}"
};
$self->_prepare_changegrade(
$args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
+# Generate the down-diff files for the given from/to versions.
sub prepare_downgrade {
 my ($self, $args) = @_;
 log_info {
- '[DBICDH] preparing downgrade ' .
- "from $args->{from_version} to $args->{to_version}"
+ "preparing downgrade from $args->{from_version} to $args->{to_version}"
 };
 $self->_prepare_changegrade(
 $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
 );
}
+# Slurp and eval each file in $files, returning an arrayref of the
+# resulting values (the schema-transform coderefs consumed by
+# _sqldiff_from_yaml).  'redefine' warnings are disabled for the evals.
+method _coderefs_per_files($files) {
+ no warnings 'redefine';
+ [map eval do { local( @ARGV, $/ ) = $_; <> }, @$files]
+}
+
+# For each database, write the $direction (up/down) diff SQL file for
+# $version_set; the SQL now comes from diffing the stored YAML
+# protoschemata (_sqldiff_from_yaml) instead of re-parsing produced DDL.
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
 my $schema = $self->schema;
 my $databases = $self->databases;
 my $dir = $self->script_directory;
- my $sqltargs = $self->sql_translator_args;
 my $schema_version = $self->schema_version;
+ # NOTE(review): $schema and $schema_version look unused after this
+ # hunk -- confirm against the full file before removing.
-
- $sqltargs = {
- add_drop_table => 1,
- ignore_constraint_names => 1,
- ignore_index_names => 1,
- %{$sqltargs}
- };
-
- my $sqlt = SQL::Translator->new( $sqltargs );
-
- $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
- my $sqlt_schema = $sqlt->translate( data => $schema )
- or croak($sqlt->error);
-
+ my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
 foreach my $db (@$databases) {
- $sqlt->reset;
- $sqlt->{schema} = $sqlt_schema;
- $sqlt->producer($db);
-
- my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir);
- unless(-e $prefilename) {
- carp("No previous schema file found ($prefilename)");
- next;
- }
- my $diff_file_method = "_ddl_schema_${direction}_produce_filename";
 my $diff_file = $self->$diff_file_method($db, $version_set, $dir );
 if(-e $diff_file) {
 carp("Overwriting existing $direction-diff file - $diff_file");
 unlink $diff_file;
 }
- my $source_schema;
- {
- my $t = SQL::Translator->new({
- %{$sqltargs},
- debug => 0,
- trace => 0,
- });
-
- $t->parser( $db ) # could this really throw an exception?
- or croak($t->error);
-
- my $out = $t->translate( $prefilename )
- or croak($t->error);
-
- $source_schema = $t->schema;
-
- $source_schema->name( $prefilename )
- unless $source_schema->name;
- }
-
- # The "new" style of producers have sane normalization and can support
- # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
- # And we have to diff parsed SQL against parsed SQL.
- my $dest_schema = $sqlt_schema;
-
- unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) {
- my $t = SQL::Translator->new({
- %{$sqltargs},
- debug => 0,
- trace => 0,
- });
-
- $t->parser( $db ) # could this really throw an exception?
- or croak($t->error);
-
- my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir);
- my $out = $t->translate( $filename )
- or croak($t->error);
-
- $dest_schema = $t->schema;
-
- $dest_schema->name( $filename )
- unless $dest_schema->name;
- }
-
- my $diff = SQL::Translator::Diff::schema_diff(
- $source_schema, $db,
- $dest_schema, $db,
- $sqltargs
- );
 open my $file, q(>), $diff_file;
- print {$file} $diff;
+ print {$file} join ";\n", @{$self->_sqldiff_from_yaml($from_version, $to_version, $db, $direction)};
 close $file;
 }
}
+# Run one downgrade step for $version_set; with ignore_ddl the SQL diff
+# is computed from the YAML protoschemata instead of on-disk files.
+# Returns a two-element arrayref: empty string plus the SQL that ran.
sub downgrade_single_step {
 my $self = shift;
 my $version_set = (shift @_)->{version_set};
- log_info { qq([DBICDH] downgrade_single_step'ing ) . Dumper($version_set) };
-
+ Dlog_info { "downgrade_single_step'ing $_" } $version_set;
+
+ my $sqlt_type = $self->storage->sqlt_type;
+ my $sql_to_run;
+ if ($self->ignore_ddl) {
+ $sql_to_run = $self->_sqldiff_from_yaml(
+ $version_set->[0], $version_set->[1], $sqlt_type, 'down',
+ );
+ }
 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
- $self->storage->sqlt_type,
+ $sqlt_type,
 $version_set,
- ));
+ ), $sql_to_run);
 return ['', $sql];
}
+# Run one upgrade step for $version_set; with ignore_ddl the SQL diff is
+# computed from the YAML protoschemata instead of on-disk files.
+# Returns a two-element arrayref: empty string plus the SQL that ran.
sub upgrade_single_step {
 my $self = shift;
 my $version_set = (shift @_)->{version_set};
- log_info { qq([DBICDH] upgrade_single_step'ing ) . Dumper($version_set) };
-
+ Dlog_info { "upgrade_single_step'ing $_" } $version_set;
+
+ my $sqlt_type = $self->storage->sqlt_type;
+ my $sql_to_run;
+ if ($self->ignore_ddl) {
+ $sql_to_run = $self->_sqldiff_from_yaml(
+ $version_set->[0], $version_set->[1], $sqlt_type, 'up',
+ );
+ }
 my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
- $self->storage->sqlt_type,
+ $sqlt_type,
 $version_set,
- ));
+ ), $sql_to_run);
 return ['', $sql];
}
+# Serialize the current DBIx::Class schema to YAML and write it to the
+# filename produced by the $to_file method, overwriting any existing
+# file.  Croaks if the YAML translation fails.
+sub prepare_protoschema {
+ my $self = shift;
+ # caller-supplied args override the configured sql_translator_args
+ my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} };
+ my $to_file = shift;
+ my $filename
+ = $self->$to_file($self->schema_version);
+
+ # we do this because the code that uses this sets parser args,
+ # so we just need to merge in the package
+ $sqltargs->{parser_args}{package} = $self->schema;
+ my $sqlt = SQL::Translator->new({
+ parser => 'SQL::Translator::Parser::DBIx::Class',
+ producer => 'SQL::Translator::Producer::YAML',
+ %{ $sqltargs },
+ });
+
+ my $yml = $sqlt->translate;
+
+ croak("Failed to translate to YAML: " . $sqlt->error)
+ unless $yml;
+
+ if (-e $filename ) {
+ carp "Overwriting existing DDL-YML file - $filename";
+ unlink $filename;
+ }
+
+ open my $file, q(>), $filename;
+ print {$file} $yml;
+ close $file;
+}
+
__PACKAGE__->meta->make_immutable;
1;
=head1 DESCRIPTION
-This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes care of
-generating sql files representing schemata as well as sql files to move from
-one version of a schema to the rest. One of the hallmark features of this
-class is that it allows for multiple sql files for deploy and upgrade, allowing
-developers to fine tune deployment. In addition it also allows for perl files
-to be run at any stage of the process.
+This class is the meat of L<DBIx::Class::DeploymentHandler>. It takes care
+of generating serialized schemata as well as sql files to move from one
+version of a schema to the next. One of the hallmark features of this class
+is that it allows for multiple sql files for deploy and upgrade, allowing
+developers to fine tune deployment. In addition it also allows for perl
+files to be run at any stage of the process.
For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
documented here is extra fun stuff or private methods.
like the best way to describe the layout is with the following example:
$sql_migration_dir
+ |- _protoschema
+ | |- schema
+ | |- 1
+ | | `- 001-auto.yml
+ | |- 2
+ | | `- 001-auto.yml
+ | `- 3
+ | `- 001-auto.yml
|- SQLite
| |- down
| | `- 2-1
would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
-Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
-the time it probably should be, since perl scripts will mostly be database
-independent.
+C<.pl> files don't have to be in the C<_common> directory, but most of the time
+they should be, because perl scripts are generally database independent.
C<_generic> exists for when you for some reason are sure that your SQL is
generic enough to run on all databases. Good luck with that one.
of preinstall is to have it prompt for username and password, and then call the
appropriate C<< CREATE DATABASE >> commands etc.
+=head2 Directory Specification
+
+The following subdirectories are recognized by this DeployMethod:
+
+=over 2
+
+=item C<_protoschema> This directory can contain the following directories:
+
+=over 2
+
+=item C<down> This directory merely contains directories named after
+migrations, which are of the form C<$from_version-$to_version>. Inside of
+these directories you may put Perl scripts which are to return a subref
+that takes the arguments C<< $from_schema, $to_schema >>, which are
+L<SQL::Translator::Schema> objects.
+
+=item C<up> This directory merely contains directories named after
+migrations, which are of the form C<$from_version-$to_version>. Inside of
+these directories you may put Perl scripts which are to return a subref
+that takes the arguments C<< $from_schema, $to_schema >>, which are
+L<SQL::Translator::Schema> objects.
+
+=item C<schema> This directory merely contains directories named after schema
+versions, which in turn contain C<yaml> files that are serialized versions
+of the schema at that version. These files are not for editing by hand.
+
+=back
+
+=item C<$storage_type> This is a set of scripts that gets run depending on
+what your storage type is. If you are not sure what your storage type is,
+take a look at the producers listed for L<SQL::Translator>. Also note,
+C<_generic> and C<_common> are special cases. C<_generic> will get run if
+there is no directory for your given storage, and C<_common> will get merged
+into whatever other files (C<_generic> or your storage type) you already have.
+This directory can contain the following directories itself:
+
+=over 2
+
+=item C<preinstall> Gets run before the C<schema> is C<deploy>ed. Has the
+same structure as the C<schema> subdirectory as well; that is, it has a
+directory for each schema version. Unlike C<schema>, C<up>, and C<down>
+though, it can only run C<.pl> files, and the coderef in the perl files get
+no arguments passed to them.
+
+=item C<schema> Gets run when the schema is C<deploy>ed. Structure is a
+directory per schema version, and then files are merged with C<_common> and run
+in filename order. C<.sql> files are merely run, as expected. C<.pl> files are
+run according to L</PERL SCRIPTS>.
+
+=item C<up> Gets run when the schema is C<upgrade>d. Structure is a directory
+per upgrade step, (for example, C<1-2> for upgrading from version 1 to version
+2,) and then files are merged with C<_common> and run in filename order.
+C<.sql> files are merely run, as expected. C<.pl> files are run according
+to L</PERL SCRIPTS>.
+
+=item C<down> Gets run when the schema is C<downgrade>d. Structure is a directory
+per downgrade step, (for example, C<2-1> for downgrading from version 2 to version
+1,) and then files are merged with C<_common> and run in filename order.
+C<.sql> files are merely run, as expected. C<.pl> files are run according
+to L</PERL SCRIPTS>.
+
+
+=back
+
+=back
+
=head1 PERL SCRIPTS
A perl script for this tool is very simple. It merely needs to contain an