use autodie;
use Carp qw( carp croak );
+use DBIx::Class::DeploymentHandler::Logger;
+use Log::Contextual qw(:log :dlog), -default_logger =>
+ DBIx::Class::DeploymentHandler::Logger->new({
+ env_prefix => 'DBICDH'
+ });
use Method::Signatures::Simple;
use Try::Tiny;
is => 'ro',
default => sub { {} },
);
-has upgrade_directory => (
+has script_directory => (
isa => 'Str',
is => 'ro',
required => 1,
has schema_version => (
  is         => 'ro',
  isa        => 'Str',
  lazy_build => 1,
);

# This will probably never get called, as the DBICDH object will normally
# pass down a schema_version (which is built the same way), but we leave
# this builder in place as a fallback.
method _build_schema_version { $self->schema->schema_version }
method __ddl_consume_with_prefix($type, $versions, $prefix) {
- my $base_dir = $self->upgrade_directory;
+ my $base_dir = $self->script_directory;
my $main = catfile( $base_dir, $type );
my $generic = catfile( $base_dir, '_generic' );
}
opendir my($dh), $dir;
- my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh;
+ my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } readdir $dh;
closedir $dh;
if (-d $common) {
}
# Returns the single file an initial schema for $type/$version is written to,
# creating the directory on first use.  (Attribute renamed: upgrade_directory
# is now script_directory.)
method _ddl_schema_produce_filename($type, $version) {
  my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql' );
}
# Returns the single file an upgrade script is written to, e.g.
# <script_directory>/<type>/up/1-2/001-auto.sql, creating the directory
# on first use.
method _ddl_schema_up_produce_filename($type, $versions) {
  my $dir = $self->script_directory;

  my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions});
  mkpath($dirname) unless -d $dirname;

  return catfile( $dirname, '001-auto.sql');
}
# Runs an ArrayRef of SQL statements against the storage, one statement at a
# time, and returns the statements joined with newlines.  Blank entries,
# comments, and explicit transaction statements are stripped first (txn
# wrapping is handled by the caller via txn_scope_guard).
method _run_sql_array($sql) {
  my $storage = $self->storage;

  $sql = [grep {
    $_ &&                                  # remove blank lines
    !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/   # strip txn's
  } map {
    s/^\s+//; s/\s+$//;                    # trim whitespace
    join '', grep { !/^--/ } split /\n/    # remove comments
  } @$sql];

  Dlog_trace { "Running SQL $_" } $sql;
  foreach my $line (@{$sql}) {
    $storage->_query_start($line);
    # the whole reason we run statement-by-statement is so that we can
    # report exactly which line failed
    try {
      $storage->dbh_do (sub { $_[1]->do($line) });
    }
    catch {
      die "$_ (running line '$line')"
    };  # semicolon required: Try::Tiny's try/catch is a function call
    $storage->_query_end($line);
  }
  return join "\n", @$sql;
}
# Reads a plain .sql file (via _read_sql_file, which returns an ArrayRef of
# statements) and executes it.  Returns the SQL that was run.
method _run_sql($filename) {
  log_debug { "Running SQL from $filename" };
  return $self->_run_sql_array($self->_read_sql_file($filename));
}
# Runs a deployment perl script: the file must evaluate to a coderef, which
# is called with the schema as its only argument.  Compilation failures are
# warned about (carp), not fatal, so a broken script does not abort the
# whole deployment.
method _run_perl($filename) {
  log_debug { "Running Perl from $filename" };

  # slurp the whole script
  my $filedata = do { local( @ARGV, $/ ) = $filename; <> };

  no warnings 'redefine';
  my $fn = eval "$filedata";
  use warnings;
  Dlog_trace { "Running Perl $_" } $fn;

  if ($@) {
    carp "$filename failed to compile: $@";
  } elsif (ref $fn eq 'CODE') {
    $fn->($self->schema)
  } else {
    carp "$filename should define an anonymous sub that takes a schema but it didn't!";
  }
}
{
  # Deserializer cached across calls; built lazily so JSON is only
  # loaded when a serialized script is actually deployed.
  my $json;

  # Runs a serialized SQL file (e.g. 001-auto.sql-json): slurps the file,
  # decodes it into an ArrayRef of statements, and executes them.
  # Returns the SQL that was run.
  method _run_serialized_sql($filename, $type) {
    if ($type eq 'json') {
      require JSON;
      $json ||= JSON->new->pretty;
      # decode the file *contents*, not the filename
      my $sql = $json->decode(
        do { local( @ARGV, $/ ) = $filename; <> } # slurp
      );
      return $self->_run_sql_array($sql);
    } else {
      croak "A file ($filename) used an unsupported serialization format ($type)!";
    }
  }

}
# Dispatches each filename to the appropriate runner by extension:
# .sql (plain SQL), .sql-<type> (serialized SQL), or .pl (perl script).
# When txn_wrap is set, everything runs inside a single transaction.
# Returns the concatenated SQL that was run.
method _run_sql_and_perl($filenames) {
  my @files = @{$filenames};

  # NOTE: `my $x = ... if cond` has unspecified behavior; declare first.
  my $guard;
  $guard = $self->schema->txn_scope_guard if $self->txn_wrap;

  my $sql = '';
  for my $filename (@files) {
    if ($filename =~ /\.sql$/) {
      $sql .= $self->_run_sql($filename)
    } elsif ( $filename =~ /\.sql-(\w+)$/ ) {
      $sql .= $self->_run_serialized_sql($filename, $1)
    } elsif ( $filename =~ /\.pl$/ ) {
      $self->_run_perl($filename)
    } else {
      croak "A file ($filename) got to deploy that wasn't sql or perl!";
    }
  }

  $guard->commit if $self->txn_wrap;

  return $sql;
}
# Deploys the schema at the requested version (args are now a hashref:
# ->deploy({ version => ... }); defaults to schema_version).
sub deploy {
  my $self = shift;
  my $version = (shift @_ || {})->{version} || $self->schema_version;
  log_info { "deploying version $version" };

  return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames(
    $self->storage->sqlt_type,
    $version,   # was computed but never passed, so the wrong dir was consumed
  ));
}
-sub preinstall_scripts {
- my $self = shift;
- my $version = shift || $self->schema_version;
+sub preinstall {
+ my $self = shift;
+ my $args = shift;
+ my $version = $args->{version} || $self->schema_version;
+ log_info { "preinstalling version $version" };
+ my $storage_type = $args->{storage_type} || $self->storage->sqlt_type;
my @files = @{$self->_ddl_preinstall_consume_filenames(
- $self->storage->sqlt_type,
+ $storage_type,
$version,
)};
if ( $filename =~ /^(.+)\.pl$/ ) {
my $filedata = do { local( @ARGV, $/ ) = $filename; <> };
- no warnings 'redefine';
+ no warnings 'redefine';
my $fn = eval "$filedata";
use warnings;
- if ($@) {
+ if ($@) {
carp "$filename failed to compile: $@";
- } elsif (ref $fn eq 'CODE') {
+ } elsif (ref $fn eq 'CODE') {
$fn->()
} else {
carp "$filename should define an anonymous sub but it didn't!";
my $to_file = shift;
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $version = $self->schema_version;
my $sqlt = SQL::Translator->new({
my ($self, $source_name) = @_;
return sub {
my ($self, $type, $version) = @_;
- my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version );
+ my $dirname = catfile( $self->script_directory, $type, 'schema', $version );
mkpath($dirname) unless -d $dirname;
return catfile( $dirname, "001-auto-$source_name.sql" );
}
sub install_resultsource {
- my ($self, $source, $version) = @_;
-
+ my ($self, $args) = @_;
+ my $source = $args->{result_source};
+ my $version = $args->{version};
+ log_info { 'installing_resultsource ' . $source->source_name . ", version $version" };
my $rs_install_file =
$self->_resultsource_install_filename($source->source_name);
sub prepare_resultsource_install {
my $self = shift;
- my $source = shift;
+ my $source = (shift @_)->{result_source};
+ log_info { 'preparing install for resultsource ' . $source->source_name };
my $filename = $self->_resultsource_install_filename($source->source_name);
$self->_prepare_install({
}
# Generates the initial schema files (001-auto.*) with default SQLT args.
sub prepare_deploy {
  log_info { 'preparing deploy' };
  my $self = shift;
  $self->_prepare_install({}, '_ddl_schema_produce_filename');
}
# Generates upgrade scripts; args are now a hashref with from_version,
# to_version and version_set keys.
sub prepare_upgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing upgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up'
  );
}
# Mirror of prepare_upgrade for the 'down' direction; args are a hashref
# with from_version, to_version and version_set keys.
sub prepare_downgrade {
  my ($self, $args) = @_;
  log_info {
    "preparing downgrade from $args->{from_version} to $args->{to_version}"
  };
  $self->_prepare_changegrade(
    $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down'
  );
}
method _prepare_changegrade($from_version, $to_version, $version_set, $direction) {
my $schema = $self->schema;
my $databases = $self->databases;
- my $dir = $self->upgrade_directory;
+ my $dir = $self->script_directory;
my $sqltargs = $self->sql_translator_args;
my $schema_version = $self->schema_version;
sub downgrade_single_step {
my $self = shift;
- my $version_set = shift @_;
+ my $version_set = (shift @_)->{version_set};
+ Dlog_info { "downgrade_single_step'ing $_" } $version_set;
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames(
$self->storage->sqlt_type,
sub upgrade_single_step {
my $self = shift;
- my $version_set = shift @_;
+ my $version_set = (shift @_)->{version_set};
+ Dlog_info { "upgrade_single_step'ing $_" } $version_set;
my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames(
$self->storage->sqlt_type,
=head1 DESCRIPTION
This class is the meat of L<DBIx::Class::DeploymentHandler>.  It takes
care of generating serialized sql files representing schemata as well
as serialized sql files to move from one version of a schema to the next.
One of the hallmark features of this class is that it allows for multiple sql
files for deploy and upgrade, allowing developers to fine tune deployment.
In addition it also allows for perl files to be run
at any stage of the process.
For basic usage see L<DBIx::Class::DeploymentHandler::HandlesDeploy>. What's
documented here is extra fun stuff or private methods.
$sql_migration_dir
|- SQLite
| |- down
| | `- 2-1
| | `- 001-auto.sql-json
| |- schema
| | `- 1
| | `- 001-auto.sql-json
| `- up
| |- 1-2
| | `- 001-auto.sql-json
| `- 2-3
| `- 001-auto.sql-json
|- _common
| |- down
| | `- 2-1
| | `- 002-remove-customers.pl
| `- up
| `- 1-2
| `- 002-generate-customers.pl
|- _generic
| |- down
| | `- 2-1
| | `- 001-auto.sql-json
| |- schema
| | `- 1
| | `- 001-auto.sql-json
| `- up
| `- 1-2
| |- 001-auto.sql-json
| `- 002-create-stored-procedures.sql
`- MySQL
|- down
| `- 2-1
| `- 001-auto.sql-json
|- preinstall
| `- 1
| |- 001-create_database.pl
| `- 002-create_users_and_permissions.pl
|- schema
| `- 1
| `- 001-auto.sql-json
`- up
`- 1-2
`- 001-auto.sql-json
So basically, the code
$dm->deploy(1)
on an C<SQLite> database that would simply run
C<$sql_migration_dir/SQLite/schema/1/001-auto.sql-json>.  Next,
$dm->upgrade_single_step([1,2])
would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql-json> followed by
C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.
C<.pl> files don't have to be in the C<_common> directory, but most of the time
they should be, because perl scripts are generally database independent.
C<_generic> exists for when you for some reason are sure that your SQL is
generic enough to run on all databases. Good luck with that one.
Note that unlike most steps in the process, C<preinstall> will not run SQL, as
there may not even be a database at preinstall time.  It will run perl scripts
just like the other steps in the process, but nothing is passed to them.
Until people have used this more it will remain freeform, but a recommended use
of preinstall is to have it prompt for username and password, and then call the
appropriate C<< CREATE DATABASE >> commands etc.
+
=head1 SERIALIZED SQL

The SQL that this module generates and uses is serialized into an array of
SQL statements.  The reason being that some databases handle multiple
statements in a single execution differently.  Generally you do not need to
worry about this, as these scripts are generated for you.  If you find that
you are editing them on a regular basis something is wrong and you either need
to submit a bug or consider writing extra serialized SQL or Perl scripts to run
before or after the automatically generated script.

B<NOTE:> Currently the SQL is serialized into JSON.  I am willing to merge in
patches that will allow more serialization formats if you want that feature,
but if you do send me a patch for that, realize that I do not want to add YAML
support or whatever; I would rather add a generic method of adding any
serialization format.
=head1 PERL SCRIPTS
A perl script for this tool is very simple.  It merely needs to contain an
anonymous sub that takes a L<DBIx::Class::Schema> as its only argument.
A very basic perl script might look like:
#!perl
use strict;
use warnings;
 sub {
my $schema = shift;
$schema->resultset('Users')->create({
The arguments that get passed to L<SQL::Translator> when it's used.
=attr script_directory

The directory (default C<'sql'>) that scripts are stored in
=attr databases
The version the schema on your harddrive is at. Defaults to
C<< $self->schema->schema_version >>.
=begin comment

=head2 __ddl_consume_with_prefix
$dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )
files in the order that they should be run for a generic "type" of upgrade.
You should not be calling this in user code.
=head2 _ddl_schema_consume_filenames
$dm->__ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for an
initial deploy.
=head2 _ddl_schema_produce_filename
$dm->__ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )
Returns a single file in which an initial schema will be stored.
=head2 _ddl_schema_up_consume_filenames
$dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for an
upgrade.
=head2 _ddl_schema_down_consume_filenames
$dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )
Just a curried L</__ddl_consume_with_prefix>. Get's a list of files for a
downgrade.
=head2 _ddl_schema_up_produce_filenames
$dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to upgrade from one schema to another
will be stored.
=head2 _ddl_schema_down_produce_filename
$dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )
Returns a single file in which the sql to downgrade from one schema to another
will be stored.
=head2 _resultsource_install_filename
my $filename_fn = $dm->_resultsource_install_filename('User');
$dm->$filename_fn('SQLite', '1.00')
Returns a function which in turn returns a single filename used to install a
single resultsource. Weird interface is convenient for me. Deal with it.
=head2 _run_sql_and_perl
$dm->_run_sql_and_perl([qw( list of filenames )])
Depending on L</txn_wrap> all of the files run will be wrapped in a single
transaction.
=head2 _prepare_install
$dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' })
L<SQL::Translator> args and the second is a coderef that returns the filename
to store the sql in.
=head2 _prepare_changegrade
$dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up')
L<version set|DBIx::Class::DeploymentHandler/VERSION SET>, and last is the
direction of the changegrade, be it 'up' or 'down'.
=head2 _read_sql_file
$dm->_read_sql_file('foo.sql')
Reads a sql file and returns lines in an C<ArrayRef>. Strips out comments,
transactions, and blank lines.
=end comment