X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?p=dbsrgits%2FDBIx-Class-DeploymentHandler.git;a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FDeploymentHandler%2FDeployMethod%2FSQL%2FTranslator.pm;h=3dc9d9be94cece03e4c067177e2661978750335c;hp=c56243ba4be0adf102bdc0d906a3de90250b9890;hb=f9c6ab503d63cc70fa884cadb7ed5f105f1a7bc8;hpb=80ff6f6de5995403431369d67fc2e441c134a4b4 diff --git a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm index c56243b..3dc9d9b 100644 --- a/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm +++ b/lib/DBIx/Class/DeploymentHandler/DeployMethod/SQL/Translator.pm @@ -5,6 +5,11 @@ use Moose; use autodie; use Carp qw( carp croak ); +use DBIx::Class::DeploymentHandler::Logger; +use Log::Contextual qw(:log :dlog), -default_logger => + DBIx::Class::DeploymentHandler::Logger->new({ + env_prefix => 'DBICDH' + }); use Method::Signatures::Simple; use Try::Tiny; @@ -20,6 +25,12 @@ use File::Spec::Functions; with 'DBIx::Class::DeploymentHandler::HandlesDeploy'; +has ignore_ddl => ( + isa => 'Bool', + is => 'ro', + default => undef, +); + has schema => ( isa => 'DBIx::Class::Schema', is => 'ro', @@ -43,7 +54,7 @@ has sql_translator_args => ( is => 'ro', default => sub { {} }, ); -has upgrade_directory => ( +has script_directory => ( isa => 'Str', is => 'ro', required => 1, @@ -65,13 +76,17 @@ has txn_wrap => ( has schema_version => ( is => 'ro', + isa => 'Str', lazy_build => 1, ); +# this will probably never get called as the DBICDH +# will be passing down a schema_version normally, which +# is built the same way, but we leave this in place method _build_schema_version { $self->schema->schema_version } method __ddl_consume_with_prefix($type, $versions, $prefix) { - my $base_dir = $self->upgrade_directory; + my $base_dir = $self->script_directory; my $main = catfile( $base_dir, $type ); my $generic = catfile( $base_dir, '_generic' ); @@ -87,10 +102,17 @@ method __ddl_consume_with_prefix($type, $versions, $prefix) { croak "neither $main or $generic exist; please write/generate some SQL"; } - opendir my($dh), $dir; - my %files = map { $_ => "$dir/$_" } grep { /\.(?:sql|pl)$/ && -f "$dir/$_" } readdir $dh; - closedir $dh; - + my %files; + try { + opendir my($dh), $dir; + %files = + map { $_ => "$dir/$_" } + grep { /\.(?:sql|pl|sql-\w+)$/ && -f "$dir/$_" } + readdir $dh; + closedir $dh; + } catch { + die $_ unless $self->ignore_ddl; + }; if (-d $common) { opendir my($dh), $common; for my $filename (grep { /\.(?:sql|pl)$/ && -f catfile($common,$_) } readdir $dh) { @@ -112,8 +134,43 @@ method _ddl_schema_consume_filenames($type, $version) { $self->__ddl_consume_with_prefix($type, [ $version ], 'schema') } +method _ddl_protoschema_up_consume_filenames($versions) { + my $base_dir = $self->script_directory; + + my $dir = catfile( $base_dir, '_protoschema', 'up', join q(-), @{$versions}); + + return [] unless -d $dir; + + opendir my($dh), $dir; + my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; + closedir $dh; + + return [@files{sort keys %files}] +} + +method _ddl_protoschema_down_consume_filenames($versions) { + my $base_dir = $self->script_directory; + + my $dir = catfile( $base_dir, '_protoschema', 'down', join q(-), @{$versions}); + + return [] unless -d $dir; + + opendir my($dh), $dir; + my %files = map { $_ => "$dir/$_" } grep { /\.pl$/ && -f "$dir/$_" } readdir $dh; + closedir $dh; + + return [@files{sort keys %files}] +} + +method 
_ddl_protoschema_produce_filename($version) { + my $dirname = catfile( $self->script_directory, '_protoschema', 'schema', $version ); + mkpath($dirname) unless -d $dirname; + + return catfile( $dirname, '001-auto.yml' ); +} + method _ddl_schema_produce_filename($type, $version) { - my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); + my $dirname = catfile( $self->script_directory, $type, 'schema', $version ); mkpath($dirname) unless -d $dirname; return catfile( $dirname, '001-auto.sql' ); @@ -128,13 +185,12 @@ method _ddl_schema_down_consume_filenames($type, $versions) { } method _ddl_schema_up_produce_filename($type, $versions) { - my $dir = $self->upgrade_directory; + my $dir = $self->script_directory; my $dirname = catfile( $dir, $type, 'up', join q(-), @{$versions}); mkpath($dirname) unless -d $dirname; - return catfile( $dirname, '001-auto.sql' - ); + return catfile( $dirname, '001-auto.sql' ); } method _ddl_schema_down_produce_filename($type, $versions, $dir) { @@ -144,45 +200,70 @@ method _ddl_schema_down_produce_filename($type, $versions, $dir) { return catfile( $dirname, '001-auto.sql'); } -method _run_sql_and_perl($filenames) { - my @files = @{$filenames}; +method _run_sql_array($sql) { my $storage = $self->storage; + $sql = [grep { + $_ && # remove blank lines + !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/ # strip txn's + } map { + s/^\s+//; s/\s+$//; # trim whitespace + join '', grep { !/^--/ } split /\n/ # remove comments + } @$sql]; + + Dlog_trace { "Running SQL $_" } $sql; + foreach my $line (@{$sql}) { + $storage->_query_start($line); + # the whole reason we do this is so that we can see the line that was run + try { + $storage->dbh_do (sub { $_[1]->do($line) }); + } + catch { + die "$_ (running line '$line')" + } + $storage->_query_end($line); + } + return join "\n", @$sql +} + +method _run_sql($filename) { + log_debug { "Running SQL from $filename" }; + return $self->_run_sql_array($self->_read_sql_file($filename)); +} - my $guard = $self->schema->txn_scope_guard if $self->txn_wrap; +method _run_perl($filename) { + log_debug { "Running Perl from $filename" }; + my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; - my $sql; - for my $filename (@files) { - if ($filename =~ /\.sql$/) { - my @sql = @{$self->_read_sql_file($filename)}; - $sql .= join "\n", @sql; - - foreach my $line (@sql) { - $storage->_query_start($line); - try { - # do a dbh_do cycle here, as we need some error checking in - # place (even though we will ignore errors) - $storage->dbh_do (sub { $_[1]->do($line) }); - } - catch { - carp "$_ (running '${line}')" - } - $storage->_query_end($line); - } - } elsif ( $filename =~ /^(.+)\.pl$/ ) { - my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; + no warnings 'redefine'; + my $fn = eval "$filedata"; + use warnings; + Dlog_trace { "Running Perl $_" } $fn; - no warnings 'redefine'; - my $fn = eval "$filedata"; - use warnings; + if ($@) { + carp "$filename failed to compile: $@"; + } elsif (ref $fn eq 'CODE') { + $fn->($self->schema) + } else { + carp "$filename should define an anonymouse sub that takes a schema but it didn't!"; + } +} - if ($@) { - carp "$filename failed to compile: $@"; - } elsif (ref $fn eq 'CODE') { - $fn->($self->schema) - } else { - carp "$filename should define an anonymouse sub that takes a schema but it didn't!"; - } +method _run_sql_and_perl($filenames, $sql_to_run) { + my @files = @{$filenames}; + my $guard = $self->schema->txn_scope_guard if $self->txn_wrap; + + $self->_run_sql_array($sql_to_run) if 
$self->ignore_ddl; + + my $sql = ($sql_to_run)?join ";\n", @$sql_to_run:''; + FILENAME: + for my $filename (@files) { + if ($self->ignore_ddl && $filename =~ /^[^_]*-auto.*\.sql$/) { + next FILENAME + } elsif ($filename =~ /\.sql$/) { + $sql .= $self->_run_sql($filename) + } elsif ( $filename =~ /\.pl$/ ) { + $self->_run_perl($filename) } else { croak "A file ($filename) got to deploy that wasn't sql or perl!"; } @@ -195,20 +276,30 @@ method _run_sql_and_perl($filenames) { sub deploy { my $self = shift; - my $version = shift || $self->schema_version; - + my $version = (shift @_ || {})->{version} || $self->schema_version; + log_info { "deploying version $version" }; + my $sqlt_type = $self->storage->sqlt_type; + my $sql; + if ($self->ignore_ddl) { + $sql = $self->_sql_from_yaml({}, + '_ddl_protoschema_produce_filename', $sqlt_type + ); + } return $self->_run_sql_and_perl($self->_ddl_schema_consume_filenames( - $self->storage->sqlt_type, + $sqlt_type, $version, - )); + ), $sql); } sub preinstall { - my $self = shift; - my $version = shift || $self->schema_version; + my $self = shift; + my $args = shift; + my $version = $args->{version} || $self->schema_version; + log_info { "preinstalling version $version" }; + my $storage_type = $args->{storage_type} || $self->storage->sqlt_type; my @files = @{$self->_ddl_preinstall_consume_filenames( - $self->storage->sqlt_type, + $storage_type, $version, )}; @@ -217,13 +308,13 @@ sub preinstall { if ( $filename =~ /^(.+)\.pl$/ ) { my $filedata = do { local( @ARGV, $/ ) = $filename; <> }; - no warnings 'redefine'; + no warnings 'redefine'; my $fn = eval "$filedata"; use warnings; - if ($@) { + if ($@) { carp "$filename failed to compile: $@"; - } elsif (ref $fn eq 'CODE') { + } elsif (ref $fn eq 'CODE') { $fn->() } else { carp "$filename should define an anonymous sub but it didn't!"; @@ -234,44 +325,116 @@ sub preinstall { } } -sub _prepare_install { - my $self = shift; - my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; - my $to_file = shift; +method _sqldiff_from_yaml($from_version, $to_version, $db, $direction) { + my $dir = $self->script_directory; + my $sqltargs = { + add_drop_table => 1, + ignore_constraint_names => 1, + ignore_index_names => 1, + %{$self->sql_translator_args} + }; + + my $source_schema; + { + my $prefilename = $self->_ddl_protoschema_produce_filename($from_version, $dir); + + # should probably be a croak + carp("No previous schema file found ($prefilename)") + unless -e $prefilename; + + my $t = SQL::Translator->new({ + %{$sqltargs}, + debug => 0, + trace => 0, + parser => 'SQL::Translator::Parser::YAML', + }); + + my $out = $t->translate( $prefilename ) + or croak($t->error); + + $source_schema = $t->schema; + + $source_schema->name( $prefilename ) + unless $source_schema->name; + } + + my $dest_schema; + { + my $filename = $self->_ddl_protoschema_produce_filename($to_version, $dir); + + # should probably be a croak + carp("No next schema file found ($filename)") + unless -e $filename; + + my $t = SQL::Translator->new({ + %{$sqltargs}, + debug => 0, + trace => 0, + parser => 'SQL::Translator::Parser::YAML', + }); + + my $out = $t->translate( $filename ) + or croak($t->error); + + $dest_schema = $t->schema; + + $dest_schema->name( $filename ) + unless $dest_schema->name; + } + + my $transform_files_method = "_ddl_protoschema_${direction}_consume_filenames"; + my $transforms = $self->_coderefs_per_files( + $self->$transform_files_method([$from_version, $to_version]) + ); + $_->($source_schema, $dest_schema) for 
@$transforms; + + return [SQL::Translator::Diff::schema_diff( + $source_schema, $db, + $dest_schema, $db, + $sqltargs + )]; +} + +method _sql_from_yaml($sqltargs, $from_file, $db) { my $schema = $self->schema; - my $databases = $self->databases; - my $dir = $self->upgrade_directory; my $version = $self->schema_version; my $sqlt = SQL::Translator->new({ - add_drop_table => 1, - ignore_constraint_names => 1, - ignore_index_names => 1, - parser => 'SQL::Translator::Parser::DBIx::Class', - %{$sqltargs} + add_drop_table => 0, + parser => 'SQL::Translator::Parser::YAML', + %{$sqltargs}, + producer => $db, }); - my $sqlt_schema = $sqlt->translate( data => $schema ) - or croak($sqlt->error); + my $yaml_filename = $self->$from_file($version); + + my @sql = $sqlt->translate($yaml_filename); + if(!@sql) { + carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); + return undef; + } + return \@sql; +} + +sub _prepare_install { + my $self = shift; + my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; + my $from_file = shift; + my $to_file = shift; + my $dir = $self->script_directory; + my $databases = $self->databases; + my $version = $self->schema_version; foreach my $db (@$databases) { - $sqlt->reset; - $sqlt->{schema} = $sqlt_schema; - $sqlt->producer($db); + my $sql = $self->_sql_from_yaml($sqltargs, $from_file, $db ) or next; my $filename = $self->$to_file($db, $version, $dir); if (-e $filename ) { carp "Overwriting existing DDL file - $filename"; unlink $filename; } - - my $output = $sqlt->translate; - if(!$output) { - carp("Failed to translate to $db, skipping. (" . $sqlt->error . ")"); - next; - } open my $file, q(>), $filename; - print {$file} $output; + print {$file} join ";\n", @$sql; close $file; } } @@ -280,16 +443,29 @@ sub _resultsource_install_filename { my ($self, $source_name) = @_; return sub { my ($self, $type, $version) = @_; - my $dirname = catfile( $self->upgrade_directory, $type, 'schema', $version ); + my $dirname = catfile( $self->script_directory, $type, 'schema', $version ); mkpath($dirname) unless -d $dirname; return catfile( $dirname, "001-auto-$source_name.sql" ); } } -sub install_resultsource { - my ($self, $source, $version) = @_; +sub _resultsource_protoschema_filename { + my ($self, $source_name) = @_; + return sub { + my ($self, $version) = @_; + my $dirname = catfile( $self->script_directory, '_protoschema', $version ); + mkpath($dirname) unless -d $dirname; + return catfile( $dirname, "001-auto-$source_name.yml" ); + } +} + +sub install_resultsource { + my ($self, $args) = @_; + my $source = $args->{result_source}; + my $version = $args->{version}; + log_info { 'installing_resultsource ' . $source->source_name . ", version $version" }; my $rs_install_file = $self->_resultsource_install_filename($source->source_name); @@ -304,120 +480,65 @@ sub install_resultsource { sub prepare_resultsource_install { my $self = shift; - my $source = shift; + my $source = (shift @_)->{result_source}; + log_info { 'preparing install for resultsource ' . 
$source->source_name }; - my $filename = $self->_resultsource_install_filename($source->source_name); - $self->_prepare_install({ + my $install_filename = $self->_resultsource_install_filename($source->source_name); + my $proto_filename = $self->_resultsource_protoschema_filename($source->source_name); + $self->prepare_protoschema({ parser_args => { sources => [$source->source_name], } - }, $filename); + }, $proto_filename); + $self->_prepare_install({}, $proto_filename, $install_filename); } sub prepare_deploy { + log_info { 'preparing deploy' }; my $self = shift; - $self->_prepare_install({}, '_ddl_schema_produce_filename'); + $self->prepare_protoschema({}, '_ddl_protoschema_produce_filename'); + $self->_prepare_install({}, '_ddl_protoschema_produce_filename', '_ddl_schema_produce_filename'); } sub prepare_upgrade { - my ($self, $from_version, $to_version, $version_set) = @_; - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'up'); + my ($self, $args) = @_; + log_info { + "preparing upgrade from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'up' + ); } sub prepare_downgrade { - my ($self, $from_version, $to_version, $version_set) = @_; + my ($self, $args) = @_; + log_info { + "preparing downgrade from $args->{from_version} to $args->{to_version}" + }; + $self->_prepare_changegrade( + $args->{from_version}, $args->{to_version}, $args->{version_set}, 'down' + ); +} - $self->_prepare_changegrade($from_version, $to_version, $version_set, 'down'); +method _coderefs_per_files($files) { + no warnings 'redefine'; + [map eval do { local( @ARGV, $/ ) = $_; <> }, @$files] } method _prepare_changegrade($from_version, $to_version, $version_set, $direction) { my $schema = $self->schema; my $databases = $self->databases; - my $dir = $self->upgrade_directory; - my $sqltargs = $self->sql_translator_args; + my $dir = $self->script_directory; my $schema_version = $self->schema_version; - - $sqltargs = { - add_drop_table => 1, - ignore_constraint_names => 1, - ignore_index_names => 1, - %{$sqltargs} - }; - - my $sqlt = SQL::Translator->new( $sqltargs ); - - $sqlt->parser('SQL::Translator::Parser::DBIx::Class'); - my $sqlt_schema = $sqlt->translate( data => $schema ) - or croak($sqlt->error); - + my $diff_file_method = "_ddl_schema_${direction}_produce_filename"; foreach my $db (@$databases) { - $sqlt->reset; - $sqlt->{schema} = $sqlt_schema; - $sqlt->producer($db); - - my $prefilename = $self->_ddl_schema_produce_filename($db, $from_version, $dir); - unless(-e $prefilename) { - carp("No previous schema file found ($prefilename)"); - next; - } - my $diff_file_method = "_ddl_schema_${direction}_produce_filename"; my $diff_file = $self->$diff_file_method($db, $version_set, $dir ); if(-e $diff_file) { carp("Overwriting existing $direction-diff file - $diff_file"); unlink $diff_file; } - my $source_schema; - { - my $t = SQL::Translator->new({ - %{$sqltargs}, - debug => 0, - trace => 0, - }); - - $t->parser( $db ) # could this really throw an exception? - or croak($t->error); - - my $out = $t->translate( $prefilename ) - or croak($t->error); - - $source_schema = $t->schema; - - $source_schema->name( $prefilename ) - unless $source_schema->name; - } - - # The "new" style of producers have sane normalization and can support - # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't - # And we have to diff parsed SQL against parsed SQL. 
- my $dest_schema = $sqlt_schema; - - unless ( "SQL::Translator::Producer::$db"->can('preprocess_schema') ) { - my $t = SQL::Translator->new({ - %{$sqltargs}, - debug => 0, - trace => 0, - }); - - $t->parser( $db ) # could this really throw an exception? - or croak($t->error); - - my $filename = $self->_ddl_schema_produce_filename($db, $to_version, $dir); - my $out = $t->translate( $filename ) - or croak($t->error); - - $dest_schema = $t->schema; - - $dest_schema->name( $filename ) - unless $dest_schema->name; - } - - my $diff = SQL::Translator::Diff::schema_diff( - $source_schema, $db, - $dest_schema, $db, - $sqltargs - ); open my $file, q(>), $diff_file; - print {$file} $diff; + print {$file} join ";\n", @{$self->_sqldiff_from_yaml($from_version, $to_version, $db, $direction)}; close $file; } } @@ -442,27 +563,74 @@ method _read_sql_file($file) { sub downgrade_single_step { my $self = shift; - my $version_set = shift @_; - + my $version_set = (shift @_)->{version_set}; + Dlog_info { "downgrade_single_step'ing $_" } $version_set; + + my $sqlt_type = $self->storage->sqlt_type; + my $sql_to_run; + if ($self->ignore_ddl) { + $sql_to_run = $self->_sqldiff_from_yaml( + $version_set->[0], $version_set->[1], $sqlt_type, 'down', + ); + } my $sql = $self->_run_sql_and_perl($self->_ddl_schema_down_consume_filenames( - $self->storage->sqlt_type, + $sqlt_type, $version_set, - )); + ), $sql_to_run); return ['', $sql]; } sub upgrade_single_step { my $self = shift; - my $version_set = shift @_; - + my $version_set = (shift @_)->{version_set}; + Dlog_info { "upgrade_single_step'ing $_" } $version_set; + + my $sqlt_type = $self->storage->sqlt_type; + my $sql_to_run; + if ($self->ignore_ddl) { + $sql_to_run = $self->_sqldiff_from_yaml( + $version_set->[0], $version_set->[1], $sqlt_type, 'up', + ); + } my $sql = $self->_run_sql_and_perl($self->_ddl_schema_up_consume_filenames( - $self->storage->sqlt_type, + $sqlt_type, $version_set, - )); + ), $sql_to_run); return ['', $sql]; } +sub prepare_protoschema { + my $self = shift; + my $sqltargs = { %{$self->sql_translator_args}, %{shift @_} }; + my $to_file = shift; + my $filename + = $self->$to_file($self->schema_version); + + # we do this because the code that uses this sets parser args, + # so we just need to merge in the package + $sqltargs->{parser_args}{package} = $self->schema; + my $sqlt = SQL::Translator->new({ + parser => 'SQL::Translator::Parser::DBIx::Class', + producer => 'SQL::Translator::Producer::YAML', + %{ $sqltargs }, + }); + + my $yml = $sqlt->translate; + + croak("Failed to translate to YAML: " . $sqlt->error) + unless $yml; + + if (-e $filename ) { + carp "Overwriting existing DDL-YML file - $filename"; + unlink $filename; + } + + open my $file, q(>), $filename; + print {$file} $yml; + close $file; +} + __PACKAGE__->meta->make_immutable; 1; @@ -473,12 +641,12 @@ __END__ =head1 DESCRIPTION -This class is the meat of L. It takes care of -generating sql files representing schemata as well as sql files to move from -one version of a schema to the rest. One of the hallmark features of this -class is that it allows for multiple sql files for deploy and upgrade, allowing -developers to fine tune deployment. In addition it also allows for perl files -to be run at any stage of the process. +This class is the meat of L. It takes care +of generating serialized schemata as well as sql files to move from one +version of a schema to the rest. 
One of the hallmark features of this class
+is that it allows for multiple sql files for deploy and upgrade, allowing
+developers to fine tune deployment.  In addition it also allows for perl
+files to be run at any stage of the process.

For basic usage see L<DBIx::Class::DeploymentHandler>.  What's documented here
is extra fun stuff or private methods.

@@ -548,9 +716,8 @@ C<$sql_migration_dir/SQLite/schema/1/001-auto.sql>.

Next, would run C<$sql_migration_dir/SQLite/up/1-2/001-auto.sql> followed by
C<$sql_migration_dir/_common/up/1-2/002-generate-customers.pl>.

-Now, a C<.pl> file doesn't have to be in the C<_common> directory, but most of
-the time it probably should be, since perl scripts will mostly be database
-independent.
+C<.pl> files don't have to be in the C<_common> directory, but most of the time
+they should be, because perl scripts are generally database independent.

C<_generic> exists for when you for some reason are sure that your SQL is
generic enough to run on all databases.  Good luck with that one.

@@ -596,9 +763,9 @@ and generate the DDL.

This is automatically created with L</_build_storage>.

The arguments that get passed to L<SQL::Translator> when it's used.

-=attr upgrade_directory
+=attr script_directory

-The directory (default C<'sql'>) that upgrades are stored in
+The directory (default C<'sql'>) that scripts are stored in

=attr databases

@@ -615,7 +782,9 @@ transaction.

The version the schema on your hard drive is at.  Defaults to
C<< $self->schema->schema_version >>.

-=method __ddl_consume_with_prefix
+=begin comment
+
+=head2 __ddl_consume_with_prefix

  $dm->__ddl_consume_with_prefix( 'SQLite', [qw( 1.00 1.01 )], 'up' )

This is the meat of the multi-file upgrade/deploy stuff.  It returns a list of
files in the order that they should be run for a generic "type" of upgrade.
You should not be calling this in user code.

-=method _ddl_schema_consume_filenames
+=head2 _ddl_schema_consume_filenames

  $dm->_ddl_schema_consume_filenames( 'SQLite', [qw( 1.00 )] )

Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
initial deploy.

-=method _ddl_schema_produce_filename
+=head2 _ddl_schema_produce_filename

  $dm->_ddl_schema_produce_filename( 'SQLite', [qw( 1.00 )] )

Returns a single file in which an initial schema will be stored.

-=method _ddl_schema_up_consume_filenames
+=head2 _ddl_schema_up_consume_filenames

  $dm->_ddl_schema_up_consume_filenames( 'SQLite', [qw( 1.00 )] )

Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for an
upgrade.

-=method _ddl_schema_down_consume_filenames
+=head2 _ddl_schema_down_consume_filenames

  $dm->_ddl_schema_down_consume_filenames( 'SQLite', [qw( 1.00 )] )

Just a curried L</__ddl_consume_with_prefix>.  Gets a list of files for a
downgrade.

-=method _ddl_schema_up_produce_filenames
+=head2 _ddl_schema_up_produce_filename

  $dm->_ddl_schema_up_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )

Returns a single file in which the sql to upgrade from one schema to another
will be stored.

-=method _ddl_schema_down_produce_filename
+=head2 _ddl_schema_down_produce_filename

  $dm->_ddl_schema_down_produce_filename( 'SQLite', [qw( 1.00 1.01 )] )

Returns a single file in which the sql to downgrade from one schema to another
will be stored.

-=method _resultsource_install_filename
+=head2 _resultsource_install_filename

  my $filename_fn = $dm->_resultsource_install_filename('User');
  $dm->$filename_fn('SQLite', '1.00')

Returns a function which in turn returns a single filename used to install a
single resultsource.  Weird interface is convenient for me.  Deal with it.

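(An aside on the C<.pl> files that the methods below execute: per C<_run_perl>
in the diff above, such a file must evaluate to an anonymous sub, which is then
called with the schema.  A minimal sketch follows; the path, the C<Customer>
resultset and its C<name> column are made up purely for illustration.)

  # e.g. sql/_common/up/1-2/002-generate-customers.pl (hypothetical file)
  # The last expression in the file must be a coderef; _run_perl eval()s the
  # file contents and calls the returned sub with $self->schema.
  sub {
    my $schema = shift;

    # illustrative only -- assumes a Customer source with a 'name' column
    $schema->resultset('Customer')->create({ name => 'Example Customer' });
  };
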
-=method _run_sql_and_perl +=head2 _run_sql_and_perl $dm->_run_sql_and_perl([qw( list of filenames )]) @@ -682,7 +851,7 @@ C<.sql> it runs it as sql and if it ends in C<.pl> it runs it as a perl file. Depending on L all of the files run will be wrapped in a single transaction. -=method _prepare_install +=head2 _prepare_install $dm->_prepare_install({ add_drop_table => 0 }, sub { 'file_to_create' }) @@ -690,7 +859,7 @@ Generates the sql file for installing the database. First arg is simply L args and the second is a coderef that returns the filename to store the sql in. -=method _prepare_changegrade +=head2 _prepare_changegrade $dm->_prepare_changegrade('1.00', '1.01', [qw( 1.00 1.01)], 'up') @@ -699,10 +868,11 @@ arg is the version to start from, second is the version to go to, third is the L, and last is the direction of the changegrade, be it 'up' or 'down'. -=method _read_sql_file +=head2 _read_sql_file $dm->_read_sql_file('foo.sql') Reads a sql file and returns lines in an C. Strips out comments, transactions, and blank lines. +=end comment