diff --git a/lib/DBIx/Class/Fixtures.pm b/lib/DBIx/Class/Fixtures.pm
index a805c96..2443997 100644
--- a/lib/DBIx/Class/Fixtures.pm
+++ b/lib/DBIx/Class/Fixtures.pm
@@ -6,7 +6,8 @@ use warnings;
 use DBIx::Class 0.08100;
 use DBIx::Class::Exception;
 use Class::Accessor::Grouped;
-use Path::Class qw(dir file);
+use Path::Class qw(dir file tempdir);
+use File::Spec::Functions 'catfile', 'catdir';
 use Config::Any::JSON;
 use Data::Dump::Streamer;
 use Data::Visitor::Callback;
@@ -16,25 +17,22 @@ use File::Copy qw/move/;
 use Hash::Merge qw( merge );
 use Data::Dumper;
 use Class::C3::Componentised;
+use MIME::Base64;
 
 use base qw(Class::Accessor::Grouped);
 
 our $namespace_counter = 0;
 
 __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
-    _inherited_attributes debug schema_class dumped_objects/);
+    _inherited_attributes debug schema_class dumped_objects config_attrs/);
 
-=head1 VERSION
+our $VERSION = '1.001025';
 
-Version 1.001000
-
-=cut
-
-our $VERSION = '1.001003';
+$VERSION = eval $VERSION;
 
 =head1 NAME
 
-DBIx::Class::Fixtures
+DBIx::Class::Fixtures - Dump data and repopulate a database using rules
 
 =head1 SYNOPSIS
 
@@ -76,18 +74,18 @@ describe which data to pull and dump from the source database. For example:
 
     {
-        sets: [
+        "sets": [
             {
-                class: 'Artist',
-                ids: ['1', '3']
+                "class": "Artist",
+                "ids": ["1", "3"]
             },
             {
-                class: 'Producer',
-                ids: ['5'],
-                fetch: [
+                "class": "Producer",
+                "ids": ["5"],
+                "fetch": [
                     {
-                        rel: 'artists',
-                        quantity: '2'
+                        "rel": "artists",
+                        "quantity": "2"
                     }
                 ]
             }
         ]
     }
 
@@ -113,27 +111,27 @@ was dumped you also wanted all of their cds dumped too, then you could use a
 rule to specify this. For example:
 
     {
-        sets: [
+        "sets": [
            {
-                class: 'Artist',
-                ids: ['1', '3']
+                "class": "Artist",
+                "ids": ["1", "3"]
            },
            {
-                class: 'Producer',
-                ids: ['5'],
-                fetch: [
+                "class": "Producer",
+                "ids": ["5"],
+                "fetch": [
                    {
-                       rel: 'artists',
-                       quantity: '2'
+                       "rel": "artists",
+                       "quantity": "2"
                    }
                 ]
            }
         ],
-        rules: {
-            Artist: {
-                fetch: [ {
-                    rel: 'cds',
-                    quantity: 'all'
+        "rules": {
+            "Artist": {
+                "fetch": [ {
+                    "rel": "cds",
+                    "quantity": "all"
                 } ]
             }
         }
     }
 
@@ -144,24 +142,24 @@ dumped as well. Note that 'cds' is a has_many DBIx::Class relation from
 Artist to CD. This is equivalent to:
 
     {
-        sets: [
+        "sets": [
            {
-                class: 'Artist',
-                ids: ['1', '3'],
-                fetch: [ {
-                    rel: 'cds',
-                    quantity: 'all'
+                "class": "Artist",
+                "ids": ["1", "3"],
+                "fetch": [ {
+                    "rel": "cds",
+                    "quantity": "all"
                 } ]
            },
            {
-                class: 'Producer',
-                ids: ['5'],
-                fetch: [ {
-                    rel: 'artists',
-                    quantity: '2',
-                    fetch: [ {
-                        rel: 'cds',
-                        quantity: 'all'
+                "class": "Producer",
+                "ids": ["5"],
+                "fetch": [ {
+                    "rel": "artists",
+                    "quantity": "2",
+                    "fetch": [ {
+                        "rel": "cds",
+                        "quantity": "all"
                     } ]
                 } ]
            }
         ]
     }
 
@@ -178,12 +176,12 @@ To prevent repetition between configs you can include other configs. For
 example:
 
     {
-        sets: [ {
-            class: 'Producer',
-            ids: ['5']
+        "sets": [ {
+            "class": "Producer",
+            "ids": ["5"]
         } ],
-        includes: [
-            { file: 'base.json' }
+        "includes": [
+            { "file": "base.json" }
         ]
     }
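As a concrete sketch of how inclusion composes (the contents shown for F<base.json> are hypothetical), the shared file might hold just the common sets:

    {
        "sets": [ {
            "class": "Artist",
            "ids": ["1", "3"]
        } ]
    }

so the including config above ends up dumping both its own Producer set and the Artist set pulled in from F<base.json>; included files are read and combined with the including config, presumably via the L<Hash::Merge> import visible at the top of the file.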
@@ -197,11 +195,11 @@ Only available for MySQL and PostgreSQL at the moment, must be a value that
 DateTime::Format::* can parse. For example:
 
     {
-        sets: [ {
-            class: 'RecentItems',
-            ids: ['9']
+        "sets": [ {
+            "class": "RecentItems",
+            "ids": ["9"]
         } ],
-        datetime_relative : "2007-10-30 00:00:00"
+        "datetime_relative": "2007-10-30 00:00:00"
     }
 
 This will work when dumping from a MySQL database and will cause any datetime
 
@@ -217,15 +215,15 @@ Specifies whether to automatically dump might_have relationships. Should be a
 hash with one attribute - fetch. Set fetch to 1 or 0.
 
     {
-        might_have: { fetch: 1 },
-        sets: [
+        "might_have": { "fetch": 1 },
+        "sets": [
            {
-                class: 'Artist',
-                ids: ['1', '3']
+                "class": "Artist",
+                "ids": ["1", "3"]
            },
            {
-                class: 'Producer',
-                ids: ['5']
+                "class": "Producer",
+                "ids": ["5"]
            }
         ]
     }
 
@@ -260,10 +258,10 @@ A hash specifying the conditions dumped objects must match. Essentially this is
 a JSON representation of a DBIx::Class search clause. For example:
 
     {
-        sets: [{
-            class: 'Artist',
-            quantiy: 'all',
-            cond: { name: 'Dave' }
+        "sets": [{
+            "class": "Artist",
+            "quantity": "all",
+            "cond": { "name": "Dave" }
         }]
     }
 
@@ -277,10 +275,10 @@ Sometimes in a search clause it's useful to use scalar refs to do things like:
 
 This could be specified in the cond hash like so:
 
     {
-        sets: [ {
-            class: 'Artist',
-            quantiy: 'all',
-            cond: { no1_singles: '\> no1_albums' }
+        "sets": [ {
+            "class": "Artist",
+            "quantity": "all",
+            "cond": { "no1_singles": "\> no1_albums" }
        } ]
     }
 
@@ -292,11 +290,11 @@ being passed to search.
 
 An array of relationships to be used in the cond clause.
 
     {
-        sets: [ {
-            class: 'Artist',
-            quantiy: 'all',
-            cond: { 'cds.position': { '>': 4 } },
-            join: ['cds']
+        "sets": [ {
+            "class": "Artist",
+            "quantity": "all",
+            "cond": { "cds.position": { ">": 4 } },
+            "join": ["cds"]
        } ]
     }
 
 Fetch all artists who have cds with position greater than 4.
 
@@ -307,13 +305,13 @@ Must be an array of hashes. Specifies which rels to also dump. For example:
 
     {
-        sets: [ {
-            class: 'Artist',
-            ids: ['1', '3'],
-            fetch: [ {
-                rel: 'cds',
-                quantity: '3',
-                cond: { position: '2' }
+        "sets": [ {
+            "class": "Artist",
+            "ids": ["1", "3"],
+            "fetch": [ {
+                "rel": "cds",
+                "quantity": "3",
+                "cond": { "position": "2" }
            } ]
        } ]
     }
 
@@ -343,6 +341,40 @@ in this case.
 
 This value will be inherited by all fetches in this set. This is
 not true for the has_many attribute.
 
+=head2 external
+
+In some cases your database information might be keys to values in some sort
+of external storage. The classic example is using
+L<DBIx::Class::InflateColumn::FS> to store blob information on the filesystem.
+In this case you may want the ability to back up your external storage in the
+same way as your database data. The L</external> attribute lets you specify a
+handler for this type of issue. For example:
+
+    {
+        "sets": [{
+            "class": "Photo",
+            "quantity": "all",
+            "external": {
+                "file": {
+                    "class": "File",
+                    "args": {"path":"__ATTR(photo_dir)__"}
+                }
+            }
+        }]
+    }
+
+This would use L<DBIx::Class::Fixtures::External::File> to read from a
+directory where the path to a file is specified by the C<file> field of the
+C<Photo> source. We use the uninflated value of the field, so you need to
+completely handle backup and restore. For the common case we provide
+L<DBIx::Class::Fixtures::External::File>, and you can create your own custom
+handlers by placing a '+' in the namespace:
+
+    "class": "+MyApp::Schema::SomeExternalStorage",
+
+Although if possible I'd love to get patches to add some of the other common
+types (I imagine storage in MogileFS, Redis, etc or even Amazon might be
+popular).
+
+See L<DBIx::Class::Fixtures::External::File> for the external handler
+interface.
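The handler interface implied by the dump and populate code later in this diff is two class methods: C<backup($key => $args)>, which returns the raw content for a key (the dumper base64-encodes the result into the fixture), and C<restore($key, $content, $args)>, which writes it back at populate time. A minimal custom handler might look like the following sketch (the package name and the C<path> argument are hypothetical):

    package MyApp::Schema::SomeExternalStorage;

    use strict;
    use warnings;
    use File::Spec;

    # Dump time: given the uninflated column value and the "args" hash
    # from the config, return the raw blob to store in the fixture.
    sub backup {
        my ($class, $key, $args) = @_;
        open my $fh, '<:raw', File::Spec->catfile($args->{path}, $key)
            or die "cannot read '$key': $!";
        local $/;
        my $content = <$fh>;
        return $content;
    }

    # Populate time: given the same key, the decoded blob and the same
    # "args" hash, write the content back out to external storage.
    sub restore {
        my ($class, $key, $content, $args) = @_;
        open my $fh, '>:raw', File::Spec->catfile($args->{path}, $key)
            or die "cannot write '$key': $!";
        print {$fh} $content;
        close $fh;
    }

    1;

Such a class would then be referenced from a config exactly as shown above, as C<"class": "+MyApp::Schema::SomeExternalStorage">.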
+
 
 =head1 RULE ATTRIBUTES
 
 =head2 cond
 
@@ -365,6 +397,34 @@ Same as with L</SET ATTRIBUTES>
 
 Same as with L</SET ATTRIBUTES>
 
 Same as with L</SET ATTRIBUTES>
 
+=head1 RULE SUBSTITUTIONS
+
+You can provide the following substitution patterns for your rule values. An
+example of this might be:
+
+    {
+        "sets": [{
+            "class": "Photo",
+            "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
+        }]
+    }
+
+=head2 ENV
+
+Provide a value from %ENV
+
+=head2 ATTR
+
+Provide a value from L</config_attrs>
+
+=head2 catfile
+
+Create the path to a file from a list
+
+=head2 catdir
+
+Create the path to a directory from a list
+
 =head1 METHODS
 
 =head2 new
 
@@ -395,6 +455,32 @@ determines whether to be verbose
 
 ignore errors on import of DDL etc
 
+=item config_attrs
+
+A hash of information you can use to do replacements inside your configuration
+sets. For example, if your set looks like:
+
+    {
+        "sets": [ {
+            "class": "Artist",
+            "ids": ["1", "3"],
+            "fetch": [ {
+                "rel": "cds",
+                "quantity": "__ATTR(quantity)__"
+            } ]
+        } ]
+    }
+
+    my $fixtures = DBIx::Class::Fixtures->new( {
+      config_dir => '/home/me/app/fixture_configs',
+      config_attrs => {
+        quantity => 100,
+      },
+    });
+
+You may wish to do this if you want to let whoever runs the dumps have a bit
+more control.
+
 =back
 
     my $fixtures = DBIx::Class::Fixtures->new( {
@@ -422,10 +508,13 @@ sub new {
 
     my $self = {
                  config_dir => $config_dir,
-                 _inherited_attributes => [qw/datetime_relative might_have rules/],
+                 _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
                  debug => $params->{debug} || 0,
                  ignore_sql_errors => $params->{ignore_sql_errors},
-                 dumped_objects => {}
+                 dumped_objects => {},
+                 use_create => $params->{use_create} || 0,
+                 use_find_or_create => $params->{use_find_or_create} || 0,
+                 config_attrs => $params->{config_attrs} || {},
     };
 
     bless $self, $class;
 
@@ -433,6 +522,22 @@ sub new {
   return $self;
 }
 
+=head2 available_config_sets
+
+Returns a list of all the config sets found in the L</config_dir>. These will
+be a list of the JSON-based files containing dump rules.
+
+=cut
+
+my @config_sets;
+sub available_config_sets {
+  @config_sets = scalar(@config_sets) ? @config_sets : map {
+    $_->basename;
+  } grep {
+    -f $_ && $_=~/json$/;
+  } dir((shift)->config_dir)->children;
+}
+
 =head2 dump
 
 =over 4
 
@@ -465,7 +570,12 @@ directory. For example:
 
  /home/me/app/fixtures/artist/3.fix
  /home/me/app/fixtures/producer/5.fix
 
-schema and directory are required attributes. also, one of config or all must be specified.
+schema and directory are required attributes. Also, one of config or all must
+be specified.
+
+Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
+If this form is used your HashRef should conform to the structure rules defined
+for the JSON representations.
 
 =cut
 
@@ -483,18 +593,32 @@ sub dump {
     }
   }
 
+  if($params->{excludes} && !$params->{all}) {
+    return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
+  }
+
   my $schema = $params->{schema};
   my $config;
   if ($params->{config}) {
-    #read config
-    my $config_file = $self->config_dir->file($params->{config});
-    $config = $self->load_config_file($config_file);
+    $config = ref $params->{config} eq 'HASH' ?
+ $params->{config} : + do { + #read config + my $config_file = $self->config_dir->file($params->{config}); + $self->load_config_file($config_file); + }; } elsif ($params->{all}) { + my %excludes = map {$_=>1} @{$params->{excludes}||[]}; $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, - sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] + sets => [ + map { + { class => $_, quantity => 'all' }; + } grep { + !$excludes{$_} + } $schema->sources], }; } else { DBIx::Class::Exception->throw('must pass config or set all'); @@ -507,7 +631,7 @@ sub dump { } $self->msg("generating fixtures"); - my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<); + my $tmp_output_dir = tempdir(); if (-e $tmp_output_dir) { $self->msg("- clearing existing $tmp_output_dir"); @@ -521,9 +645,20 @@ sub dump { ->openw ->print($VERSION); + # write our current config set + $tmp_output_dir->file('_config_set') + ->openw + ->print( Dumper $config ); + $config->{rules} ||= {}; my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}}; + while ( my ($k,$v) = each %{ $config->{rules} } ) { + if ( my $source = eval { $schema->source($k) } ) { + $config->{rules}{$source->source_name} = $v; + } + } + foreach my $source (@sources) { # apply rule to set if specified my $rule = $config->{rules}->{$source->{class}}; @@ -586,8 +721,7 @@ sub dump { } $self->msg("- moving temp dir to $output_dir"); - move($_, dir($output_dir, $_->relative($_->parent)->stringify)) - for $tmp_output_dir->children; + dircopy($tmp_output_dir, $output_dir); if (-e $output_dir) { $self->msg("- clearing tmp dir $tmp_output_dir"); @@ -654,6 +788,46 @@ sub dump_rs { sub dump_object { my ($self, $object, $params) = @_; my $set = $params->{set}; + + my $v = Data::Visitor::Callback->new( + plain_value => sub { + my ($visitor, $data) = @_; + my $subs = { + ENV => sub { + my ( $self, $v ) = @_; + if (! defined($ENV{$v})) { + return ""; + } else { + return $ENV{ $v }; + } + }, + ATTR => sub { + my ($self, $v) = @_; + if(my $attr = $self->config_attrs->{$v}) { + return $attr; + } else { + return ""; + } + }, + catfile => sub { + my ($self, @args) = @_; + catfile(@args); + }, + catdir => sub { + my ($self, @args) = @_; + catdir(@args); + }, + }; + + my $subsre = join( '|', keys %$subs ); + $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg; + + return $_; + } + ); + + $v->visit( $set ); + die 'no dir passed to dump_object' unless $params->{set_dir}; die 'no object passed to dump_object' unless $object; @@ -670,7 +844,7 @@ sub dump_object { # write dir and gen filename - my $source_dir = $params->{set_dir}->subdir(lc $src->from); + my $source_dir = $params->{set_dir}->subdir($self->_name_for_source($src)); $source_dir->mkpath(0, 0777); # strip dir separators from file name @@ -678,12 +852,26 @@ sub dump_object { join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix' ); - # write file unless ($exists) { $self->msg('-- dumping ' . 
$file->stringify, 2);
 
     my %ds = $object->get_columns;
 
+    if($set->{external}) {
+      foreach my $field (keys %{$set->{external}}) {
+        my $key = $ds{$field};
+        my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+        my $args = $set->{external}->{$field}->{args};
+
+        $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+        eval "use $class";
+
+        $ds{external}->{$field} =
+          encode_base64( $class->backup($key => $args), '' );
+      }
+    }
+
     # mess with dates if specified
     if ($set->{datetime_relative}) {
       my $formatter = $object->result_source->schema->storage->datetime_parser;
@@ -700,7 +888,13 @@
       next unless $value
         && $col_info->{_inflate_info}
-        && uc($col_info->{data_type}) eq 'DATETIME';
+        && (
+            (uc($col_info->{data_type}) eq 'DATETIME')
+              or (uc($col_info->{data_type}) eq 'DATE')
+              or (uc($col_info->{data_type}) eq 'TIME')
+              or (uc($col_info->{data_type}) eq 'TIMESTAMP')
+              or (uc($col_info->{data_type}) eq 'INTERVAL')
+           );
 
       $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
     }
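The widened type check above means C<DATE>, C<TIME>, C<TIMESTAMP> and C<INTERVAL> columns now get the same treatment as C<DATETIME>: each inflatable value is stored in the fixture as the duration between it and the configured C<datetime_relative> reference. A sketch of the arithmetic (dates hypothetical; for a MySQL schema the storage's datetime parser is L<DateTime::Format::MySQL>):

    use DateTime::Format::MySQL;

    # the reference date from the config's datetime_relative value
    my $dt  = DateTime::Format::MySQL->parse_datetime('2007-10-30 00:00:00');

    # a row's datetime column, two days older than the reference
    my $col = DateTime::Format::MySQL->parse_datetime('2007-10-28 00:00:00');

    # what gets written into the .fix file for that column:
    # a DateTime::Duration of minus two days
    my $duration = $col->subtract_datetime($dt);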
@@ -716,7 +910,7 @@
 
   # don't bother looking at rels unless we are actually planning to dump at least one type
   my ($might_have, $belongs_to, $has_many) = map {
-    $set->{$_}{fetch};
+    $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
   } qw/might_have belongs_to has_many/;
 
   return unless $might_have
@@ -851,18 +1045,20 @@ sub _generate_schema {
   unless( $pre_schema ) {
     return DBIx::Class::Exception->throw('connection details not valid');
   }
-  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
+  my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
   $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
   my $dbh = $pre_schema->storage->dbh;
 
   # clear existing db
   $self->msg("- clearing DB of existing tables");
-  $pre_schema->storage->with_deferred_fk_checks(sub {
-    foreach my $table (@tables) {
-      eval {
-        $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
-      };
-    }
+  $pre_schema->storage->txn_do(sub {
+    $pre_schema->storage->with_deferred_fk_checks(sub {
+      foreach my $table (@tables) {
+        eval {
+          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+        };
+      }
+    });
   });
 
   # import new ddl file to db
@@ -895,6 +1091,79 @@ sub _read_sql {
   return \@data;
 }
 
+=head2 dump_config_sets
+
+Works just like L</dump> but instead of specifying a single JSON config set
+located in L</config_dir> we dump each set named in the C<configs> parameter.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template>, which is a coderef expected to
+return a scalar that is a root directory where we will do the actual dumping.
+This coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+    $fixture->dump_config_sets({
+      schema => $schema,
+      configs => [qw/one.json other.json/],
+      directory_template => sub {
+        my ($fixture, $params, $set) = @_;
+        return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+      },
+    });
+
+=cut
+
+sub dump_config_sets {
+  my ($self, $params) = @_;
+  my $available_config_sets = delete $params->{configs};
+  my $directory_template = delete $params->{directory_template} ||
+    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
+
+  for my $set (@$available_config_sets) {
+    my $localparams = $params;
+    $localparams->{directory} = $directory_template->($self, $localparams, $set);
+    $localparams->{config} = $set;
+    $self->dump($localparams);
+    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
+  }
+}
+
+=head2 dump_all_config_sets
+
+Works just like L</dump> but instead of specifying a single JSON config set
+located in L</config_dir> we dump each set in turn to the specified directory.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template>, which is a coderef expected to
+return a scalar that is a root directory where we will do the actual dumping.
+This coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+    $fixture->dump_all_config_sets({
+      schema => $schema,
+      directory_template => sub {
+        my ($fixture, $params, $set) = @_;
+        return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+      },
+    });
+
+=cut
+
+sub dump_all_config_sets {
+  my ($self, $params) = @_;
+  $self->dump_config_sets({
+    %$params,
+    configs=>[$self->available_config_sets],
+  });
+}
+
 =head2 populate
 
 =over 4
 
@@ -924,6 +1193,15 @@ sub _read_sql {
   # optional, set to 1 to run ddl but not populate
   no_populate => 0,
 
+  # optional, set to 1 to run each fixture through ->create rather than have
+  # each $rs populated using $rs->populate. Useful if you have overridden new()
+  # logic that affects the value of column(s).
+  use_create => 0,
+
+  # optional, same as use_create except with find_or_create.
+  # Useful if you are populating a persistent data store.
+  use_find_or_create => 0,
+
   # Don't try to clean the database, just populate over what's there. Requires
   # schema option. Use this if you want to handle removing old data yourself
   # no_deploy => 1
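Putting these options together, a dump-then-populate round trip might look like the following sketch (paths hypothetical; C<use_find_or_create> here seeds an existing store without re-deploying the DDL):

    my $fixtures = DBIx::Class::Fixtures->new({
        config_dir => '/home/me/app/fixture_configs',
    });

    # dump the rows selected by a config set...
    $fixtures->dump({
        config    => 'simple.json',
        schema    => $schema,
        directory => '/home/me/app/fixtures',
    });

    # ...then feed them back into a live schema; use_find_or_create
    # avoids duplicate-key errors on rows that already exist
    $fixtures->populate({
        directory          => '/home/me/app/fixtures',
        schema             => $schema,
        no_deploy          => 1,
        use_find_or_create => 1,
    });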
@@ -998,8 +1276,57 @@ sub populate {
   return 1 if $params->{no_populate};
 
   $self->msg("\nimporting fixtures");
-  my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
+  my $tmp_fixture_dir = tempdir();
   my $version_file = file($fixture_dir, '_dumper_version');
+  my $config_set_path = file($fixture_dir, '_config_set');
+  my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
+
+  my $v = Data::Visitor::Callback->new(
+    plain_value => sub {
+      my ($visitor, $data) = @_;
+      my $subs = {
+        ENV => sub {
+          my ( $self, $v ) = @_;
+          if (! defined($ENV{$v})) {
+            return "";
+          } else {
+            return $ENV{ $v };
+          }
+        },
+        ATTR => sub {
+          my ($self, $v) = @_;
+          if(my $attr = $self->config_attrs->{$v}) {
+            return $attr;
+          } else {
+            return "";
+          }
+        },
+        catfile => sub {
+          my ($self, @args) = @_;
+          catfile(@args);
+        },
+        catdir => sub {
+          my ($self, @args) = @_;
+          catdir(@args);
+        },
+      };
+
+      my $subsre = join( '|', keys %$subs );
+      $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
+
+      return $_;
+    }
+  );
+
+  $v->visit( $config_set );
+
+  my %sets_by_src;
+  if($config_set) {
+    %sets_by_src = map { delete($_->{class}) => $_ }
+      @{$config_set->{sets}};
+  }
+
   # DBIx::Class::Exception->throw('no version file found');
   # unless -e $version_file;
 
@@ -1009,7 +1336,7 @@ sub populate {
   }
   $self->msg("- creating temp dir");
   $tmp_fixture_dir->mkpath();
-  for ( map { $schema->source($_)->from } $schema->sources) {
+  for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
     my $from_dir = $fixture_dir->subdir($_);
     next unless -e $from_dir;
     dircopy($from_dir, $tmp_fixture_dir->subdir($_) );
@@ -1036,28 +1363,67 @@ sub populate {
     $fixup_visitor = new Data::Visitor::Callback(%callbacks);
   }
 
-  $schema->storage->with_deferred_fk_checks(sub {
-    foreach my $source (sort $schema->sources) {
-      $self->msg("- adding " . $source);
-      my $rs = $schema->resultset($source);
-      my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
-      next unless (-e $source_dir);
-      my @rows;
-      while (my $file = $source_dir->next) {
-        next unless ($file =~ /\.fix$/);
-        next if $file->is_dir;
-        my $contents = $file->slurp;
-        my $HASH1;
-        eval($contents);
-        $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
-        push(@rows, $HASH1);
+  $schema->storage->txn_do(sub {
+    $schema->storage->with_deferred_fk_checks(sub {
+      foreach my $source (sort $schema->sources) {
+        $self->msg("- adding " . $source);
+        my $rs = $schema->resultset($source);
+        my $source_dir = $tmp_fixture_dir->subdir( $self->_name_for_source($rs->result_source) );
+        next unless (-e $source_dir);
+        my @rows;
+        while (my $file = $source_dir->next) {
+          next unless ($file =~ /\.fix$/);
+          next if $file->is_dir;
+          my $contents = $file->slurp;
+          my $HASH1;
+          eval($contents);
+          $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
+          if(my $external = delete $HASH1->{external}) {
+            my @fields = keys %{$sets_by_src{$source}->{external}};
+            foreach my $field (@fields) {
+              my $key = $HASH1->{$field};
+              my $content = decode_base64($external->{$field});
+              my $args = $sets_by_src{$source}->{external}->{$field}->{args};
+              my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
+              $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
+              eval "use $class";
+              $class->restore($key, $content, $args);
+            }
+          }
+          if ( $params->{use_create} ) {
+            $rs->create( $HASH1 );
+          } elsif( $params->{use_find_or_create} ) {
+            $rs->find_or_create( $HASH1 );
+          } else {
+            push(@rows, $HASH1);
+          }
+        }
+        $rs->populate(\@rows) if scalar(@rows);
+
+        ## Now we need to do some db-specific cleanup. This probably belongs
+        ## in a more isolated space; right now it just handles PostgreSQL
+        ## SERIAL types that use sequences.
+
+        my $table = $rs->result_source->name;
+        for my $column ($rs->result_source->columns) {
+          my $info = $rs->result_source->column_info($column);
+          if(my $sequence = $info->{sequence}) {
+            $self->msg("- updating sequence $sequence");
+            $rs->result_source->storage->dbh_do(sub {
+              my ($storage, $dbh, @cols) = @_;
+              $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));");
+              my $sth = $dbh->prepare($sql);
+              my $rv = $sth->execute or die $sth->errstr;
+              $self->msg("- $sql");
+            });
+          }
+        }
       }
-      $rs->populate(\@rows) if scalar(@rows);
-    }
+    });
   });
 
-
   $self->do_post_ddl( {
-    schema=>$schema,
+    schema=>$schema,
     post_ddl=>$params->{post_ddl}
   } ) if $params->{post_ddl};
 
@@ -1091,6 +1457,16 @@ sub msg {
   }
 }
 
+# Helper method for ensuring that the name used for a given source is
+# always the same (this is used to name the fixture directories, for
+# example).
+
+sub _name_for_source {
+  my ($self, $source) = @_;
+
+  return ref $source->name ? $source->source_name : $source->name;
+}
+
 =head1 AUTHOR
 
   Luke Saunders
 
   Drew Taylor
 
+  Frank Switalski
+
+  Chris Akins
+
 =head1 LICENSE
 
 This library is free software under the same license as perl itself.