diff --git a/lib/DBIx/Class/Fixtures.pm b/lib/DBIx/Class/Fixtures.pm
index 3c8f038..50b9fdf 100644
--- a/lib/DBIx/Class/Fixtures.pm
+++ b/lib/DBIx/Class/Fixtures.pm
@@ -26,15 +26,15 @@ __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
 
 =head1 VERSION
 
-Version 1.001005
+Version 1.001013
 
 =cut
 
-our $VERSION = '1.001004';
+our $VERSION = '1.001013';
 
 =head1 NAME
 
-DBIx::Class::Fixtures
+DBIx::Class::Fixtures - Dump data and repopulate a database using rules
 
 =head1 SYNOPSIS
 
@@ -76,18 +76,18 @@ describe which data to pull and dump from the source database. For example:
 
  {
-  sets: [
+  "sets": [
    {
-    class: 'Artist',
-    ids: ['1', '3']
+    "class": "Artist",
+    "ids": ["1", "3"]
    },
    {
-    class: 'Producer',
-    ids: ['5'],
-    fetch: [
+    "class": "Producer",
+    "ids": ["5"],
+    "fetch": [
     {
-     rel: 'artists',
-     quantity: '2'
+     "rel": "artists",
+     "quantity": "2"
     }
    ]
   }
 
@@ -113,27 +113,27 @@ was dumped you also wanted all of their cds dumped too, then you could use a
 rule to specify this. For example:
 
  {
-  sets: [
+  "sets": [
    {
-    class: 'Artist',
-    ids: ['1', '3']
+    "class": "Artist",
+    "ids": ["1", "3"]
    },
    {
-    class: 'Producer',
-    ids: ['5'],
-    fetch: [
+    "class": "Producer",
+    "ids": ["5"],
+    "fetch": [
     {
-     rel: 'artists',
-     quantity: '2'
+     "rel": "artists",
+     "quantity": "2"
     }
    ]
   }
  ],
-  rules: {
-   Artist: {
-    fetch: [ {
-     rel: 'cds',
-     quantity: 'all'
+  "rules": {
+   "Artist": {
+    "fetch": [ {
+     "rel": "cds",
+     "quantity": "all"
    } ]
   }
  }
 
@@ -144,24 +144,24 @@ dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
 to CD. This is equivalent to:
 
  {
-  sets: [
+  "sets": [
    {
-    class: 'Artist',
-    ids: ['1', '3'],
-    fetch: [ {
-     rel: 'cds',
-     quantity: 'all'
+    "class": "Artist",
+    "ids": ["1", "3"],
+    "fetch": [ {
+     "rel": "cds",
+     "quantity": "all"
    } ]
   },
   {
-    class: 'Producer',
-    ids: ['5'],
-    fetch: [ {
-     rel: 'artists',
-     quantity: '2',
-     fetch: [ {
-      rel: 'cds',
-      quantity: 'all'
+    "class": "Producer",
+    "ids": ["5"],
+    "fetch": [ {
+     "rel": "artists",
+     "quantity": "2",
+     "fetch": [ {
+      "rel": "cds",
+      "quantity": "all"
     } ]
    } ]
   }
 
@@ -178,12 +178,12 @@ To prevent repetition between configs you can include other configs. For
 example:
 
  {
-  sets: [ {
-   class: 'Producer',
-   ids: ['5']
+  "sets": [ {
+   "class": "Producer",
+   "ids": ["5"]
   } ],
-  includes: [
-   { file: 'base.json' }
+  "includes": [
+   { "file": "base.json" }
   ]
  }
 
@@ -197,11 +197,11 @@ Only available for MySQL and PostgreSQL at the moment, must be a value that
 DateTime::Format::* can parse. For example:
 
  {
-  sets: [ {
-   class: 'RecentItems',
-   ids: ['9']
+  "sets": [ {
+   "class": "RecentItems",
+   "ids": ["9"]
   } ],
-  datetime_relative : "2007-10-30 00:00:00"
+  "datetime_relative": "2007-10-30 00:00:00"
 }
 
 This will work when dumping from a MySQL database and will cause any datetime
 
@@ -217,15 +217,15 @@ Specifies whether to automatically dump might_have relationships. Should be a
 hash with one attribute - fetch. Set fetch to 1 or 0.
 
 {
-  might_have: { fetch: 1 },
-  sets: [
+  "might_have": { "fetch": 1 },
+  "sets": [
   {
-   class: 'Artist',
-   ids: ['1', '3']
+   "class": "Artist",
+   "ids": ["1", "3"]
   },
   {
-   class: 'Producer',
-   ids: ['5']
+   "class": "Producer",
+   "ids": ["5"]
  }
 ]
 }
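As a quick sketch of how a config like the ones above is put to work (the file
name and paths here are illustrative; C<new> and C<dump> are used as documented
further down in this file):

    my $fixture = DBIx::Class::Fixtures->new({
      config_dir => '/home/me/app/fixture_configs'
    });

    # writes one .fix file per matched row under the target directory
    $fixture->dump({
      config    => 'artists_and_producers.json', # a config_dir file like the examples above
      schema    => $schema,
      directory => '/home/me/app/fixtures'
    });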
@@ -260,10 +260,10 @@ A hash specifying the conditions dumped objects must match. Essentially this is
 a JSON representation of a DBIx::Class search clause. For example:
 
 {
-  sets: [{
-   class: 'Artist',
-   quantiy: 'all',
-   cond: { name: 'Dave' }
+  "sets": [{
+   "class": "Artist",
+   "quantity": "all",
+   "cond": { "name": "Dave" }
  }]
 }
 
@@ -277,10 +277,10 @@ Sometimes in a search clause it's useful to use scalar refs to do things like:
 
 This could be specified in the cond hash like so:
 
 {
-  sets: [ {
-   class: 'Artist',
-   quantiy: 'all',
-   cond: { no1_singles: '\> no1_albums' }
+  "sets": [ {
+   "class": "Artist",
+   "quantity": "all",
+   "cond": { "no1_singles": "\> no1_albums" }
  } ]
 }
 
@@ -292,11 +292,11 @@ being passed to search.
 
 An array of relationships to be used in the cond clause.
 
 {
-  sets: [ {
-   class: 'Artist',
-   quantiy: 'all',
-   cond: { 'cds.position': { '>': 4 } },
-   join: ['cds']
+  "sets": [ {
+   "class": "Artist",
+   "quantity": "all",
+   "cond": { "cds.position": { ">": 4 } },
+   "join": ["cds"]
  } ]
 }
 
 Fetch all artists who have cds with position greater than 4.
 
@@ -307,13 +307,13 @@ Must be an array of hashes. Specifies which rels to also dump. For example:
 
 {
-  sets: [ {
-   class: 'Artist',
-   ids: ['1', '3'],
-   fetch: [ {
-    rel: 'cds',
-    quantity: '3',
-    cond: { position: '2' }
+  "sets": [ {
+   "class": "Artist",
+   "ids": ["1", "3"],
+   "fetch": [ {
+    "rel": "cds",
+    "quantity": "3",
+    "cond": { "position": "2" }
    } ]
  } ]
 }
 
@@ -425,7 +425,8 @@ sub new {
     _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
     debug => $params->{debug} || 0,
     ignore_sql_errors => $params->{ignore_sql_errors},
-    dumped_objects => {}
+    dumped_objects => {},
+    use_create => $params->{use_create} || 0
   };
 
   bless $self, $class;
 
@@ -433,6 +434,22 @@
   return $self;
 }
 
+=head2 available_config_sets
+
+Returns a list of all the config sets found in the L</config_dir>. These will
+be a list of the json based files containing dump rules.
+
+=cut
+
+my @config_sets;
+sub available_config_sets {
+  @config_sets = scalar(@config_sets) ? @config_sets : map {
+    $_->basename;
+  } grep {
+    -f $_ && $_=~/json$/;
+  } dir((shift)->config_dir)->children;
+}
+
 =head2 dump
 
 =over 4
 
@@ -465,7 +482,12 @@ directory. For example:
 
  /home/me/app/fixtures/artist/3.fix
  /home/me/app/fixtures/producer/5.fix
 
-schema and directory are required attributes. also, one of config or all must be specified.
+schema and directory are required attributes. Also, one of config or all must
+be specified.
+
+Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
+If this form is used your HashRef should conform to the structure rules defined
+for the JSON representations.
 
 =cut
 
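A sketch of the HashRef form just described (the set contents are illustrative;
the structure mirrors the JSON examples above):

    $fixture->dump({
      schema    => $schema,
      directory => '/home/me/app/fixtures',
      config    => {
        sets => [ { class => 'Artist', ids => ['1', '3'] } ]
      }
    });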
@@ -483,18 +505,32 @@ sub dump {
     }
   }
 
+  if($params->{excludes} && !$params->{all}) {
+    return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
+  }
+
   my $schema = $params->{schema};
   my $config;
   if ($params->{config}) {
-    #read config
-    my $config_file = $self->config_dir->file($params->{config});
-    $config = $self->load_config_file($config_file);
+    $config = ref $params->{config} eq 'HASH' ?
+      $params->{config} :
+      do {
+        #read config
+        my $config_file = $self->config_dir->file($params->{config});
+        $self->load_config_file($config_file);
+      };
   } elsif ($params->{all}) {
+    my %excludes = map {$_=>1} @{$params->{excludes}||[]};
     $config = {
       might_have => { fetch => 0 },
       has_many => { fetch => 0 },
       belongs_to => { fetch => 0 },
-      sets => [map {{ class => $_, quantity => 'all' }} $schema->sources]
+      sets => [
+        map {
+          { class => $_, quantity => 'all' };
+        } grep {
+          !$excludes{$_}
+        } $schema->sources],
     };
   } else {
     DBIx::Class::Exception->throw('must pass config or set all');
   }
 
@@ -525,8 +561,8 @@ sub dump {
   my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
 
   while ( my ($k,$v) = each %{ $config->{rules} } ) {
-    if ( my $rs = $schema->resultset($k) ) {
-      $config->{rules}{$rs->result_source->source_name} = $v;
+    if ( my $source = eval { $schema->source($k) } ) {
+      $config->{rules}{$source->source_name} = $v;
     }
   }
 
@@ -863,12 +899,14 @@ sub _generate_schema {
 
   # clear existing db
   $self->msg("- clearing DB of existing tables");
-  $pre_schema->storage->with_deferred_fk_checks(sub {
-    foreach my $table (@tables) {
-      eval {
-        $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
-      };
-    }
+  $pre_schema->storage->txn_do(sub {
+    $pre_schema->storage->with_deferred_fk_checks(sub {
+      foreach my $table (@tables) {
+        eval {
+          $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
+        };
+      }
+    });
   });
 
   # import new ddl file to db
 
@@ -901,6 +939,79 @@ sub _read_sql {
   return \@data;
 }
 
+=head2 dump_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set named in the C<configs> parameter.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+  $fixture->dump_config_sets({
+    schema => $schema,
+    configs => [qw/one.json other.json/],
+    directory_template => sub {
+      my ($fixture, $params, $set) = @_;
+      return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+    },
+  });
+
+=cut
+
+sub dump_config_sets {
+  my ($self, $params) = @_;
+  my $available_config_sets = delete $params->{configs};
+  my $directory_template = delete $params->{directory_template} ||
+    DBIx::Class::Exception->throw("'directory_template' is a required parameter");
+
+  for my $set (@$available_config_sets) {
+    my %localparams = %$params; ## Shallow copy so one set's directory/config don't leak into the next
+    $localparams{directory} = $directory_template->($self, \%localparams, $set);
+    $localparams{config} = $set;
+    $self->dump(\%localparams);
+    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
+  }
+}
+
+=head2 dump_all_config_sets
+
+Works just like L</dump> but instead of specifying a single json config set
+located in L</config_dir> we dump each set in turn to the specified directory.
+
+The parameters are the same as for L</dump> except instead of a C<directory>
+parameter we have a C<directory_template> which is a coderef expected to return
+a scalar that is a root directory where we will do the actual dumping. This
+coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
+example:
+
+  $fixture->dump_all_config_sets({
+    schema => $schema,
+    directory_template => sub {
+      my ($fixture, $params, $set) = @_;
+      return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
+    },
+  });
+
+=cut
+
+sub dump_all_config_sets {
+  my ($self, $params) = @_;
+  $self->dump_config_sets({
+    %$params,
+    configs=>[$self->available_config_sets],
+  });
+}
+
 =head2 populate
 
 =over 4
 
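A set dumped with the methods above can later be loaded back in via populate; a
sketch, assuming the C<directory_template> layout from the previous example
(C<no_deploy> requires the C<schema> option, as the parameter list below notes):

    use File::Spec;

    # repopulate from the 'one.json' dump without touching the schema
    $fixture->populate({
      directory => File::Spec->catdir('var', 'fixtures', $schema->version, 'one.json'),
      schema    => $schema,
      no_deploy => 1,
    });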
@@ -930,6 +1041,11 @@ sub _read_sql
   # optional, set to 1 to run ddl but not populate
   no_populate => 0,
 
+  # optional, set to 1 to run each fixture through ->create rather than have
+  # each $rs populated using $rs->populate. Useful if you have overridden new()
+  # logic that affects the value of column(s).
+  use_create => 0,
+
   # Dont try to clean the database, just populate over whats there. Requires
   # schema option. Use this if you want to handle removing old data yourself
   # no_deploy => 1
 
@@ -1042,26 +1158,31 @@ sub populate {
     $fixup_visitor = new Data::Visitor::Callback(%callbacks);
   }
 
-  $schema->storage->with_deferred_fk_checks(sub {
-    foreach my $source (sort $schema->sources) {
-      $self->msg("- adding " . $source);
-      my $rs = $schema->resultset($source);
-      my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
-      next unless (-e $source_dir);
-      my @rows;
-      while (my $file = $source_dir->next) {
-        next unless ($file =~ /\.fix$/);
-        next if $file->is_dir;
-        my $contents = $file->slurp;
-        my $HASH1;
-        eval($contents);
-        $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
-        push(@rows, $HASH1);
+  $schema->storage->txn_do(sub {
+    $schema->storage->with_deferred_fk_checks(sub {
+      foreach my $source (sort $schema->sources) {
+        $self->msg("- adding " . $source);
+        my $rs = $schema->resultset($source);
+        my $source_dir = $tmp_fixture_dir->subdir( lc $rs->result_source->from );
+        next unless (-e $source_dir);
+        my @rows;
+        while (my $file = $source_dir->next) {
+          next unless ($file =~ /\.fix$/);
+          next if $file->is_dir;
+          my $contents = $file->slurp;
+          my $HASH1;
+          eval($contents);
+          $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
+          if ( $params->{use_create} ) {
+            $rs->create( $HASH1 );
+          } else {
+            push(@rows, $HASH1);
+          }
+        }
+        $rs->populate(\@rows) if scalar(@rows);
       }
-      $rs->populate(\@rows) if scalar(@rows);
-    }
+    });
   });
-
   $self->do_post_ddl( {
     schema=>$schema,
     post_ddl=>$params->{post_ddl}
 
@@ -1111,6 +1232,8 @@ sub msg {
 
 Drew Taylor
 
+Frank Switalski
+
 =head1 LICENSE
 
 This library is free software under the same license as perl itself
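For reference, each .fix file consumed by populate above is a Perl snippet that
is eval'd to set C<$HASH1>, one row per file; a sketch with illustrative column
names:

    $HASH1 = {
      artistid => 1,
      name     => 'Artist One'
    };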