X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FFixtures.pm;h=484b80ac360d1877f321623098ffc9765423b37f;hb=acc0fa8425889e0aceaa6152a0f3eb495574c57e;hp=0fa59637a95a8a20884e356d874cfee7ab76a5c4;hpb=c5178f250b5f57c9966636e06c4df7b4a3b6a0a7;p=dbsrgits%2FDBIx-Class-Fixtures.git diff --git a/lib/DBIx/Class/Fixtures.pm b/lib/DBIx/Class/Fixtures.pm index 0fa5963..484b80a 100644 --- a/lib/DBIx/Class/Fixtures.pm +++ b/lib/DBIx/Class/Fixtures.pm @@ -7,6 +7,7 @@ use DBIx::Class 0.08100; use DBIx::Class::Exception; use Class::Accessor::Grouped; use Path::Class qw(dir file); +use File::Spec::Functions 'catfile', 'catdir'; use Config::Any::JSON; use Data::Dump::Streamer; use Data::Visitor::Callback; @@ -16,25 +17,21 @@ use File::Copy qw/move/; use Hash::Merge qw( merge ); use Data::Dumper; use Class::C3::Componentised; +use MIME::Base64; +use File::Temp qw/tempdir/; use base qw(Class::Accessor::Grouped); our $namespace_counter = 0; __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir - _inherited_attributes debug schema_class dumped_objects/); + _inherited_attributes debug schema_class dumped_objects config_attrs/); -=head1 VERSION - -Version 1.001010 - -=cut - -our $VERSION = '1.001010'; +our $VERSION = '1.001018'; =head1 NAME -DBIx::Class::Fixtures +DBIx::Class::Fixtures - Dump data and repopulate a database using rules =head1 SYNOPSIS @@ -343,6 +340,40 @@ in this case. This value will be inherited by all fetches in this set. This is not true for the has_many attribute. +=head2 external + +In some cases your database information might be keys to values in some sort of +external storage. The classic example is you are using L +to store blob information on the filesystem. In this case you may wish the ability +to backup your external storage in the same way your database data. The L +attribute lets you specify a handler for this type of issue. 
For example: + + { + "sets": [{ + "class": "Photo", + "quantity": "all", + "external": { + "file": { + "class": "File", + "args": {"path":"__ATTR(photo_dir)__"} + } + } + }] + } + +This would use L to read from a directory +where the path to a file is specified by the C field of the C source. +We use the uninflated value of the field so you need to completely handle backup +and restore. For the common case we provide L +and you can create your own custom handlers by placing a '+' in the namespace: + + "class": "+MyApp::Schema::SomeExternalStorage", + +Although if possible I'd love to get patches to add some of the other common +types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.) + +See L for the external handler interface. + =head1 RULE ATTRIBUTES =head2 cond @@ -365,6 +396,34 @@ Same as with L Same as with L +=head1 RULE SUBSTITUTIONS + +You can provide the following substitution patterns for your rule values. An +example of this might be: + + { + "sets": [{ + "class": "Photo", + "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__", + }] + } + +=head2 ENV + +Provide a value from %ENV + +=head2 ATTR + +Provide a value from L + +=head2 catfile + +Create the path to a file from a list + +=head2 catdir + +Create the path to a directory from a list + =head1 METHODS =head2 new @@ -395,6 +454,32 @@ determines whether to be verbose ignore errors on import of DDL etc +=item config_attrs + +A hash of information you can use to do replacements inside your configuration +sets. 
For example, if your set looks like: + + { + "sets": [ { + "class": "Artist", + "ids": ["1", "3"], + "fetch": [ { + "rel": "cds", + "quantity": "__ATTR(quantity)__", + } ] + } ] + } + + my $fixtures = DBIx::Class::Fixtures->new( { + config_dir => '/home/me/app/fixture_configs' + config_attrs => { + quantity => 100, + }, + }); + +You may wish to do this if you want to let whoever runs the dumps have a bit +more control + =back my $fixtures = DBIx::Class::Fixtures->new( { @@ -425,7 +510,9 @@ sub new { _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/], debug => $params->{debug} || 0, ignore_sql_errors => $params->{ignore_sql_errors}, - dumped_objects => {} + dumped_objects => {}, + use_create => $params->{use_create} || 0, + config_attrs => $params->{config_attrs} || {}, }; bless $self, $class; @@ -433,6 +520,22 @@ sub new { return $self; } +=head2 available_config_sets + +Returns a list of all the config sets found in the L. These will +be a list of the json based files containing dump rules. + +=cut + +my @config_sets; +sub available_config_sets { + @config_sets = scalar(@config_sets) ? @config_sets : map { + $_->basename; + } grep { + -f $_ && $_=~/json$/; + } dir((shift)->config_dir)->children; +} + =head2 dump =over 4 @@ -465,7 +568,12 @@ directory. For example: /home/me/app/fixtures/artist/3.fix /home/me/app/fixtures/producer/5.fix -schema and directory are required attributes. also, one of config or all must be specified. +schema and directory are required attributes. also, one of config or all must +be specified. + +Lastly, the C parameter can be a Perl HashRef instead of a file name. +If this form is used your HashRef should conform to the structure rules defined +for the JSON representations. 
=cut @@ -483,18 +591,32 @@ sub dump { } } + if($params->{excludes} && !$params->{all}) { + return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param"); + } + my $schema = $params->{schema}; my $config; if ($params->{config}) { - #read config - my $config_file = $self->config_dir->file($params->{config}); - $config = $self->load_config_file($config_file); + $config = ref $params->{config} eq 'HASH' ? + $params->{config} : + do { + #read config + my $config_file = $self->config_dir->file($params->{config}); + $self->load_config_file($config_file); + }; } elsif ($params->{all}) { + my %excludes = map {$_=>1} @{$params->{excludes}||[]}; $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, - sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] + sets => [ + map { + { class => $_, quantity => 'all' }; + } grep { + !$excludes{$_} + } $schema->sources], }; } else { DBIx::Class::Exception->throw('must pass config or set all'); @@ -507,7 +629,7 @@ sub dump { } $self->msg("generating fixtures"); - my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<); + my $tmp_output_dir = dir(tmpdir()); if (-e $tmp_output_dir) { $self->msg("- clearing existing $tmp_output_dir"); @@ -521,6 +643,11 @@ sub dump { ->openw ->print($VERSION); + # write our current config set + $tmp_output_dir->file('_config_set') + ->openw + ->print( Dumper $config ); + $config->{rules} ||= {}; my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}}; @@ -660,6 +787,46 @@ sub dump_rs { sub dump_object { my ($self, $object, $params) = @_; my $set = $params->{set}; + + my $v = Data::Visitor::Callback->new( + plain_value => sub { + my ($visitor, $data) = @_; + my $subs = { + ENV => sub { + my ( $self, $v ) = @_; + if (! 
defined($ENV{$v})) { + return ""; + } else { + return $ENV{ $v }; + } + }, + ATTR => sub { + my ($self, $v) = @_; + if(my $attr = $self->config_attrs->{$v}) { + return $attr; + } else { + return ""; + } + }, + catfile => sub { + my ($self, @args) = @_; + catfile(@args); + }, + catdir => sub { + my ($self, @args) = @_; + catdir(@args); + }, + }; + + my $subsre = join( '|', keys %$subs ); + $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg; + + return $_; + } + ); + + $v->visit( $set ); + die 'no dir passed to dump_object' unless $params->{set_dir}; die 'no object passed to dump_object' unless $object; @@ -684,12 +851,26 @@ sub dump_object { join('-', map { s|[/\\]|_|g; $_; } @pk_vals) . '.fix' ); - # write file unless ($exists) { $self->msg('-- dumping ' . $file->stringify, 2); my %ds = $object->get_columns; + if($set->{external}) { + foreach my $field (keys %{$set->{external}}) { + my $key = $ds{$field}; + my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/); + my $args = $set->{external}->{$field}->{args}; + + $class = "DBIx::Class::Fixtures::External::$class" unless $plus; + eval "use $class"; + + $ds{external}->{$field} = + encode_base64( $class + ->backup($key => $args)); + } + } + # mess with dates if specified if ($set->{datetime_relative}) { my $formatter= $object->result_source->schema->storage->datetime_parser; @@ -706,7 +887,13 @@ sub dump_object { next unless $value && $col_info->{_inflate_info} - && uc($col_info->{data_type}) eq 'DATETIME'; + && ( + (uc($col_info->{data_type}) eq 'DATETIME') + or (uc($col_info->{data_type}) eq 'DATE') + or (uc($col_info->{data_type}) eq 'TIME') + or (uc($col_info->{data_type}) eq 'TIMESTAMP') + or (uc($col_info->{data_type}) eq 'INTERVAL') + ); $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt); } @@ -903,6 +1090,79 @@ sub _read_sql { return \@data; } +=head2 dump_config_sets + +Works just like L but instead of specifying a single 
json config set
located in L</config_dir> we dump each set named in the C<configs> parameter.

The parameters are the same as for L</dump> except instead of a C<directory>
parameter we have a C<directory_template> which is a coderef expected to return
a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
example:

  $fixture->dump_all_config_sets({
    schema => $schema,
    configs => [qw/one.json other.json/],
    directory_template => sub {
      my ($fixture, $params, $set) = @_;
      return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set);
    },
  });

=cut

sub dump_config_sets {
  my ($self, $params) = @_;
  my $available_config_sets = delete $params->{configs};
  my $directory_template = delete $params->{directory_template} ||
    DBIx::Class::Exception->throw("'directory_template is required parameter");

  for my $set (@$available_config_sets) {
    my $localparams = $params;
    $localparams->{directory} = $directory_template->($self, $localparams, $set);
    $localparams->{config} = $set;
    $self->dump($localparams);
    $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
  }
}

=head2 dump_all_config_sets

  my %local_params = %$params;
  my $local_self = bless { %$self }, ref($self);
  $local_params{directory} = $directory_template->($self, \%local_params, $set);
  $local_params{config} = $set;
  $self->dump(\%local_params);


Works just like L</dump> but instead of specifying a single json config set
located in L</config_dir> we dump each set in turn to the specified directory.

The parameters are the same as for L</dump> except instead of a C<directory>
parameter we have a C<directory_template> which is a coderef expected to return
a scalar that is a root directory where we will do the actual dumping. This
coderef gets three arguments: C<$self>, C<$params> and C<$set_name>.
For +example: + + $fixture->dump_all_config_sets({ + schema => $schema, + directory_template => sub { + my ($fixture, $params, $set) = @_; + return File::Spec->catdir('var', 'fixtures', $params->{schema}->version, $set); + }, + }); + +=cut + +sub dump_all_config_sets { + my ($self, $params) = @_; + $self->dump_config_sets({ + %$params, + configs=>[$self->available_config_sets], + }); +} + =head2 populate =over 4 @@ -932,6 +1192,11 @@ sub _read_sql { # optional, set to 1 to run ddl but not populate no_populate => 0, + # optional, set to 1 to run each fixture through ->create rather than have + # each $rs populated using $rs->populate. Useful if you have overridden new() logic + # that effects the value of column(s). + use_create => 0, + # Dont try to clean the database, just populate over whats there. Requires # schema option. Use this if you want to handle removing old data yourself # no_deploy => 1 @@ -1006,8 +1271,57 @@ sub populate { return 1 if $params->{no_populate}; $self->msg("\nimporting fixtures"); - my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<); + my $tmp_fixture_dir = dir(tmpdir()); my $version_file = file($fixture_dir, '_dumper_version'); + my $config_set_path = file($fixture_dir, '_config_set'); + my $config_set = -e $config_set_path ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : ''; + + my $v = Data::Visitor::Callback->new( + plain_value => sub { + my ($visitor, $data) = @_; + my $subs = { + ENV => sub { + my ( $self, $v ) = @_; + if (! defined($ENV{$v})) { + return ""; + } else { + return $ENV{ $v }; + } + }, + ATTR => sub { + my ($self, $v) = @_; + if(my $attr = $self->config_attrs->{$v}) { + return $attr; + } else { + return ""; + } + }, + catfile => sub { + my ($self, @args) = @_; + catfile(@args); + }, + catdir => sub { + my ($self, @args) = @_; + catdir(@args); + }, + }; + + my $subsre = join( '|', keys %$subs ); + $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? 
split( /,/, $2 ) : () ) }eg; + + return $_; + } + ); + + $v->visit( $config_set ); + + + my %sets_by_src; + if($config_set) { + %sets_by_src = map { delete($_->{class}) => $_ } + @{$config_set->{sets}} + } + # DBIx::Class::Exception->throw('no version file found'); # unless -e $version_file; @@ -1059,14 +1373,50 @@ sub populate { my $HASH1; eval($contents); $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor; - push(@rows, $HASH1); + if(my $external = delete $HASH1->{external}) { + my @fields = keys %{$sets_by_src{$source}->{external}}; + foreach my $field(@fields) { + my $key = $HASH1->{$field}; + my $content = decode_base64 ($external->{$field}); + my $args = $sets_by_src{$source}->{external}->{$field}->{args}; + my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/); + $class = "DBIx::Class::Fixtures::External::$class" unless $plus; + eval "use $class"; + $class->restore($key, $content, $args); + } + } + if ( $params->{use_create} ) { + $rs->create( $HASH1 ); + } else { + push(@rows, $HASH1); + } } $rs->populate(\@rows) if scalar(@rows); + + ## Now we need to do some db specific cleanup + ## this probably belongs in a more isolated space. Right now this is + ## to just handle postgresql SERIAL types that use Sequences + + my $table = $rs->result_source->name; + for my $column(my @columns = $rs->result_source->columns) { + my $info = $rs->result_source->column_info($column); + if(my $sequence = $info->{sequence}) { + $self->msg("- updating sequence $sequence"); + $rs->result_source->storage->dbh_do(sub { + my ($storage, $dbh, @cols) = @_; + $self->msg(my $sql = "SELECT setval('${sequence}', (SELECT max($column) FROM ${table}));"); + my $sth = $dbh->prepare($sql); + my $rv = $sth->execute or die $sth->errstr; + $self->msg("- $sql"); + }); + } + } + } }); }); $self->do_post_ddl( { - schema=>$schema, + schema=>$schema, post_ddl=>$params->{post_ddl} } ) if $params->{post_ddl};