X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FFixtures.pm;h=d429ee667d6a9f8d2def2e04f58a6d3adb05dd15;hb=d01826ae39bea00c05d21606e5b6784a5814ee58;hp=f9af4e476c84bba54339ced0c94fa999d0c9d422;hpb=e5963c1b96b4fc0b19093fcd0bceec1a73cf8d1d;p=dbsrgits%2FDBIx-Class-Fixtures.git

diff --git a/lib/DBIx/Class/Fixtures.pm b/lib/DBIx/Class/Fixtures.pm
index f9af4e4..d429ee6 100644
--- a/lib/DBIx/Class/Fixtures.pm
+++ b/lib/DBIx/Class/Fixtures.pm
@@ -4,26 +4,37 @@ use strict;
 use warnings;
 use DBIx::Class::Exception;
-use Class::Accessor;
+use Class::Accessor::Grouped;
 use Path::Class qw(dir file);
-use FindBin;
-use JSON::Syck qw(LoadFile);
+use File::Slurp;
+use Config::Any::JSON;
+use Data::Dump::Streamer;
+use Data::Visitor::Callback;
+use File::Path;
+use File::Copy::Recursive qw/dircopy/;
+use File::Copy qw/move/;
+use Hash::Merge qw( merge );
 use Data::Dumper;
+use Class::C3::Componentised;
 
-use base qw(Class::Accessor);
+use base qw(Class::Accessor::Grouped);
 
-__PACKAGE__->mk_accessors(qw(config_dir));
+our $namespace_counter = 0;
+
+__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir _inherited_attributes debug schema_class/);
 
 =head1 VERSION
 
-Version 1.000
+Version 1.001001
 
 =cut
 
-our $VERSION = '1.000';
+our $VERSION = '1.001001';
 
 =head1 NAME
 
+DBIx::Class::Fixtures
+
 =head1 SYNOPSIS
 
 use DBIx::Class::Fixtures;
 
@@ -41,17 +52,303 @@ our $VERSION = '1.000';
 $fixtures->populate({
   directory => '/home/me/app/fixtures',
   ddl => '/home/me/app/sql/ddl.sql',
-  connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password']
+  connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
+  post_ddl => '/home/me/app/sql/post_ddl.sql',
 });
 
 =head1 DESCRIPTION
 
-=head1 AUTHOR
+Dump fixtures from a source database to the filesystem, then import them into another database
+(with the same schema) at any time. Use them as a constant dataset for running tests against, or
+for populating development databases when it is impractical to use production clones. Fixture
+sets are described using relations and conditions based on your DBIx::Class schema.
 
-=head1 CONTRIBUTORS
+=head1 DEFINE YOUR FIXTURE SET
 
-=cut
+Fixture sets are currently defined in .json files which must reside in your config_dir
+(e.g. /home/me/app/fixture_configs/a_fixture_set.json). They describe which data to pull and dump
+from the source database.
+
+For example:
+
+  {
+    sets: [{
+      class: 'Artist',
+      ids: ['1', '3']
+    }, {
+      class: 'Producer',
+      ids: ['5'],
+      fetch: [{
+        rel: 'artists',
+        quantity: '2'
+      }]
+    }]
+  }
+
+This will fetch artists with primary keys 1 and 3, the producer with primary key 5, and two of
+producer 5's artists, where 'artists' is a has_many DBIx::Class rel from Producer to Artist.
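+
+Once that file is saved into your config_dir, a dump using it might look like this (a minimal
+sketch; the config file name and paths here are hypothetical):
+
+  my $fixtures = DBIx::Class::Fixtures->new({
+    config_dir => '/home/me/app/fixture_configs',
+  });
+
+  $fixtures->dump({
+    config    => 'artists_and_producers.json', # hypothetical config file name
+    schema    => $schema,                      # your connected DBIx::Class schema
+    directory => '/home/me/app/fixtures',      # where the .fix files end up
+  });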
+
+The top level attributes are as follows:
+
+=head2 sets
+
+Sets must be an array of hashes, as in the example given above. Each set defines a set of objects
+to be included in the fixtures. For details on valid set attributes see L</SET ATTRIBUTES> below.
+
+=head2 rules
+
+Rules place general conditions on classes. For example, if whenever an artist is dumped you also
+want all of their cds dumped too, you can use a rule to specify this:
+
+  {
+    sets: [{
+      class: 'Artist',
+      ids: ['1', '3']
+    }, {
+      class: 'Producer',
+      ids: ['5'],
+      fetch: [{
+        rel: 'artists',
+        quantity: '2'
+      }]
+    }],
+    rules: {
+      Artist: {
+        fetch: [{
+          rel: 'cds',
+          quantity: 'all'
+        }]
+      }
+    }
+  }
+
+In this case the cds of artists 1 and 3, and of all producer 5's artists, will be dumped as well.
+Note that 'cds' is a has_many DBIx::Class relation from Artist to CD. This is equivalent to:
+
+  {
+    sets: [{
+      class: 'Artist',
+      ids: ['1', '3'],
+      fetch: [{
+        rel: 'cds',
+        quantity: 'all'
+      }]
+    }, {
+      class: 'Producer',
+      ids: ['5'],
+      fetch: [{
+        rel: 'artists',
+        quantity: '2',
+        fetch: [{
+          rel: 'cds',
+          quantity: 'all'
+        }]
+      }]
+    }]
+  }
+
+rules must be a hash keyed by class name. See L</RULE ATTRIBUTES> below.
+
+=head2 includes
+
+To prevent repetition between configs you can include other configs. For example:
+
+  {
+    sets: [{
+      class: 'Producer',
+      ids: ['5']
+    }],
+    includes: [{
+      file: 'base.json'
+    }]
+  }
+
+Includes must be an arrayref of hashrefs, where each hashref has a key 'file' whose value is the
+name of another config file in the same directory. The original config is merged with its
+includes using Hash::Merge.
+
+=head2 datetime_relative
+
+Only available for MySQL and PostgreSQL at the moment. Must be a value that DateTime::Format::*
+can parse. For example:
+
+  {
+    sets: [{
+      class: 'RecentItems',
+      ids: ['9']
+    }],
+    datetime_relative: "2007-10-30 00:00:00"
+  }
+
+This will work when dumping from a MySQL database and will cause any datetime fields (where
+datatype => 'datetime' in the column def of the schema class) to be dumped as a DateTime::Duration
+object relative to the date specified in the datetime_relative value. For example, if the
+RecentItems object had a date field set to 2007-10-25, then when the fixture is imported the
+field will be set to 5 days in the past relative to the current time.
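+
+To make the round trip concrete, here is roughly what happens, sketched in Perl ($formatter,
+$row and the 'date' column are illustrative names, not part of the API):
+
+  # at dump time: the column is stored as a duration relative to the config date
+  my $base     = $formatter->parse_datetime('2007-10-30 00:00:00');
+  my $duration = $row->get_inflated_column('date')->subtract_datetime($base);
+
+  # at populate time: the duration is re-anchored to the current date
+  my $restored = DateTime->today->add_duration($duration);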
+
+=head2 might_have
+
+Specifies whether to automatically dump might_have relationships. Should be a hash with one
+attribute - fetch. Set fetch to 1 or 0.
+
+  {
+    might_have: {
+      fetch: 1
+    },
+    sets: [{
+      class: 'Artist',
+      ids: ['1', '3']
+    }, {
+      class: 'Producer',
+      ids: ['5']
+    }]
+  }
+
+Note: belongs_to rels are automatically dumped whether you like it or not; this is to avoid FKs
+pointing nowhere when importing. General rules on has_many rels are not accepted at this top
+level, but you can turn them on for individual sets - see L</SET ATTRIBUTES>.
+
+=head1 SET ATTRIBUTES
+
+=head2 class
+
+Required attribute. Specifies the DBIx::Class object class you wish to dump.
+
+=head2 ids
+
+Array of primary key ids to fetch, basically causing an $rs->find($_) for each. If an id is not
+in the source db then it just won't get dumped - no warnings, no death.
+
+=head2 quantity
+
+Must be either an integer or the string 'all'. Specifying an integer will effectively set the
+'rows' attribute on the resultset clause; specifying 'all' will cause the rows attribute to be
+left off and all matching rows to be dumped. There's no randomising here - it's just the first
+x rows.
+
+=head2 cond
+
+A hash specifying the conditions dumped objects must match. Essentially this is a JSON
+representation of a DBIx::Class search clause. For example:
+
+  {
+    sets: [{
+      class: 'Artist',
+      quantity: 'all',
+      cond: { name: 'Dave' }
+    }]
+  }
+
+This will dump all artists whose name is 'Dave'. Essentially
+$artist_rs->search({ name => 'Dave' })->all.
+
+Sometimes in a search clause it's useful to use scalar refs to do things like:
+
+  $artist_rs->search({ no1_singles => \'> no1_albums' })
+
+This could be specified in the cond hash like so:
+
+  {
+    sets: [{
+      class: 'Artist',
+      quantity: 'all',
+      cond: { no1_singles: '\> no1_albums' }
+    }]
+  }
+
+So if the value starts with a backslash, the value is made a scalar ref before being passed to
+search.
+
+=head2 join
+
+An array of relationships to be used in the cond clause.
+
+  {
+    sets: [{
+      class: 'Artist',
+      quantity: 'all',
+      cond: { 'cds.position': { '>': 4 } },
+      join: ['cds']
+    }]
+  }
+
+This will fetch all artists who have cds with position greater than 4.
+
+=head2 fetch
+
+Must be an array of hashes. Specifies which rels to also dump. For example:
+
+  {
+    sets: [{
+      class: 'Artist',
+      ids: ['1', '3'],
+      fetch: [{
+        rel: 'cds',
+        quantity: '3',
+        cond: { position: '2' }
+      }]
+    }]
+  }
+
+Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
+
+Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and 'join'. rel is the
+name of the DBIx::Class rel to follow; the rest are the same as in the set attributes. quantity
+is necessary for has_many relationships, but not for belongs_to or might_have relationships.
+
+=head2 has_many
+
+Specifies whether to fetch has_many rels for this set. Must be a hash containing keys fetch and
+quantity.
+
+Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an integer.
+
+Be careful here - dumping has_many rels can lead to a lot of data being dumped.
+
+=head2 might_have
+
+As with has_many but for might_have relationships. Quantity doesn't do anything in this case.
+
+This value will be inherited by all fetches in this set. This is not true for the has_many
+attribute.
+
+=head1 RULE ATTRIBUTES
+
+=head2 cond
+
+Same as with L</SET ATTRIBUTES>
+
+=head2 fetch
+
+Same as with L</SET ATTRIBUTES>
+
+=head2 join
+
+Same as with L</SET ATTRIBUTES>
+
+=head2 has_many
+
+Same as with L</SET ATTRIBUTES>
+
+=head2 might_have
+
+Same as with L</SET ATTRIBUTES>
+
+=head1 METHODS
+
+=head2 new
+
+=over 4
+
+=item Arguments: \%attrs
+
+=item Return Value: $fixture_object
+
+=back
+
+Returns a new DBIx::Class::Fixtures object. %attrs can have the following parameters:
+
+- config_dir: required. Must contain a valid path to the directory in which your .json configs
+  reside.
+- debug: determines whether to be verbose.
+- ignore_sql_errors: ignore errors on import of DDL etc.
+
+  my $fixtures = DBIx::Class::Fixtures->new({ config_dir => '/home/me/app/fixture_configs' });
+
+=cut

 sub new {
   my $class = shift;

@@ -71,7 +368,10 @@ sub new {
   }

   my $self = {
-    config_dir => $config_dir
+    config_dir => $config_dir,
+    _inherited_attributes => [qw/datetime_relative might_have rules/],
+    debug => $params->{debug} || 0,
+    ignore_sql_errors => $params->{ignore_sql_errors}
   };

   bless $self, $class;

@@ -79,4 +379,557 @@ sub new {
   return $self;
 }

+=head2 dump
+
+=over 4
+
+=item Arguments: \%attrs
+
+=item Return Value: 1
+
+=back
+
+  $fixtures->dump({
+    config => 'set_config.json', # config file to use. must be in the config
+                                 # directory specified in the constructor
+    schema => $source_dbic_schema,
+    directory => '/home/me/app/fixtures' # output directory
+  });
+
+or
+
+  $fixtures->dump({
+    all => 1, # just dump everything that's in the schema
+    schema => $source_dbic_schema,
+    directory => '/home/me/app/fixtures' # output directory
+  });
+
+In this case objects will be dumped to subdirectories in the specified directory. For example:
+
+  /home/me/app/fixtures/artist/1.fix
+  /home/me/app/fixtures/artist/3.fix
+  /home/me/app/fixtures/producer/5.fix
+
+schema and directory are required attributes. Also, one of config or all must be specified.
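+
+Each .fix file is simply the row's column data serialised by Data::Dump::Streamer. For
+illustration, artist/1.fix might contain something like this (the column names and values here
+are hypothetical):
+
+  $HASH1 = {
+    artistid => 1,
+    name     => 'Dave'
+  };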
+
+=cut
+
+sub dump {
+  my $self = shift;
+
+  my ($params) = @_;
+  unless (ref $params eq 'HASH') {
+    return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
+  }
+
+  foreach my $param (qw/schema directory/) {
+    unless ($params->{$param}) {
+      return DBIx::Class::Exception->throw($param . ' param not specified');
+    }
+  }
+
+  my $schema = $params->{schema};
+  my $config_file;
+  my $config;
+  if ($params->{config}) {
+    # read config
+    $config_file = file($self->config_dir, $params->{config});
+    unless (-e $config_file) {
+      return DBIx::Class::Exception->throw('config does not exist at ' . $config_file);
+    }
+    $config = Config::Any::JSON->load($config_file);
+
+    # process includes
+    if ($config->{includes}) {
+      $self->msg($config->{includes});
+      unless (ref $config->{includes} eq 'ARRAY') {
+        return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
+      }
+      foreach my $include_config (@{$config->{includes}}) {
+        unless ((ref $include_config eq 'HASH') && $include_config->{file}) {
+          return DBIx::Class::Exception->throw('includes params of config must be an array ref of hashrefs');
+        }
+
+        my $include_file = file($self->config_dir, $include_config->{file});
+        unless (-e $include_file) {
+          return DBIx::Class::Exception->throw('config does not exist at ' . $include_file);
+        }
+        my $include = Config::Any::JSON->load($include_file);
+        $self->msg($include);
+        $config = merge( $config, $include );
+      }
+      delete $config->{includes};
+    }
+
+    # validate config
+    unless ($config && $config->{sets} && ref $config->{sets} eq 'ARRAY' && scalar(@{$config->{sets}})) {
+      return DBIx::Class::Exception->throw('config has no sets');
+    }
+
+    $config->{might_have} = { fetch => 0 } unless (exists $config->{might_have});
+    $config->{has_many} = { fetch => 0 } unless (exists $config->{has_many});
+    $config->{belongs_to} = { fetch => 1 } unless (exists $config->{belongs_to});
+  } elsif ($params->{all}) {
+    $config = { might_have => { fetch => 0 }, has_many => { fetch => 0 }, belongs_to => { fetch => 0 }, sets => [map {{ class => $_, quantity => 'all' }} $schema->sources] };
+    print Dumper($config);
+  } else {
+    return DBIx::Class::Exception->throw('must pass config or set all');
+  }
+
+  my $output_dir = dir($params->{directory});
+  unless (-e $output_dir) {
+    $output_dir->mkpath ||
+      return DBIx::Class::Exception->throw('output directory does not exist at ' . $output_dir);
+  }
+
+  $self->msg("generating fixtures");
+  my $tmp_output_dir = dir($output_dir, '-~dump~-' . $<);
+
+  if (-e $tmp_output_dir) {
+    $self->msg("- clearing existing $tmp_output_dir");
+    $tmp_output_dir->rmtree;
+  }
+  $self->msg("- creating $tmp_output_dir");
+  $tmp_output_dir->mkpath;
+
+  # write version file (for the potential benefit of populate)
+  my $version_file = file($tmp_output_dir, '_dumper_version');
+  write_file($version_file->stringify, $VERSION);
+
+  $config->{rules} ||= {};
+  my @sources = sort { $a->{class} cmp $b->{class} } @{delete $config->{sets}};
+  my %options = ( is_root => 1 );
+  $self->{queue} = [];
+  foreach my $source (@sources) {
+    # apply rule to set if specified
+    my $rule = $config->{rules}->{$source->{class}};
+    $source = merge( $source, $rule ) if ($rule);
+
+    # fetch objects
+    my $rs = $schema->resultset($source->{class});
+
+    if ($source->{cond} and ref $source->{cond} eq 'HASH') {
+      # if a value starts with a backslash, pass it as a scalar ref to DBIC
+      # ideally this would substitute deeply
+      $source->{cond} = { map { $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_} : $source->{cond}->{$_} } keys %{$source->{cond}} };
+    }
+
+    $rs = $rs->search($source->{cond}, { join => $source->{join} }) if ($source->{cond});
+    $self->msg("- dumping $source->{class}");
+    my @objects;
+    my %source_options = ( set => { %{$config}, %{$source} } );
+    if ($source->{quantity}) {
+      $rs = $rs->search({}, { order_by => $source->{order_by} }) if ($source->{order_by});
+      if ($source->{quantity} eq 'all') {
+        push (@objects, $rs->all);
+      } elsif ($source->{quantity} =~ /^\d+$/) {
+        push (@objects, $rs->search({}, { rows => $source->{quantity} }));
+      } else {
+        DBIx::Class::Exception->throw('invalid value for quantity - ' . $source->{quantity});
+      }
+    }
+    if ($source->{ids}) {
+      my @ids = @{$source->{ids}};
+      my @id_objects = grep { $_ } map { $rs->find($_) } @ids;
+      push (@objects, @id_objects);
+    }
+    unless ($source->{quantity} || $source->{ids}) {
+      DBIx::Class::Exception->throw('must specify either quantity or ids');
+    }
+
+    # dump objects
+    foreach my $object (@objects) {
+      $source_options{set_dir} = $tmp_output_dir;
+      $self->dump_object($object, { %options, %source_options } );
+    }
+  }
+
+  while (my $entry = shift @{$self->{queue}}) {
+    $self->dump_object(@$entry);
+  }
+
+  # clear existing output dir
+  foreach my $child ($output_dir->children) {
+    if ($child->is_dir) {
+      next if ($child eq $tmp_output_dir);
+      if (grep { $_ =~ /\.fix/ } $child->children) {
+        $child->rmtree;
+      }
+    } elsif ($child =~ /_dumper_version$/) {
+      $child->remove;
+    }
+  }
+
+  $self->msg("- moving temp dir to $output_dir");
+  move($_, dir($output_dir, $_->relative($_->parent)->stringify)) for $tmp_output_dir->children;
+  if (-e $tmp_output_dir) {
+    $self->msg("- clearing tmp dir $tmp_output_dir");
+    # delete the now-empty temp dir
+    $tmp_output_dir->rmtree;
+  }
+
+  $self->msg("done");
+
+  return 1;
+}
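+
+# dump_object is used internally by dump: it writes one row out as a .fix file,
+# then follows/queues related objects according to the set attributes.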
+sub dump_object {
+  my ($self, $object, $params, $rr_info) = @_;
+  my $set = $params->{set};
+  die 'no dir passed to dump_object' unless $params->{set_dir};
+  die 'no object passed to dump_object' unless $object;
+
+  my @inherited_attrs = @{$self->_inherited_attributes};
+
+  # write dir and gen filename
+  my $source_dir = dir($params->{set_dir}, lc($object->result_source->from));
+  mkdir($source_dir->stringify, 0777);
+
+  # strip dir separators from file name
+  my $file = file($source_dir, join('-', map {
+    ( my $a = $object->get_column($_) ) =~ s|[/\\]|_|g; $a;
+  } sort $object->primary_columns) . '.fix');
+
+  # write file
+  my $exists = (-e $file->stringify) ? 1 : 0;
+  unless ($exists) {
+    $self->msg('-- dumping ' . $file->stringify, 2);
+    my %ds = $object->get_columns;
+
+    my $formatter = $object->result_source->schema->storage->datetime_parser;
+    # mess with dates if specified
+    if ($set->{datetime_relative}) {
+      if ($formatter) {
+        my $dt;
+        if ($set->{datetime_relative} eq 'today') {
+          $dt = DateTime->today;
+        } else {
+          $dt = $formatter->parse_datetime($set->{datetime_relative});
+        }
+
+        while (my ($col, $value) = each %ds) {
+          my $col_info = $object->result_source->column_info($col);
+
+          next unless $value
+            && $col_info->{_inflate_info}
+            && uc($col_info->{data_type}) eq 'DATETIME';
+
+          $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
+        }
+      } else {
+        warn "datetime_relative not supported for this db driver at the moment";
+      }
+    }
+
+    # do the actual dumping
+    my $serialized = Dump(\%ds)->Out();
+    write_file($file->stringify, $serialized);
+    my $mode = 0777; chmod $mode, $file->stringify;
+  }
+
+  # don't bother looking at rels unless we are actually planning to dump at least one type
+  return unless ($set->{might_have}->{fetch} || $set->{belongs_to}->{fetch} || $set->{has_many}->{fetch} || $set->{fetch});
+
+  # dump rels of object
+  my $s = $object->result_source;
+  unless ($exists) {
+    foreach my $name (sort $s->relationships) {
+      my $info = $s->relationship_info($name);
+      my $r_source = $s->related_source($name);
+      # if belongs_to, or might_have with the might_have param set, or has_many with the has_many param set, then dump
+      if (($info->{attrs}{accessor} eq 'single' && (!$info->{attrs}{join_type} || ($set->{might_have} && $set->{might_have}->{fetch}))) || $info->{attrs}{accessor} eq 'filter' || ($info->{attrs}{accessor} eq 'multi' && ($set->{has_many} && $set->{has_many}->{fetch}))) {
+        my $related_rs = $object->related_resultset($name);
+        my $rule = $set->{rules}->{$related_rs->result_source->source_name};
+        # these parts of the rule only apply to has_many rels
+        if ($rule && $info->{attrs}{accessor} eq 'multi') {
+          $related_rs = $related_rs->search($rule->{cond}, { join => $rule->{join} }) if ($rule->{cond});
+          $related_rs = $related_rs->search({}, { rows => $rule->{quantity} }) if ($rule->{quantity} && $rule->{quantity} ne 'all');
+          $related_rs = $related_rs->search({}, { order_by => $rule->{order_by} }) if ($rule->{order_by});
+        }
+        if ($set->{has_many}->{quantity} && $set->{has_many}->{quantity} =~ /^\d+$/) {
+          $related_rs = $related_rs->search({}, { rows => $set->{has_many}->{quantity} });
+        }
+        my %c_params = %{$params};
+        # inherit date param
+        my %mock_set = map { $_ => $set->{$_} } grep { $set->{$_} } @inherited_attrs;
+        $c_params{set} = \%mock_set;
+        $c_params{set} = merge( $c_params{set}, $rule) if ($rule && $rule->{fetch});
+        $self->dump_object($_, \%c_params) foreach $related_rs->all;
+      }
+    }
+  }
+
+  return unless $set && $set->{fetch};
+  foreach my $fetch (@{$set->{fetch}}) {
+    # inherit date param
+    $fetch->{$_} = $set->{$_} foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
+    my $related_rs = $object->related_resultset($fetch->{rel});
+    my $rule = $set->{rules}->{$related_rs->result_source->source_name};
+    if ($rule) {
+      my $info = $object->result_source->relationship_info($fetch->{rel});
+      if ($info->{attrs}{accessor} eq 'multi') {
+        $fetch = merge( $fetch, $rule );
+      } elsif ($rule->{fetch}) {
+        $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
+      }
+    }
+    die "relationship " . $fetch->{rel} . " does not exist for " . $s->source_name unless ($related_rs);
+    if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
+      # if a value starts with a backslash, pass it as a scalar ref to DBIC
+      # ideally this would substitute deeply
+      $fetch->{cond} = { map { $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_} : $fetch->{cond}->{$_} } keys %{$fetch->{cond}} };
+    }
+    $related_rs = $related_rs->search($fetch->{cond}, { join => $fetch->{join} }) if ($fetch->{cond});
+    $related_rs = $related_rs->search({}, { rows => $fetch->{quantity} }) if ($fetch->{quantity} && $fetch->{quantity} ne 'all');
+    $related_rs = $related_rs->search({}, { order_by => $fetch->{order_by} }) if ($fetch->{order_by});
+    $self->dump_object($_, { %{$params}, set => $fetch }) foreach $related_rs->all;
+  }
+}
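+
+# _generate_schema: internal helper for populate. It connects a throwaway schema
+# subclass to the target database, drops any existing tables, then deploys the
+# supplied DDL so the fixtures can be imported into a clean schema.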
+sub _generate_schema {
+  my $self = shift;
+  my $params = shift || {};
+  require DBI;
+  $self->msg("\ncreating schema");
+
+  my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
+  eval "require $schema_class";
+  die $@ if $@;
+
+  my $pre_schema;
+  my $connection_details = $params->{connection_details};
+  $namespace_counter++;
+  my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
+  Class::C3::Componentised->inject_base( $namespace => $schema_class );
+  $pre_schema = $namespace->connect(@{$connection_details});
+  unless( $pre_schema ) {
+    return DBIx::Class::Exception->throw('connection details not valid');
+  }
+  my @tables = map { $pre_schema->source($_)->from } $pre_schema->sources;
+  $self->msg("Tables to drop: [" . join(', ', sort @tables) . "]");
+  my $dbh = $pre_schema->storage->dbh;
+
+  # clear existing db
+  $self->msg("- clearing DB of existing tables");
+  eval { $dbh->do('SET foreign_key_checks=0') };
+  foreach my $table (@tables) {
+    eval { $dbh->do('drop table ' . $table . ($params->{cascade} ? ' cascade' : '') ) };
+  }
+
+  # import new ddl file to db
+  my $ddl_file = $params->{ddl};
+  $self->msg("- deploying schema using $ddl_file");
+  my $data = _read_sql($ddl_file);
+  foreach (@$data) {
+    eval { $dbh->do($_) or warn "SQL was:\n $_"};
+    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
+  }
+  $self->msg("- finished importing DDL into DB");
+
+  # load schema object from our new DB
+  $namespace_counter++;
+  my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_" . $namespace_counter;
+  Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
+  my $schema = $namespace2->connect(@{$connection_details});
+  return $schema;
+}
+
+sub _read_sql {
+  my $ddl_file = shift;
+  my $fh;
+  open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
+  my @data = split(/\n/, join('', <$fh>));
+  @data = grep(!/^--/, @data);
+  @data = split(/;/, join('', @data));
+  close($fh);
+  @data = grep { $_ && $_ !~ /^-- / } @data;
+  return \@data;
+}
+
+=head2 populate
+
+=over 4
+
+=item Arguments: \%attrs
+
+=item Return Value: 1
+
+=back
+
+  $fixtures->populate({
+    directory => '/home/me/app/fixtures', # directory to look for fixtures in, as specified to dump
+    ddl => '/home/me/app/sql/ddl.sql', # DDL to deploy
+    connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'], # database to clear, deploy and then populate
+    post_ddl => '/home/me/app/sql/post_ddl.sql', # DDL to deploy after populating records, i.e. FK constraints
+    cascade => 1, # use CASCADE option when dropping tables
+  });
+
+In this case the database app_dev will be cleared of all tables, then the specified DDL deployed
+to it, then finally all fixtures found in /home/me/app/fixtures will be added to it. populate
+will generate its own DBIx::Class schema from the DDL rather than being passed one to use. This
+is better, as custom insert methods are avoided which can get in the way. In some cases you
+might not have a DDL, and so this method will eventually allow a $schema object to be passed
+instead.
+
+If needed, you can specify a post_ddl attribute which is a DDL to be applied after all the
+fixtures have been added to the database. A good use of this option would be to add foreign key
+constraints, since databases like PostgreSQL cannot disable foreign key checks.
+
+If your tables have foreign key constraints you may want to use the cascade attribute, which
+will make the drop table functionality cascade, i.e. 'DROP TABLE $table CASCADE'.
+
+directory, ddl and connection_details are all required attributes.
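+
+populate also honours a datetime_relative_to parameter (a DateTime object, read by its
+DateTime::Duration handling): when the fixtures contain durations, they are added to that date
+instead of today's. A sketch of how that might be used:
+
+  $fixtures->populate({
+    directory => '/home/me/app/fixtures',
+    ddl => '/home/me/app/sql/ddl.sql',
+    connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
+    datetime_relative_to => DateTime->new(year => 2007, month => 10, day => 30),
+  });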
+
+=cut
+
+sub populate {
+  my $self = shift;
+  my ($params) = @_;
+  unless (ref $params eq 'HASH') {
+    return DBIx::Class::Exception->throw('first arg to populate must be hash ref');
+  }
+
+  foreach my $param (qw/directory/) {
+    unless ($params->{$param}) {
+      return DBIx::Class::Exception->throw($param . ' param not specified');
+    }
+  }
+  my $fixture_dir = dir(delete $params->{directory});
+  unless (-e $fixture_dir) {
+    return DBIx::Class::Exception->throw('fixture directory does not exist at ' . $fixture_dir);
+  }
+
+  my $ddl_file;
+  if ($params->{ddl} && $params->{connection_details}) {
+    $ddl_file = file(delete $params->{ddl});
+    unless (-e $ddl_file) {
+      return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
+    }
+    unless (ref $params->{connection_details} eq 'ARRAY') {
+      return DBIx::Class::Exception->throw('connection details must be an arrayref');
+    }
+  } elsif ($params->{schema}) {
+    return DBIx::Class::Exception->throw('passing a schema is not supported at the moment');
+  } else {
+    return DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
+  }
+
+  my $schema = $self->_generate_schema({ ddl => $ddl_file, connection_details => delete $params->{connection_details}, %{$params} });
+  $self->msg("\nimporting fixtures");
+  my $tmp_fixture_dir = dir($fixture_dir, "-~populate~-" . $<);
+
+  my $version_file = file($fixture_dir, '_dumper_version');
+  unless (-e $version_file) {
+#    return DBIx::Class::Exception->throw('no version file found');
+  }
+
+  if (-e $tmp_fixture_dir) {
+    $self->msg("- deleting existing temp directory $tmp_fixture_dir");
+    $tmp_fixture_dir->rmtree;
+  }
+  $self->msg("- creating temp dir");
+  dircopy(dir($fixture_dir, $schema->source($_)->from), dir($tmp_fixture_dir, $schema->source($_)->from)) for grep { -e dir($fixture_dir, $schema->source($_)->from) } $schema->sources;
+
+  my $fixup_visitor;
+  my $formatter = $schema->storage->datetime_parser;
+  if ($formatter) {
+    my %callbacks;
+    if ($params->{datetime_relative_to}) {
+      $callbacks{'DateTime::Duration'} = sub {
+        $params->{datetime_relative_to}->clone->add_duration($_);
+      };
+    } else {
+      $callbacks{'DateTime::Duration'} = sub {
+        $formatter->format_datetime(DateTime->today->add_duration($_))
+      };
+    }
+    $callbacks{object} ||= "visit_ref";
+    $fixup_visitor = Data::Visitor::Callback->new(%callbacks);
+  }
+
+  my $db = $schema->storage->dbh->{Driver}->{Name};
+  my $dbi_class = "DBIx::Class::Fixtures::DBI::$db";
+
+  eval "require $dbi_class";
+  if ($@) {
+    $dbi_class = "DBIx::Class::Fixtures::DBI";
+    eval "require $dbi_class";
+    die $@ if $@;
+  }
+
+  $dbi_class->do_insert($schema, sub {
+    foreach my $source (sort $schema->sources) {
+      $self->msg("- adding " . $source);
+      my $rs = $schema->resultset($source);
+      my $source_dir = dir($tmp_fixture_dir, lc($rs->result_source->from));
+      next unless (-e $source_dir);
+      my @rows;
+      while (my $file = $source_dir->next) {
+        next unless ($file =~ /\.fix$/);
+        next if $file->is_dir;
+        my $contents = $file->slurp;
+        my $HASH1;
+        eval($contents);
+        $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
+        push(@rows, $HASH1);
+      }
+      $rs->populate(\@rows) if (scalar(@rows));
+    }
+  });
+
+  $self->do_post_ddl({schema=>$schema, post_ddl=>$params->{post_ddl}}) if $params->{post_ddl};
+
+  $self->msg("- fixtures imported");
+  $self->msg("- cleaning up");
+  $tmp_fixture_dir->rmtree;
+  eval { $schema->storage->dbh->do('SET foreign_key_checks=1') };
+
+  return 1;
+}
+
+sub do_post_ddl {
+  my ($self, $params) = @_;
+
+  my $schema = $params->{schema};
+  my $data = _read_sql($params->{post_ddl});
+  foreach (@$data) {
+    eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
+    if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
+  }
+  $self->msg("- finished importing post-populate DDL into DB");
+}
+
+sub msg {
+  my $self = shift;
+  my $subject = shift || return;
+  my $level = shift || 1;
+  return unless $self->debug >= $level;
+  if (ref $subject) {
+    print Dumper($subject);
+  } else {
+    print $subject . "\n";
+  }
+}
+
+=head1 AUTHOR
+
+  Luke Saunders
+
+  Initial development sponsored by and (c) Takkle, Inc. 2007
+
+=head1 CONTRIBUTORS
+
+  Ash Berlin
+  Matt S. Trout
+  Drew Taylor
+
+=head1 LICENSE
+
+  This library is free software under the same license as Perl itself.
+
+=cut
+
 1;