X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FSQL%2FTranslator%2FParser%2FDBIx%2FClass.pm;h=14812ac939da820a309317ba36ebce8e56e3494d;hb=f4dc39d649672ff4452cf827ca204a1e937bc8b7;hp=182206d4a365bc141715ac1e711d8379333adeb2;hpb=ebed3aafa2b8501b96b7bb4dd6de5733a65ddb90;p=dbsrgits%2FDBIx-Class.git diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 182206d..14812ac 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -8,14 +8,18 @@ package SQL::Translator::Parser::DBIx::Class; use strict; use warnings; -use vars qw($DEBUG $VERSION @EXPORT_OK); +our ($DEBUG, $VERSION, @EXPORT_OK); $VERSION = '1.10'; $DEBUG = 0 unless defined $DEBUG; use Exporter; use SQL::Translator::Utils qw(debug normalize_name); -use Carp::Clan qw/^SQL::Translator|^DBIx::Class/; -use Scalar::Util (); +use DBIx::Class::Carp qw/^SQL::Translator|^DBIx::Class|^Try::Tiny/; +use DBIx::Class::_Util qw( dbic_internal_try dbic_internal_catch bag_eq ); +use Class::C3::Componentised; +use Scalar::Util 'blessed'; +use namespace::clean; + use base qw(Exporter); @EXPORT_OK = qw(parse); @@ -30,20 +34,44 @@ use base qw(Exporter); # We're working with DBIx::Class Schemas, not data streams. # ------------------------------------------------------------------- sub parse { - # this is a hack to prevent schema leaks due to a retarded SQLT implementation - # DO NOT REMOVE (until SQLT2 is out, the all of this will be rewritten anyway) - Scalar::Util::weaken ($_[1]) if ref ($_[1]); - my ($tr, $data) = @_; my $args = $tr->parser_args; - my $dbicschema = $args->{'DBIx::Class::Schema'} || $args->{"DBIx::Schema"} ||$data; - $dbicschema ||= $args->{'package'}; - my $limit_sources = $args->{'sources'}; - croak 'No DBIx::Class::Schema' unless ($dbicschema); + my $dbicschema = $data || $args->{dbic_schema}; + + for (qw(DBIx::Class::Schema DBIx::Schema package)) { + if (defined (my $s = delete $args->{$_} )) { + carp_unique("Supplying a schema via ... parser_args => { '$_' => \$schema } is deprecated. 
Please use parser_args => { dbic_schema => \$schema } instead"); + + # move it from the deprecated to the proper $args slot + unless ($dbicschema) { + $args->{dbic_schema} = $dbicschema = $s; + } + } + } + + DBIx::Class::Exception->throw('No DBIx::Class::Schema') unless ($dbicschema); + if (!ref $dbicschema) { - eval "use $dbicschema;"; - croak "Can't load $dbicschema ($@)" if($@); + dbic_internal_try { + Class::C3::Componentised->ensure_class_loaded($dbicschema) + } + dbic_internal_catch { + DBIx::Class::Exception->throw("Can't load $dbicschema: $_"); + } + } + + if ( + ref $args->{dbic_schema} + and + $args->{dbic_schema}->storage + ) { + # we have a storage-holding $schema instance in $args + # we need to dissociate it from that $storage + # otherwise SQLT insanity may ensue due to how some + # serializing producers treat $args (crazy crazy shit) + local $args->{dbic_schema}{storage}; + $args->{dbic_schema} = $args->{dbic_schema}->clone; } my $schema = $tr->schema; @@ -53,12 +81,12 @@ sub parse { unless ($schema->name); my @monikers = sort $dbicschema->sources; - if ($limit_sources) { + if (my $limit_sources = $args->{'sources'}) { my $ref = ref $limit_sources || ''; $dbicschema->throw_exception ("'sources' parameter must be an array or hash ref") unless( $ref eq 'ARRAY' || ref eq 'HASH' ); - # limit monikers to those specified in + # limit monikers to those specified in my $sources; if ($ref eq 'ARRAY') { $sources->{$_} = 1 for (@$limit_sources); @@ -98,6 +126,10 @@ sub parse { name => $table_name, type => 'TABLE', ); + + my $ci = $source->columns_info; + + # same order as add_columns foreach my $col ($source->columns) { # assuming column_info in dbic is the same as DBI (?) @@ -108,7 +140,7 @@ sub parse { is_auto_increment => 0, is_foreign_key => 0, is_nullable => 0, - %{$source->column_info($col)} + %{$ci->{$col} || {}} ); if ($colinfo{is_nullable}) { $colinfo{default} = '' unless exists $colinfo{default}; @@ -123,13 +155,11 @@ sub parse { my %unique_constraints = $source->unique_constraints; foreach my $uniq (sort keys %unique_constraints) { - if (!$source->_compare_relationship_keys($unique_constraints{$uniq}, \@primary)) { - $table->add_constraint( - type => 'unique', - name => $uniq, - fields => $unique_constraints{$uniq} - ); - } + $table->add_constraint( + type => 'unique', + name => $uniq, + fields => $unique_constraints{$uniq} + ) unless bag_eq( \@primary, $unique_constraints{$uniq} ); } my @rels = $source->relationships(); @@ -139,15 +169,24 @@ sub parse { # global add_fk_index set in parser_args my $add_fk_index = (exists $args->{add_fk_index} && ! $args->{add_fk_index}) ? 0 : 1; - foreach my $rel (sort @rels) - { + REL: + foreach my $rel (sort @rels) { my $rel_info = $source->relationship_info($rel); # Ignore any rel cond that isn't a straight hash + # + # FIXME - this can be done *WAY* better via the recolcond resolver + # but no time to think through the implications for deploy() at + # the moment. 
Grep for {identity_map_matches_condition} for ideas + # how to improve this, and the /^\w+\.(\w+)$/ crap below next unless ref $rel_info->{cond} eq 'HASH'; - my $relsource = $source->related_source($rel); + my $relsource = dbic_internal_try { $source->related_source($rel) }; + unless ($relsource) { + carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '$rel_info->{class}' is not registered with this schema\n"; + next; + }; # related sources might be excluded via a {sources} filter or might be views next unless exists $table_monikers{$relsource->source_name}; @@ -158,13 +197,18 @@ sub parse { # support quoting properly to be signaled about this $rel_table = $$rel_table if ref $rel_table eq 'SCALAR'; - my $reverse_rels = $source->reverse_relationship_info($rel); - my ($otherrelname, $otherrelationship) = each %{$reverse_rels}; - # Force the order of @cond to match the order of ->add_columns my $idx; my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns; - my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); + + for ( keys %{$rel_info->{cond}} ) { + unless (exists $other_columns_idx{$_}) { + carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '@{[ $relsource->source_name ]}' does not contain one of the specified columns: '$_'\n"; + next REL; + } + } + + my @cond = sort { $other_columns_idx{$a} <=> $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); # Get the key information, mapping off the foreign/self markers my @refkeys = map {/^\w+\.(\w+)$/} @cond; @@ -186,10 +230,12 @@ sub parse { # this is supposed to indicate a has_one/might_have... # where's the introspection!!?? :) else { - $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary); + $fk_constraint = ! bag_eq( \@keys, \@primary ); } + my $cascade; + CASCADE_TYPE: for my $c (qw/delete update/) { if (exists $rel_info->{attrs}{"on_$c"}) { if ($fk_constraint) { @@ -200,8 +246,16 @@ sub parse { . "If you are sure that SQLT must generate a constraint for this relationship, add 'is_foreign_key_constraint => 1' to the attributes.\n"; } } - elsif (defined $otherrelationship and $otherrelationship->{attrs}{$c eq 'update' ? 'cascade_copy' : 'cascade_delete'}) { - $cascade->{$c} = 'CASCADE'; + else { + for my $revrelinfo (values %{ $source->reverse_relationship_info($rel) } ) { + ( ( $cascade->{$c} = 'CASCADE' ), next CASCADE_TYPE ) if ( + $revrelinfo->{attrs} + ->{ ($c eq 'update') + ? 'cascade_copy' + : 'cascade_delete' + } + ); + } } } @@ -209,7 +263,7 @@ sub parse { # Constraints are added only if applicable next unless $fk_constraint; - # Make sure we dont create the same foreign key constraint twice + # Make sure we don't create the same foreign key constraint twice my $key_test = join("\x00", sort @keys); next if $created_FK_rels{$rel_table}->{$key_test}; @@ -224,9 +278,12 @@ sub parse { $tables{$table_name}{foreign_table_deps}{$rel_table}++; } + # trim schema before generating constraint/index names + (my $table_abbrev = $table_name) =~ s/ ^ [^\.]+ \. //x; + $table->add_constraint( type => 'foreign_key', - name => join('_', $table_name, 'fk', @keys), + name => join('_', $table_abbrev, 'fk', @keys), fields => \@keys, reference_fields => \@refkeys, reference_table => $rel_table, @@ -247,8 +304,9 @@ sub parse { next if join("\x00", @keys) eq join("\x00", @primary); if ($add_fk_index_rel) { + (my $idx_name = $table_name) =~ s/ ^ [^\.]+ \. 
//x; my $index = $table->add_index( - name => join('_', $table_name, 'idx', @keys), + name => join('_', $table_abbrev, 'idx', @keys), fields => \@keys, type => 'NORMAL', ); @@ -263,7 +321,7 @@ sub parse { my $dependencies = { map { $_ => _resolve_deps ($_, \%tables) } (keys %tables) }; - + for my $table (sort { keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} } @@ -277,7 +335,7 @@ sub parse { # the hook might have already removed the table if ($schema->get_table($table) && $table =~ /^ \s* \( \s* SELECT \s+/ix) { - warn <<'EOW'; + carp <<'EOW'; Custom SQL through ->name(\'( SELECT ...') is DEPRECATED, for more details see "Arbitrary SQL through a custom ResultSource" in DBIx::Class::Manual::Cookbook @@ -292,17 +350,22 @@ EOW } my %views; - + my @views = map { $dbicschema->source($_) } keys %view_monikers; + + my $view_dependencies = { + map { + $_ => _resolve_deps( $dbicschema->source($_), \%view_monikers ) + } ( keys %view_monikers ) + }; + my @view_sources = - sort { - keys %{ $dependencies->{$a} || {} } - <=> - keys %{ $dependencies->{$b} || {} } - || - $a cmp $b - } - map { $dbicschema->source($_) } - keys %view_monikers; + sort { + keys %{ $view_dependencies->{ $a->source_name } || {} } <=> + keys %{ $view_dependencies->{ $b->source_name } || {} } + || $a->source_name cmp $b->source_name + } + map { $dbicschema->source($_) } + keys %view_monikers; foreach my $source (@view_sources) { @@ -342,35 +405,51 @@ EOW # Quick and dirty dependency graph calculator # sub _resolve_deps { - my ($table, $tables, $seen) = @_; - - my $ret = {}; - $seen ||= {}; - - # copy and bump all deps by one (so we can reconstruct the chain) - my %seen = map { $_ => $seen->{$_} + 1 } (keys %$seen); - $seen{$table} = 1; - - for my $dep (keys %{$tables->{$table}{foreign_table_deps}} ) { - - if ($seen->{$dep}) { - - # warn and remove the circular constraint so we don't get flooded with the same warning over and over - #carp sprintf ("Circular dependency detected, schema may not be deployable:\n%s\n", - # join (' -> ', (sort { $seen->{$b} <=> $seen->{$a} } (keys %$seen) ), $table, $dep ) - #); - #delete $tables->{$table}{foreign_table_deps}{$dep}; - - return {}; + my ( $question, $answers, $seen ) = @_; + my $ret = {}; + $seen ||= {}; + my @deps; + + # copy and bump all deps by one (so we can reconstruct the chain) + my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen ); + if ( blessed($question) + && $question->isa('DBIx::Class::ResultSource::View') ) + { + $seen{ $question->result_class } = 1; + @deps = keys %{ $question->{deploy_depends_on} }; + } + else { + $seen{$question} = 1; + @deps = keys %{ $answers->{$question}{foreign_table_deps} }; } - my $subdeps = _resolve_deps ($dep, $tables, \%seen); - $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps ); - - ++$ret->{$dep}; - } + for my $dep (@deps) { + if ( $seen->{$dep} ) { + return {}; + } + my $next_dep; - return $ret; + if ( blessed($question) + && $question->isa('DBIx::Class::ResultSource::View') ) + { + no warnings 'uninitialized'; + my ($next_dep_source_name) = + grep { + $question->schema->source($_)->result_class eq $dep + && !( $question->schema->source($_) + ->isa('DBIx::Class::ResultSource::Table') ) + } @{ [ $question->schema->sources ] }; + return {} unless $next_dep_source_name; + $next_dep = $question->schema->source($next_dep_source_name); + } + else { + $next_dep = $dep; + } + my $subdeps = _resolve_deps( $next_dep, $answers, \%seen ); + $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps ); + ++$ret->{$dep}; + } + return 
$ret;
 }
 
 1;
 
@@ -397,7 +476,7 @@ from a DBIx::Class::Schema instance
    my $trans = SQL::Translator->new (
        parser      => 'SQL::Translator::Parser::DBIx::Class',
        parser_args => {
-           package => $schema,
+           dbic_schema => $schema,
            add_fk_index => 0,
            sources => [qw/
                Artist
@@ -427,6 +506,27 @@ L.
 
 =head1 PARSER OPTIONS
 
+=head2 dbic_schema
+
+The DBIx::Class schema (either an instance or a class name) to be parsed.
+This argument is in fact optional - instead one can supply it later at
+translation time as an argument to L<SQL::Translator/translate>. In
+other words both of the following invocations are valid and will produce
+conceptually identical output:
+
+  my $yaml = SQL::Translator->new(
+    parser => 'SQL::Translator::Parser::DBIx::Class',
+    parser_args => {
+      dbic_schema => $schema,
+    },
+    producer => 'SQL::Translator::Producer::YAML',
+  )->translate;
+
+  my $yaml = SQL::Translator->new(
+    parser => 'SQL::Translator::Parser::DBIx::Class',
+    producer => 'SQL::Translator::Producer::YAML',
+  )->translate(data => $schema);
+
 =head2 add_fk_index
 
 Create an index for each foreign key.
@@ -447,12 +547,13 @@ Limit the amount of parsed sources by supplying an explicit list of source names
 
 L<SQL::Translator>, L<DBIx::Class::Schema>
 
-=head1 AUTHORS
-
-See L<DBIx::Class/CONTRIBUTORS>.
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
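
A short usage sketch (not from the patch itself) tying the documented parser options together. The schema class name My::Schema, the source names and the choice of the SQLite producer are placeholders; any DBIx::Class schema and any SQL::Translator producer should work the same way.

  use strict;
  use warnings;
  use SQL::Translator;

  # 'My::Schema' is a hypothetical DBIx::Class::Schema subclass; per the POD
  # above, dbic_schema accepts either a class name (loaded on demand) or an
  # already-built schema instance.
  my $trans = SQL::Translator->new(
    parser      => 'SQL::Translator::Parser::DBIx::Class',
    parser_args => {
      dbic_schema  => 'My::Schema',        # new-style argument, replaces 'package'
      add_fk_index => 0,                   # skip the per-foreign-key indexes
      sources      => [qw( Artist CD )],   # limit parsing to these result sources
    },
    producer    => 'SQLite',
  );

  # translate() returns the generated DDL, or false on failure
  my $ddl = $trans->translate
    or die $trans->error;

  print $ddl;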