X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FSchema%2FLoader%2FBase.pm;h=74f99698e90f101218e6efe00300a2135b9e19ab;hb=refs%2Fheads%2Fregex_capture_issues;hp=8d881fb1b548011d70dc70dbae39c2eef6d1b4c3;hpb=c0a0986f6f77e7677d39b9b6021862b987d9a6b9;p=dbsrgits%2FDBIx-Class-Schema-Loader.git
diff --git a/lib/DBIx/Class/Schema/Loader/Base.pm b/lib/DBIx/Class/Schema/Loader/Base.pm
index 8d881fb..74f9969 100644
--- a/lib/DBIx/Class/Schema/Loader/Base.pm
+++ b/lib/DBIx/Class/Schema/Loader/Base.pm
@@ -17,12 +17,13 @@ use File::Temp qw//;
 use Class::Unload;
 use Class::Inspector ();
 use Scalar::Util 'looks_like_number';
-use File::Slurp 'slurp';
+use File::Slurp 'read_file';
 use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path/;
 use DBIx::Class::Schema::Loader::Optional::Dependencies ();
 use Try::Tiny;
 use DBIx::Class ();
-use Encode qw/decode encode/;
+use Encode qw/encode/;
+use List::MoreUtils 'all';
 use namespace::clean;
 
 our $VERSION = '0.07010';
@@ -70,6 +71,9 @@ __PACKAGE__->mk_group_ro_accessors('simple', qw/
                                 config_file
                                 loader_class
                                 qualify_objects
+                                tables
+                                class_to_table
+                                uniq_to_primary
 /);
 
 
@@ -88,13 +92,13 @@ __PACKAGE__->mk_group_accessors('simple', qw/
                                 preserve_case
                                 col_collision_map
                                 rel_collision_map
+                                rel_name_map
                                 real_dump_directory
                                 result_components_map
                                 result_roles_map
                                 datetime_undef_if_invalid
                                 _result_class_methods
                                 naming_set
-                                tables
 /);
 
 =head1 NAME
@@ -300,8 +304,12 @@ decides to execute will be C<warn>-ed before execution.
 
 =head2 db_schema
 
 Set the name of the schema to load (schema in the sense that your database
-vendor means it). Does not currently support loading more than one schema
-name.
+vendor means it).
+
+Can be set to an arrayref of schema names for multiple schemas, or the special
+value C<%> for all schemas.
+
+Multiple schemas have only been tested on PostgreSQL.
 
 =head2 constraint
 
@@ -348,6 +356,43 @@ passed, the code is called with arguments of
         column_info    => hashref of column info (data_type, is_nullable, etc),
     }
 
+=head2 rel_name_map
+
+Similar in idea to moniker_map, but different in the details. It can be
+a hashref or a code ref.
+
+If it is a hashref, keys can be either the default relationship name, or the
+moniker. The keys that are the default relationship name should map to the
+name you want to change the relationship to. Keys that are monikers should map
+to hashes mapping relationship names to their translation. You can do both at
+once, and the more specific moniker version will be picked up first. So, for
+instance, you could have
+
+    {
+        bar => "baz",
+        Foo => {
+            bar => "blat",
+        },
+    }
+
+and relationships that would have been named C<bar> will now be named C<baz>
+except that in the table whose moniker is C<Foo> it will be named C<blat>.
+
+If it is a coderef, the argument passed will be a hashref of this form:
+
+    {
+        name           => default relationship name,
+        type           => the relationship type eg: C<belongs_to> or C<has_many>,
+        local_class    => name of the DBIC class we are building,
+        local_moniker  => moniker of the DBIC class we are building,
+        local_columns  => columns in this table in the relationship,
+        remote_class   => name of the DBIC class we are related to,
+        remote_moniker => moniker of the DBIC class we are related to,
+        remote_columns => columns in the other table in the relationship,
+    }
+
+DBICSL will try to use the value returned as the relationship name. 
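
(Not part of the patch: a minimal usage sketch of the loader options this change documents — C<db_schema> as an arrayref, C<rel_name_map>, and C<uniq_to_primary> — passed through DBIx::Class::Schema::Loader's make_schema_at. The schema class, schema names, and connection details below are invented placeholders.)

    use DBIx::Class::Schema::Loader qw/ make_schema_at /;

    make_schema_at(
        'My::Schema',                                     # hypothetical schema class
        {
            dump_directory  => './lib',
            db_schema       => [ 'public', 'billing' ],   # or '%' for all schemas
            uniq_to_primary => 1,                         # promote unique constraints to PKs
            rel_name_map    => {
                bar => 'baz',                             # rename the default 'bar' rel everywhere...
                Foo => { bar => 'blat' },                 # ...except in the source whose moniker is Foo
            },
        },
        [ 'dbi:Pg:dbname=mydb', 'user', 'password' ],     # placeholder connect info
    );
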
+ =head2 inflect_plural Just like L above (can be hash/code-ref, falls back to default @@ -585,6 +630,12 @@ rather than column names/accessors. The default is to just append C<_rel> to the relationship name, see L. +=head2 uniq_to_primary + +Automatically promotes the largest unique constraints with non-nullable columns +on tables to primary keys, assuming there is only one largest unique +constraint. + =head1 METHODS None of these methods are intended for direct invocation by regular @@ -647,11 +698,19 @@ sub new { } } - $self->result_components_map($self->{result_component_map}) - if defined $self->{result_component_map}; - - $self->result_roles_map($self->{result_role_map}) - if defined $self->{result_role_map}; + if (defined $self->{result_component_map}) { + if (defined $self->result_components_map) { + croak "Specify only one of result_components_map or result_component_map"; + } + $self->result_components_map($self->{result_component_map}) + } + + if (defined $self->{result_role_map}) { + if (defined $self->result_roles_map) { + croak "Specify only one of result_roles_map or result_role_map"; + } + $self->result_roles_map($self->{result_role_map}) + } croak "the result_roles and result_roles_map options may only be used in conjunction with use_moose=1" if ((not defined $self->use_moose) || (not $self->use_moose)) @@ -706,6 +765,7 @@ sub new { $self->{monikers} = {}; $self->{tables} = {}; + $self->{class_to_table} = {}; $self->{classes} = {}; $self->{_upgrading_classes} = {}; @@ -774,6 +834,45 @@ sub new { } } + if (my $rel_collision_map = $self->rel_collision_map) { + if (my $reftype = ref $rel_collision_map) { + if ($reftype ne 'HASH') { + croak "Invalid type $reftype for option 'rel_collision_map'"; + } + } + else { + $self->rel_collision_map({ '(.*)' => $rel_collision_map }); + } + } + + if (defined(my $rel_name_map = $self->rel_name_map)) { + my $reftype = ref $rel_name_map; + if ($reftype ne 'HASH' && $reftype ne 'CODE') { + croak "Invalid type $reftype for option 'rel_name_map', must be HASH or CODE"; + } + } + + if (defined $self->db_schema) { + if (ref $self->db_schema eq 'ARRAY') { + if (@{ $self->db_schema } > 1) { + $self->{qualify_objects} = 1; + } + elsif (@{ $self->db_schema } == 0) { + $self->{db_schema} = undef; + } + } + elsif (not ref $self->db_schema) { + if ($self->db_schema eq '%') { + $self->{qualify_objects} = 1; + } + + $self->{db_schema} = [ $self->db_schema ]; + } + else { + croak 'db_schema must be an array or single value'; + } + } + $self; } @@ -819,7 +918,7 @@ EOF } # otherwise check if we need backcompat mode for a static schema - my $filename = $self->_get_dump_filename($self->schema_class); + my $filename = $self->get_dump_filename($self->schema_class); return unless -e $filename; my ($old_gen, $old_md5, $old_ver, $old_ts, $old_custom) = @@ -833,7 +932,14 @@ EOF } my $load_classes = ($old_gen =~ /^__PACKAGE__->load_classes;/m) ? 1 : 0; - my $result_namespace = do { ($old_gen =~ /result_namespace => '([^']+)'/) ? $1 : '' }; + + my $result_namespace = do { ($old_gen =~ /result_namespace => (.+)/) ? 
$1 : '' }; + my $ds = eval $result_namespace; + die <<"EOF" if $@; +Could not eval expression '$result_namespace' for result_namespace from +$filename: $@ +EOF + $result_namespace = $ds || ''; if ($load_classes && (not defined $self->use_namespaces)) { warn <<"EOF" unless $ENV{SCHEMA_LOADER_BACKCOMPAT}; @@ -1021,7 +1127,7 @@ sub _load_external { warn qq/# Loaded external class definition for '$class'\n/ if $self->debug; - my $code = $self->_rewrite_old_classnames(decode 'UTF-8', scalar slurp $real_inc_path); + my $code = $self->_rewrite_old_classnames(scalar read_file($real_inc_path, binmode => ':encoding(UTF-8)')); if ($self->dynamic) { # load the class too eval_package_without_redefine_warnings($class, $code); @@ -1044,7 +1150,7 @@ sub _load_external { } if ($old_real_inc_path) { - my $code = decode 'UTF-8', scalar slurp $old_real_inc_path; + my $code = read_file($old_real_inc_path, binmode => ':encoding(UTF-8)'); $self->_ext_stmt($class, <<"EOF"); @@ -1125,8 +1231,7 @@ sub rescan { } } - delete $self->{_dump_storage}; - delete $self->{_relations_started}; + delete @$self{qw/_dump_storage _relations_started _uniqs_started/}; my $loaded = $self->_load_tables(@current); @@ -1299,7 +1404,7 @@ sub _reload_class { eval_package_without_redefine_warnings ($class, "require $class"); } catch { - my $source = decode 'UTF-8', scalar slurp $self->_get_dump_filename($class); + my $source = read_file($self->_get_dump_filename($class), binmode => ':encoding(UTF-8)'); die "Failed to reload class $class: $_.\n\nCLASS SOURCE:\n\n$source"; }; } @@ -1377,7 +1482,8 @@ sub _dump_to_dir { for my $attr (@attr) { if ($self->$attr) { - $namespace_options .= qq| $attr => '| . $self->$attr . qq|',\n| + my $code = dumper_squashed $self->$attr; + $namespace_options .= qq| $attr => $code,\n| } } $schema_text .= qq|(\n$namespace_options)| if $namespace_options; @@ -1398,26 +1504,30 @@ sub _dump_to_dir { my $src_text = qq|package $src_class;\n\n| . qq|# Created by DBIx::Class::Schema::Loader\n| - . qq|# DO NOT MODIFY THE FIRST PART OF THIS FILE\n\n| - . qq|use strict;\nuse warnings;\n\n|; + . 
qq|# DO NOT MODIFY THE FIRST PART OF THIS FILE\n\n|; + + $src_text .= $self->_make_pod_heading($src_class); + + $src_text .= qq|use strict;\nuse warnings;\n\n|; + + $src_text .= $self->_base_class_pod($result_base_class) + unless $result_base_class eq 'DBIx::Class::Core'; + if ($self->use_moose) { $src_text.= qq|use Moose;\nuse MooseX::NonMoose;\nuse namespace::autoclean;|; # these options 'use base' which is compile time if (@{ $self->left_base_classes } || @{ $self->additional_base_classes }) { - $src_text .= qq|\nBEGIN { extends '$result_base_class' }\n\n|; + $src_text .= qq|\nBEGIN { extends '$result_base_class' }\n|; } else { - $src_text .= qq|\nextends '$result_base_class';\n\n|; + $src_text .= qq|\nextends '$result_base_class';\n|; } } else { - $src_text .= qq|use base '$result_base_class';\n\n|; + $src_text .= qq|use base '$result_base_class';\n|; } - $self->_base_class_pod($src_class, $result_base_class) - unless $result_base_class eq 'DBIx::Class::Core'; - $self->_write_classfile($src_class, $src_text); } @@ -1646,6 +1756,8 @@ sub _result_namespace { my ($self, $schema_class, $ns) = @_; my @result_namespace; + $ns = $ns->[0] if ref $ns; + if ($ns =~ /^\+(.*)/) { # Fully qualified namespace @result_namespace = ($1) @@ -1708,6 +1820,7 @@ sub _make_src_class { $self->classes->{$table} = $table_class; $self->monikers->{$table} = $table_moniker; $self->tables->{$table_moniker} = $table; + $self->class_to_table->{$table_class} = $table; $self->_pod_class_list($table_class, 'ADDITIONAL CLASSES USED', @{$self->additional_classes}); @@ -1785,14 +1898,12 @@ sub _is_result_class_method { sub _resolve_col_accessor_collisions { my ($self, $table, $col_info) = @_; - my $table_name = ref $table ? $$table : $table; - while (my ($col, $info) = each %$col_info) { my $accessor = $info->{accessor} || $col; next if $accessor eq 'id'; # special case (very common column) - if ($self->_is_result_class_method($accessor, $table_name)) { + if ($self->_is_result_class_method($accessor, $table)) { my $mapped = 0; if (my $map = $self->col_collision_map) { @@ -1806,7 +1917,7 @@ sub _resolve_col_accessor_collisions { if (not $mapped) { warn <<"EOF"; -Column '$col' in table '$table_name' collides with an inherited method. +Column '$col' in table '$table' collides with an inherited method. See "COLUMN ACCESSOR COLLISIONS" in perldoc DBIx::Class::Schema::Loader::Base . EOF $info->{accessor} = undef; @@ -1815,8 +1926,7 @@ EOF } } -# use the same logic to run moniker_map, col_accessor_map, and -# relationship_name_map +# use the same logic to run moniker_map, col_accessor_map sub _run_user_map { my ( $self, $map, $default_code, $ident, @extra ) = @_; @@ -1896,11 +2006,11 @@ sub _setup_src_meta { } my $full_table_name = ($self->qualify_objects ? - ($self->_quote($self->db_schema) . '.') : '') - . (ref $table_name ? $$table_name : $table_name); + ($self->_quote($table->schema) . '.') : '') + . (ref $table_name eq 'SCALAR' ? 
$$table_name : $table_name); # be careful to not create refs Data::Dump can "optimize" - $full_table_name = \do {"".$full_table_name} if ref $table_name; + $full_table_name = \do {"".$full_table_name} if ref $table_name eq 'SCALAR'; $self->_dbic_stmt($table_class, 'table', $full_table_name); @@ -1941,6 +2051,39 @@ sub _setup_src_meta { my $pks = $self->_table_pk_info($table) || []; + my %uniq_tag; # used to eliminate duplicate uniqs + + $uniq_tag{ join("\0", @$pks) }++ if @$pks; # pk is a uniq + + my $uniqs = $self->_table_uniq_info($table) || []; + my @uniqs; + + foreach my $uniq (@$uniqs) { + my ($name, $cols) = @$uniq; + next if $uniq_tag{ join("\0", @$cols) }++; # skip duplicates + push @uniqs, [$name, $cols]; + } + + my @non_nullable_uniqs = grep { + all { $col_info->{$_}{is_nullable} == 0 } @{ $_->[1] } + } @uniqs; + + if ($self->uniq_to_primary && (not @$pks) && @non_nullable_uniqs) { + my @by_colnum = sort { $b->[0] <=> $a->[0] } + map [ scalar @{ $_->[1] }, $_ ], @non_nullable_uniqs; + + if (not (@by_colnum > 1 && $by_colnum[0][0] == $by_colnum[1][0])) { + my @keys = map $_->[1], @by_colnum; + + my $pk = $keys[0]; + + # remove the uniq from list + @uniqs = grep { $_->[0] ne $pk->[0] } @uniqs; + + $pks = $pk->[1]; + } + } + foreach my $pkcol (@$pks) { $col_info->{$pkcol}{is_nullable} = 0; } @@ -1951,19 +2094,13 @@ sub _setup_src_meta { map { $_, ($col_info->{$_}||{}) } @$cols ); - my %uniq_tag; # used to eliminate duplicate uniqs + $self->_dbic_stmt($table_class, 'set_primary_key', @$pks) + if @$pks; - @$pks ? $self->_dbic_stmt($table_class,'set_primary_key',@$pks) - : carp("$table has no primary key"); - $uniq_tag{ join("\0", @$pks) }++ if @$pks; # pk is a uniq - - my $uniqs = $self->_table_uniq_info($table) || []; - for (@$uniqs) { - my ($name, $cols) = @$_; - next if $uniq_tag{ join("\0", @$cols) }++; # skip duplicates + foreach my $uniq (@uniqs) { + my ($name, $cols) = @$uniq; $self->_dbic_stmt($table_class,'add_unique_constraint', $name, $cols); } - } sub __columns_info_for { @@ -2119,6 +2256,36 @@ sub _dbic_stmt { return; } +sub _make_pod_heading { + my ($self, $class) = @_; + + return '' if not $self->generate_pod; + + my $table = $self->class_to_table->{$class}; + my $pod; + + my $pcm = $self->pod_comment_mode; + my ($comment, $comment_overflows, $comment_in_name, $comment_in_desc); + $comment = $self->__table_comment($table); + $comment_overflows = ($comment and length $comment > $self->pod_comment_spillover_length); + $comment_in_name = ($pcm eq 'name' or ($pcm eq 'auto' and !$comment_overflows)); + $comment_in_desc = ($pcm eq 'description' or ($pcm eq 'auto' and $comment_overflows)); + + $pod .= "=head1 NAME\n\n"; + + my $table_descr = $class; + $table_descr .= " - " . 
$comment if $comment and $comment_in_name;
+
+    $pod .= "$table_descr\n\n";
+
+    if ($comment and $comment_in_desc) {
+        $pod .= "=head1 DESCRIPTION\n\n${comment}\n\n";
+    }
+    $pod .= "=cut\n\n";
+
+    return $pod;
+}
+
 # generates the accompanying pod for a DBIC class method statement,
 # storing it with $self->_pod
 sub _make_pod {
@@ -2126,25 +2293,13 @@ sub _make_pod {
     my $class  = shift;
     my $method = shift;
 
-    if ( $method eq 'table' ) {
-        my ($table) = @_;
-        my $pcm = $self->pod_comment_mode;
-        my ($comment, $comment_overflows, $comment_in_name, $comment_in_desc);
-        $comment = $self->__table_comment($table);
-        $comment_overflows = ($comment and length $comment > $self->pod_comment_spillover_length);
-        $comment_in_name = ($pcm eq 'name' or ($pcm eq 'auto' and !$comment_overflows));
-        $comment_in_desc = ($pcm eq 'description' or ($pcm eq 'auto' and $comment_overflows));
-        $self->_pod( $class, "=head1 NAME" );
-        my $table_descr = $class;
-        $table_descr .= " - " . $comment if $comment and $comment_in_name;
-        $self->{_class2table}{ $class } = $table;
-        $self->_pod( $class, $table_descr );
-        if ($comment and $comment_in_desc) {
-            $self->_pod( $class, "=head1 DESCRIPTION" );
-            $self->_pod( $class, $comment );
-        }
-        $self->_pod_cut( $class );
-    } elsif ( $method eq 'add_columns' ) {
+    if ($method eq 'table') {
+        my $table = $_[0];
+        $table = $$table if ref $table eq 'SCALAR';
+        $self->_pod($class, "=head1 TABLE: C<$table>");
+        $self->_pod_cut($class);
+    }
+    elsif ( $method eq 'add_columns' ) {
         $self->_pod( $class, "=head1 ACCESSORS" );
         my $col_counter = 0;
         my @cols = @_;
@@ -2163,7 +2318,7 @@ sub _make_pod {
                     " $_: $s"
                 } sort keys %$attrs,
            );
-            if (my $comment = $self->__column_comment($self->{_class2table}{$class}, $col_counter, $name)) {
+            if (my $comment = $self->__column_comment($self->class_to_table->{$class}, $col_counter, $name)) {
                $self->_pod( $class, $comment );
            }
        }
@@ -2177,6 +2332,35 @@ sub _make_pod {
         $self->_pod_cut( $class );
         $self->{_relations_started} { $class } = 1;
     }
+    elsif ($method eq 'add_unique_constraint') {
+        $self->_pod($class, '=head1 UNIQUE CONSTRAINTS')
+            unless $self->{_uniqs_started}{$class};
+
+        my ($name, $cols) = @_;
+
+        $self->_pod($class, "=head2 C<$name>");
+        $self->_pod($class, '=over 4');
+
+        foreach my $col (@$cols) {
+            $self->_pod($class, "=item \* L</$col>");
+        }
+
+        $self->_pod($class, '=back');
+        $self->_pod_cut($class);
+
+        $self->{_uniqs_started}{$class} = 1;
+    }
+    elsif ($method eq 'set_primary_key') {
+        $self->_pod($class, "=head1 PRIMARY KEY");
+        $self->_pod($class, '=over 4');
+
+        foreach my $col (@_) {
+            $self->_pod($class, "=item \* L</$col>");
+        }
+
+        $self->_pod($class, '=back');
+        $self->_pod_cut($class);
+    }
 }
 
 sub _pod_class_list {
@@ -2196,12 +2380,16 @@ sub _pod_class_list {
 }
 
 sub _base_class_pod {
-    my ($self, $class, $base_class) = @_;
+    my ($self, $base_class) = @_;
 
     return unless $self->generate_pod;
 
-    $self->_pod($class, "=head1 BASE CLASS: L<$base_class>");
-    $self->_pod_cut($class);
+    return <<"EOF"
+=head1 BASE CLASS: L<$base_class>
+
+=cut
+
+EOF
 }
 
 sub _filter_comment {