X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FSchema%2FLoader%2FBase.pm;h=a576c716a581956f4e4bc2a3bee41d7a477850b9;hb=5c06aa08ab17d9d0e8437a990b5717238deeb8fd;hp=6ea127fb4e0c6e4ff1c85e266ec91c39c98d4c31;hpb=dc96667ab512fd5d1af88bb06d8e89e42c706c02;p=dbsrgits%2FDBIx-Class-Schema-Loader.git

diff --git a/lib/DBIx/Class/Schema/Loader/Base.pm b/lib/DBIx/Class/Schema/Loader/Base.pm
index 6ea127f..a576c71 100644
--- a/lib/DBIx/Class/Schema/Loader/Base.pm
+++ b/lib/DBIx/Class/Schema/Loader/Base.pm
@@ -17,12 +17,12 @@ use File::Temp qw//;
 use Class::Unload;
 use Class::Inspector ();
 use Scalar::Util 'looks_like_number';
-use File::Slurp 'read_file';
-use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path/;
+use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path slurp_file/;
 use DBIx::Class::Schema::Loader::Optional::Dependencies ();
 use Try::Tiny;
 use DBIx::Class ();
 use Encode qw/encode/;
+use List::MoreUtils 'all';
 use namespace::clean;
 
 our $VERSION = '0.07010';
@@ -37,6 +37,7 @@ __PACKAGE__->mk_group_ro_accessors('simple', qw/
                 additional_base_classes
                 left_base_classes
                 components
+                schema_components
                 skip_relationships
                 skip_load_external
                 moniker_map
@@ -71,7 +72,11 @@ __PACKAGE__->mk_group_ro_accessors('simple', qw/
                 loader_class
                 qualify_objects
                 tables
+                table_comments_table
+                column_comments_table
                 class_to_table
+                uniq_to_primary
+                quiet
 /);
 
@@ -90,6 +95,7 @@ __PACKAGE__->mk_group_accessors('simple', qw/
                 preserve_case
                 col_collision_map
                 rel_collision_map
+                rel_name_map
                 real_dump_directory
                 result_components_map
                 result_roles_map
@@ -98,6 +104,17 @@ __PACKAGE__->mk_group_accessors('simple', qw/
                 naming_set
 /);
 
+my $CURRENT_V = 'v7';
+
+my @CLASS_ARGS = qw(
+    schema_components schema_base_class result_base_class
+    additional_base_classes left_base_classes additional_classes components
+    result_roles
+);
+
+my $LF = "\x0a";
+my $CRLF = "\x0d\x0a";
+
 =head1 NAME
 
 DBIx::Class::Schema::Loader::Base - Base DBIx::Class::Schema::Loader Implementation.
@@ -236,13 +253,34 @@ next major version upgrade:
 
     __PACKAGE__->naming('v7');
 
+=head2 quiet
+
+If true, will not print the usual C<Dumping manual schema ... Schema dump
+completed.> messages. Does not affect warnings (except for warnings related to
+L</really_erase_my_files>.)
+
 =head2 generate_pod
 
 By default POD will be generated for columns and relationships, using database
 metadata for the text if available and supported.
 
-Reading database metadata (e.g. C<COMMENT ON TABLE some_table IS...>) is only
-supported for Postgres right now.
+Metadata can be stored in two ways.
+
+The first is that you can create two tables named C<table_comments> and
+C<column_comments> respectively. They both need to have columns named
+C<table_name> and C<comment_text>. The second one needs to have a column
+named C<column_name>. Then data stored in these tables will be used as a
+source of metadata about tables and comments.
+
+(If you wish you can change the name of these tables with the parameters
+L</table_comments_table> and L</column_comments_table>.)
+
+As a fallback you can use built-in commenting mechanisms. Currently this
+is only supported for PostgreSQL and MySQL. To create comments in
+PostgreSQL you add statements of the form C<COMMENT ON TABLE some_table IS '...'>.
+To create comments in MySQL you add C<COMMENT '...'> to the end of the
+column or table definition. Note that MySQL restricts the length of comments,
+and also does not handle complex Unicode characters properly.
 
 Set this to C<0> to turn off all POD generation.
 
@@ -278,6 +316,16 @@ which it will be forced into a separate description section.
 The default is C<60>
 
+=head2 table_comments_table
+
+The table to look for comments about tables in. By default C<table_comments>.
+See L</generate_pod> for details.
+
+=head2 column_comments_table
+
+The table to look for comments about columns in. By default C<column_comments>.
+See L</generate_pod> for details.
+
 =head2 relationship_attrs
 
 Hashref of attributes to pass to each generated relationship, listed
@@ -349,6 +397,43 @@ passed, the code is called with arguments of
         column_info    => hashref of column info (data_type, is_nullable, etc),
     }
 
+=head2 rel_name_map
+
+Similar in idea to moniker_map, but different in the details. It can be
+a hashref or a code ref.
+
+If it is a hashref, keys can be either the default relationship name, or the
+moniker. The keys that are the default relationship name should map to the
+name you want to change the relationship to. Keys that are monikers should map
+to hashes mapping relationship names to their translation. You can do both at
+once, and the more specific moniker version will be picked up first. So, for
+instance, you could have
+
+    {
+        bar => "baz",
+        Foo => {
+            bar => "blat",
+        },
+    }
+
+and relationships that would have been named C<bar> will now be named C<baz>
+except that in the table whose moniker is C<Foo> it will be named C<blat>.
+
+If it is a coderef, the argument passed will be a hashref of this form:
+
+    {
+        name           => default relationship name,
+        type           => the relationship type eg: C<belongs_to>,
+        local_class    => name of the DBIC class we are building,
+        local_moniker  => moniker of the DBIC class we are building,
+        local_columns  => columns in this table in the relationship,
+        remote_class   => name of the DBIC class we are related to,
+        remote_moniker => moniker of the DBIC class we are related to,
+        remote_columns => columns in the other table in the relationship,
+    }
+
+DBICSL will try to use the value returned as the relationship name.
+
 =head2 inflect_plural
 
 Just like L</moniker_map> above (can be hash/code-ref, falls back to default
@@ -383,9 +468,13 @@ that need to be leftmost.
 
 List of additional classes which all of your table classes will use.
 
+=head2 schema_components
+
+List of components to load into the Schema class.
+
 =head2 components
 
-List of additional components to be loaded into all of your table
+List of additional components to be loaded into all of your Result
 classes. A good example would be
 L<InflateColumn::DateTime|DBIx::Class::InflateColumn::DateTime>
 
@@ -586,6 +675,12 @@ rather than column names/accessors.
 The default is to just append C<_rel> to the relationship name, see
 L</RELATIONSHIP NAME COLLISIONS>.
 
+=head2 uniq_to_primary
+
+Automatically promotes the largest unique constraints with non-nullable columns
+on tables to primary keys, assuming there is only one largest unique
+constraint.
+
 =head1 METHODS
 
 None of these methods are intended for direct invocation by regular
@@ -594,13 +689,6 @@ L.
 
 =cut
 
-my $CURRENT_V = 'v7';
-
-my @CLASS_ARGS = qw(
-    schema_base_class result_base_class additional_base_classes
-    left_base_classes additional_classes components result_roles
-);
-
 # ensure that a peice of object data is a valid arrayref, creating
 # an empty one or encapsulating whatever's there.
sub _ensure_arrayref { @@ -648,17 +736,26 @@ sub new { } } - $self->result_components_map($self->{result_component_map}) - if defined $self->{result_component_map}; - - $self->result_roles_map($self->{result_role_map}) - if defined $self->{result_role_map}; + if (defined $self->{result_component_map}) { + if (defined $self->result_components_map) { + croak "Specify only one of result_components_map or result_component_map"; + } + $self->result_components_map($self->{result_component_map}) + } + + if (defined $self->{result_role_map}) { + if (defined $self->result_roles_map) { + croak "Specify only one of result_roles_map or result_role_map"; + } + $self->result_roles_map($self->{result_role_map}) + } croak "the result_roles and result_roles_map options may only be used in conjunction with use_moose=1" if ((not defined $self->use_moose) || (not $self->use_moose)) && ((defined $self->result_roles) || (defined $self->result_roles_map)); - $self->_ensure_arrayref(qw/additional_classes + $self->_ensure_arrayref(qw/schema_components + additional_classes additional_base_classes left_base_classes components @@ -713,6 +810,8 @@ sub new { $self->{schema_class} ||= ( ref $self->{schema} || $self->{schema} ); $self->{schema} ||= $self->{schema_class}; + $self->{table_comments_table} ||= 'table_comments'; + $self->{column_comments_table} ||= 'column_comments'; croak "dump_overwrite is deprecated. Please read the" . " DBIx::Class::Schema::Loader::Base documentation" @@ -776,6 +875,24 @@ sub new { } } + if (my $rel_collision_map = $self->rel_collision_map) { + if (my $reftype = ref $rel_collision_map) { + if ($reftype ne 'HASH') { + croak "Invalid type $reftype for option 'rel_collision_map'"; + } + } + else { + $self->rel_collision_map({ '(.*)' => $rel_collision_map }); + } + } + + if (defined(my $rel_name_map = $self->rel_name_map)) { + my $reftype = ref $rel_name_map; + if ($reftype ne 'HASH' && $reftype ne 'CODE') { + croak "Invalid type $reftype for option 'rel_name_map', must be HASH or CODE"; + } + } + $self; } @@ -842,7 +959,7 @@ EOF Could not eval expression '$result_namespace' for result_namespace from $filename: $@ EOF - $result_namespace = $ds; + $result_namespace = $ds || ''; if ($load_classes && (not defined $self->use_namespaces)) { warn <<"EOF" unless $ENV{SCHEMA_LOADER_BACKCOMPAT}; @@ -1030,7 +1147,7 @@ sub _load_external { warn qq/# Loaded external class definition for '$class'\n/ if $self->debug; - my $code = $self->_rewrite_old_classnames(scalar read_file($real_inc_path, binmode => ':encoding(UTF-8)')); + my $code = $self->_rewrite_old_classnames(slurp_file $real_inc_path); if ($self->dynamic) { # load the class too eval_package_without_redefine_warnings($class, $code); @@ -1053,7 +1170,7 @@ sub _load_external { } if ($old_real_inc_path) { - my $code = read_file($old_real_inc_path, binmode => ':encoding(UTF-8)'); + my $code = slurp_file $old_real_inc_path; $self->_ext_stmt($class, <<"EOF"); @@ -1134,8 +1251,7 @@ sub rescan { } } - delete $self->{_dump_storage}; - delete $self->{_relations_started}; + delete @$self{qw/_dump_storage _relations_started _uniqs_started/}; my $loaded = $self->_load_tables(@current); @@ -1205,11 +1321,10 @@ sub _load_tables { if(!$self->skip_relationships) { # The relationship loader needs a working schema - $self->{quiet} = 1; + local $self->{quiet} = 1; local $self->{dump_directory} = $self->{temp_directory}; $self->_reload_classes(\@tables); $self->_load_relationships(\@tables); - $self->{quiet} = 0; # Remove that temp dir from INC so it doesn't get 
reloaded @INC = grep $_ ne $self->dump_directory, @INC; @@ -1308,7 +1423,7 @@ sub _reload_class { eval_package_without_redefine_warnings ($class, "require $class"); } catch { - my $source = read_file($self->_get_dump_filename($class), binmode => ':encoding(UTF-8)'); + my $source = slurp_file $self->_get_dump_filename($class); die "Failed to reload class $class: $_.\n\nCLASS SOURCE:\n\n$source"; }; } @@ -1362,7 +1477,7 @@ sub _dump_to_dir { my $target_dir = $self->dump_directory; warn "Dumping manual schema for $schema_class to directory $target_dir ...\n" - unless $self->{dynamic} or $self->{quiet}; + unless $self->dynamic or $self->quiet; my $schema_text = qq|package $schema_class;\n\n| @@ -1376,6 +1491,15 @@ sub _dump_to_dir { $schema_text .= qq|use strict;\nuse warnings;\n\nuse base '$schema_base_class';\n\n|; } + my @schema_components = @{ $self->schema_components || [] }; + + if (@schema_components) { + my $schema_components = dump @schema_components; + $schema_components = "($schema_components)" if @schema_components == 1; + + $schema_text .= "__PACKAGE__->load_components${schema_components};\n\n"; + } + if ($self->use_namespaces) { $schema_text .= qq|__PACKAGE__->load_namespaces|; my $namespace_options; @@ -1452,7 +1576,7 @@ sub _dump_to_dir { } } - warn "Schema dump completed.\n" unless $self->{dynamic} or $self->{quiet}; + warn "Schema dump completed.\n" unless $self->dynamic or $self->quiet; } @@ -1472,7 +1596,7 @@ sub _write_classfile { if (-f $filename && $self->really_erase_my_files) { warn "Deleting existing file '$filename' due to " - . "'really_erase_my_files' setting\n" unless $self->{quiet}; + . "'really_erase_my_files' setting\n" unless $self->quiet; unlink($filename); } @@ -1592,7 +1716,7 @@ sub _parse_generated_file { or croak "Cannot open '$fn' for reading: $!"; my $mark_re = - qr{^(# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:)([A-Za-z0-9/+]{22})\n}; + qr{^(# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:)([A-Za-z0-9/+]{22})\r?\n}; my ($md5, $ts, $ver, $gen); while(<$fh>) { @@ -1601,7 +1725,7 @@ sub _parse_generated_file { $md5 = $2; # Pull out the version and timestamp from the line above - ($ver, $ts) = $gen =~ m/^# Created by DBIx::Class::Schema::Loader v(.*?) @ (.*?)\Z/m; + ($ver, $ts) = $gen =~ m/^# Created by DBIx::Class::Schema::Loader v(.*?) @ (.*?)\r?\Z/m; $gen .= $pre_md5; croak "Checksum mismatch in '$fn', the auto-generated part of the file has been modified outside of this loader. 
Aborting.\nIf you want to overwrite these modifications, set the 'overwrite_modifications' loader option.\n" @@ -1617,7 +1741,10 @@ sub _parse_generated_file { my $custom = do { local $/; <$fh> } if $md5; - close ($fh); + $custom ||= ''; + $custom =~ s/$CRLF|$LF/\n/g; + + close $fh; return ($gen, $md5, $ver, $ts, $custom); } @@ -1832,8 +1959,7 @@ EOF } } -# use the same logic to run moniker_map, col_accessor_map, and -# relationship_name_map +# use the same logic to run moniker_map, col_accessor_map sub _run_user_map { my ( $self, $map, $default_code, $ident, @extra ) = @_; @@ -1919,8 +2045,6 @@ sub _setup_src_meta { # be careful to not create refs Data::Dump can "optimize" $full_table_name = \do {"".$full_table_name} if ref $table_name; - $self->_raw_stmt($table_class, ''); # add a blank line - $self->_dbic_stmt($table_class, 'table', $full_table_name); my $cols = $self->_table_columns($table); @@ -1960,6 +2084,39 @@ sub _setup_src_meta { my $pks = $self->_table_pk_info($table) || []; + my %uniq_tag; # used to eliminate duplicate uniqs + + $uniq_tag{ join("\0", @$pks) }++ if @$pks; # pk is a uniq + + my $uniqs = $self->_table_uniq_info($table) || []; + my @uniqs; + + foreach my $uniq (@$uniqs) { + my ($name, $cols) = @$uniq; + next if $uniq_tag{ join("\0", @$cols) }++; # skip duplicates + push @uniqs, [$name, $cols]; + } + + my @non_nullable_uniqs = grep { + all { $col_info->{$_}{is_nullable} == 0 } @{ $_->[1] } + } @uniqs; + + if ($self->uniq_to_primary && (not @$pks) && @non_nullable_uniqs) { + my @by_colnum = sort { $b->[0] <=> $a->[0] } + map [ scalar @{ $_->[1] }, $_ ], @non_nullable_uniqs; + + if (not (@by_colnum > 1 && $by_colnum[0][0] == $by_colnum[1][0])) { + my @keys = map $_->[1], @by_colnum; + + my $pk = $keys[0]; + + # remove the uniq from list + @uniqs = grep { $_->[0] ne $pk->[0] } @uniqs; + + $pks = $pk->[1]; + } + } + foreach my $pkcol (@$pks) { $col_info->{$pkcol}{is_nullable} = 0; } @@ -1970,19 +2127,13 @@ sub _setup_src_meta { map { $_, ($col_info->{$_}||{}) } @$cols ); - my %uniq_tag; # used to eliminate duplicate uniqs - - @$pks ? 
$self->_dbic_stmt($table_class,'set_primary_key',@$pks)
-              : carp("$table has no primary key");
-    $uniq_tag{ join("\0", @$pks) }++ if @$pks; # pk is a uniq
+    $self->_dbic_stmt($table_class, 'set_primary_key', @$pks)
+        if @$pks;
 
-    my $uniqs = $self->_table_uniq_info($table) || [];
-    for (@$uniqs) {
-        my ($name, $cols) = @$_;
-        next if $uniq_tag{ join("\0", @$cols) }++; # skip duplicates
+    foreach my $uniq (@uniqs) {
+        my ($name, $cols) = @$uniq;
         $self->_dbic_stmt($table_class,'add_unique_constraint', $name, $cols);
     }
-
 }
 
 sub __columns_info_for {
@@ -2077,9 +2228,13 @@ sub _load_relationships {
     my $rel_stmts = $self->_relbuilder->generate_code(\@tables);
 
     foreach my $src_class (sort keys %$rel_stmts) {
-        my $src_stmts = $rel_stmts->{$src_class};
-        foreach my $stmt (@$src_stmts) {
-            $self->_dbic_stmt($src_class,$stmt->{method},@{$stmt->{args}});
+        # sort by rel name
+        my @src_stmts = map $_->[1],
+            sort { $a->[0] cmp $b->[0] }
+            map [ $_->{args}[0], $_ ], @{ $rel_stmts->{$src_class} };
+
+        foreach my $stmt (@src_stmts) {
+            $self->_dbic_stmt($src_class,$stmt->{method}, @{$stmt->{args}});
         }
     }
 }
@@ -2175,7 +2330,13 @@ sub _make_pod {
     my $class = shift;
     my $method = shift;
 
-    if ( $method eq 'add_columns' ) {
+    if ($method eq 'table') {
+        my $table = $_[0];
+        $table = $$table if ref $table eq 'SCALAR';
+        $self->_pod($class, "=head1 TABLE: C<$table>");
+        $self->_pod_cut($class);
+    }
+    elsif ( $method eq 'add_columns' ) {
         $self->_pod( $class, "=head1 ACCESSORS" );
         my $col_counter = 0;
         my @cols = @_;
@@ -2208,6 +2369,35 @@ sub _make_pod {
         $self->_pod_cut( $class );
         $self->{_relations_started} { $class } = 1;
     }
+    elsif ($method eq 'add_unique_constraint') {
+        $self->_pod($class, '=head1 UNIQUE CONSTRAINTS')
+            unless $self->{_uniqs_started}{$class};
+
+        my ($name, $cols) = @_;
+
+        $self->_pod($class, "=head2 C<$name>");
+        $self->_pod($class, '=over 4');
+
+        foreach my $col (@$cols) {
+            $self->_pod($class, "=item \* L</$col>");
+        }
+
+        $self->_pod($class, '=back');
+        $self->_pod_cut($class);
+
+        $self->{_uniqs_started}{$class} = 1;
+    }
+    elsif ($method eq 'set_primary_key') {
+        $self->_pod($class, "=head1 PRIMARY KEY");
+        $self->_pod($class, '=over 4');
+
+        foreach my $col (@_) {
+            $self->_pod($class, "=item \* L</$col>");
+        }
+
+        $self->_pod($class, '=back');
+        $self->_pod_cut($class);
+    }
 }
 
 sub _pod_class_list {