X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=lib%2FDBIx%2FClass%2FSchema%2FLoader%2FBase.pm;h=4de438547e236951505bf8996fe55fe05e795990;hb=802a117545a5bd1a5d20152835a3aa7fb2e7b39f;hp=e2733a59e5332c382e6c1b71fff0450c79747e61;hpb=bb46cd4b8d2cf185f37b632bc339d9669267c7fb;p=dbsrgits%2FDBIx-Class-Schema-Loader.git diff --git a/lib/DBIx/Class/Schema/Loader/Base.pm b/lib/DBIx/Class/Schema/Loader/Base.pm index e2733a5..4de4385 100644 --- a/lib/DBIx/Class/Schema/Loader/Base.pm +++ b/lib/DBIx/Class/Schema/Loader/Base.pm @@ -20,7 +20,7 @@ use File::Temp (); use Class::Unload; use Class::Inspector (); use Scalar::Util 'looks_like_number'; -use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path slurp_file/; +use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path slurp_file sigwarn_silencer/; use DBIx::Class::Schema::Loader::Optional::Dependencies (); use Try::Tiny; use DBIx::Class (); @@ -29,7 +29,7 @@ use List::MoreUtils qw/all any firstidx uniq/; use File::Temp 'tempfile'; use namespace::clean; -our $VERSION = '0.07021'; +our $VERSION = '0.07041'; __PACKAGE__->mk_group_ro_accessors('simple', qw/ schema @@ -61,6 +61,10 @@ __PACKAGE__->mk_group_ro_accessors('simple', qw/ use_moose only_autoclean overwrite_modifications + dry_run + generated_classes + omit_version + omit_timestamp relationship_attrs @@ -109,6 +113,8 @@ __PACKAGE__->mk_group_accessors('simple', qw/ db_schema qualify_objects moniker_parts + moniker_part_separator + moniker_part_map /); my $CURRENT_V = 'v7'; @@ -303,6 +309,11 @@ If true, will not print the usual C messages. Does not affect warnings (except for warnings related to L.) +=head2 dry_run + +If true, don't actually write out the generated files. This can only be +used with static schema generation. + =head2 generate_pod By default POD will be generated for columns and relationships, using database @@ -380,18 +391,103 @@ same database and schema as the table/column whose comment is being retrieved. =head2 relationship_attrs -Hashref of attributes to pass to each generated relationship, listed -by type. Also supports relationship type 'all', containing options to -pass to all generated relationships. Attributes set for more specific -relationship types override those set in 'all'. +Hashref of attributes to pass to each generated relationship, listed by type. +Also supports relationship type 'all', containing options to pass to all +generated relationships. Attributes set for more specific relationship types +override those set in 'all', and any attributes specified by this option +override the introspected attributes of the foreign key if any. For example: relationship_attrs => { - belongs_to => { is_deferrable => 0 }, + has_many => { cascade_delete => 1, cascade_copy => 1 }, + might_have => { cascade_delete => 1, cascade_copy => 1 }, }, -use this to turn off DEFERRABLE on your foreign key constraints. +use this to turn L cascades to on on your +L and +L relationships, they default +to off. 
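+
+These attributes are passed like any other loader option. As a minimal
+sketch (the schema class name, dump directory and connection DSN below are
+placeholders, not part of this distribution), the hashref form above could
+be used with L<DBIx::Class::Schema::Loader/make_schema_at> like this:
+
+    use DBIx::Class::Schema::Loader qw/ make_schema_at /;
+
+    # illustrative values only: substitute your own schema class,
+    # dump directory and connection info
+    make_schema_at(
+        'My::Schema',
+        {
+            dump_directory     => './lib',
+            relationship_attrs => {
+                has_many   => { cascade_delete => 1, cascade_copy => 1 },
+                might_have => { cascade_delete => 1, cascade_copy => 1 },
+            },
+        },
+        [ 'dbi:SQLite:dbname=example.db', '', '' ],
+    );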
+
+Can also be a coderef, for more precise control, in which case the coderef gets
+this hash of parameters (as a list):
+
+    rel_name        # the name of the relationship
+    rel_type        # the type of the relationship: 'belongs_to', 'has_many' or 'might_have'
+    local_source    # the DBIx::Class::ResultSource object for the source the rel is *from*
+    remote_source   # the DBIx::Class::ResultSource object for the source the rel is *to*
+    local_table     # a DBIx::Class::Schema::Loader::Table object for the table of the source the rel is from
+    local_cols      # an arrayref of column names of columns used in the rel in the source it is from
+    remote_table    # a DBIx::Class::Schema::Loader::Table object for the table of the source the rel is to
+    remote_cols     # an arrayref of column names of columns used in the rel in the source it is to
+    attrs           # the attributes that would be set
+
+It should return the new hashref of attributes, or nothing for no changes.
+
+For example:
+
+    relationship_attrs => sub {
+        my %p = @_;
+
+        say "the relationship name is: $p{rel_name}";
+        say "the relationship is a: $p{rel_type}";
+        say "the local class is: ", $p{local_source}->result_class;
+        say "the remote class is: ", $p{remote_source}->result_class;
+        say "the local table is: ", $p{local_table}->sql_name;
+        say "the rel columns in the local table are: ", (join ", ", @{$p{local_cols}});
+        say "the remote table is: ", $p{remote_table}->sql_name;
+        say "the rel columns in the remote table are: ", (join ", ", @{$p{remote_cols}});
+
+        if ($p{local_table} eq 'dogs' && @{$p{local_cols}} == 1 && $p{local_cols}[0] eq 'name') {
+            $p{attrs}{could_be_snoopy} = 1;
+
+            return $p{attrs};
+        }
+    },
+
+These are the default attributes:
+
+    has_many => {
+        cascade_delete => 0,
+        cascade_copy => 0,
+    },
+    might_have => {
+        cascade_delete => 0,
+        cascade_copy => 0,
+    },
+    belongs_to => {
+        on_delete => 'CASCADE',
+        on_update => 'CASCADE',
+        is_deferrable => 1,
+    },
+
+For L<belongs_to|DBIx::Class::Relationship/belongs_to> relationships, these
+defaults are overridden by the attributes introspected from the foreign key in
+the database, if this information is available (and the driver is capable of
+retrieving it).
+
+This information overrides the defaults mentioned above, and is then itself
+overridden by the user's L</relationship_attrs> for C<belongs_to> if any are
+specified.
+
+In general, for most databases, for a plain foreign key with no rules, the
+values for a L<belongs_to|DBIx::Class::Relationship/belongs_to> relationship
+will be:
+
+    on_delete     => 'NO ACTION',
+    on_update     => 'NO ACTION',
+    is_deferrable => 0,
+
+In the cases where an attribute is not supported by the DB, a value matching
+the actual behavior is used, for example Oracle does not support C<ON UPDATE>
+rules, so C<on_update> is set to C<NO ACTION>. This is done so that the
+behavior of the schema is preserved when cross deploying to a different RDBMS
+such as SQLite for testing.
+
+In the cases where the DB does not support C<DEFERRABLE> foreign keys, the
+value is set to C<1> if L<DBIx::Class> has a working C<<
+$storage->with_deferred_fk_checks >>. This is done so that the same
+L<DBIx::Class> code can be used, and cross deployed from and to such databases.
 
 =head2 debug
 
@@ -435,6 +531,7 @@ the table.
 
 The L</moniker_parts> option is an arrayref of methods on the table class
 corresponding to parts of the fully qualified table name, defaulting to
 C<['name']>, in the order those parts are used to create the moniker name.
+The parts are joined together using L</moniker_part_separator>.
 
 The C<'name'> entry B<must> be present.
 
@@ -448,25 +545,81 @@ C, C
 
 =item * Informix, MSSQL, Sybase ASE
 
-C, C, C
+C<database>, C<schema>, C<name>
 
 =back
 
+=head2 moniker_part_separator
+
+String used to join L</moniker_parts> when creating the moniker.
+Defaults to the empty string. 
Use C<::> to get a separate namespace per
+database and/or schema.
+
 =head2 constraint
 
-Only load tables matching regex. Best specified as a qr// regex.
+Only load matching tables.
 
 =head2 exclude
 
-Exclude tables matching regex. Best specified as a qr// regex.
+Exclude matching tables.
+
+These can be specified either as a regex (preferably on the C<qr//>
+form), or as an arrayref of arrayrefs. Regexes are matched against
+the (unqualified) table name, while arrayrefs are matched according to
+L</moniker_parts>.
+
+For example:
+
+    db_schema => [qw(some_schema other_schema)],
+    moniker_parts => [qw(schema name)],
+    constraint => [
+        [ qr/\Asome_schema\z/ => qr/\A(?:foo|bar)\z/ ],
+        [ qr/\Aother_schema\z/ => qr/\Abaz\z/ ],
+    ],
+
+In this case only the tables C<foo> and C<bar> in C<some_schema> and
+C<baz> in C<other_schema> will be dumped.
 
 =head2 moniker_map
 
-Overrides the default table name to moniker translation. Can be either a
-hashref of table keys and moniker values, or a coderef for a translator
-function taking a L<DBIx::Class::Schema::Loader::Table> argument
-(which stringifies to the unqualified table name) and returning a scalar
-moniker. If the hash entry does not exist, or the function returns a false
+Overrides the default table name to moniker translation. Either
+
+=over
+
+=item *
+
+a nested hashref, which will be traversed according to L</moniker_parts>
+
+For example:
+
+    moniker_parts => [qw(schema name)],
+    moniker_map => {
+        foo => {
+            bar => "FooishBar",
+        },
+    },
+
+In which case the table C<bar> in the C<foo> schema would get the moniker
+C<FooishBar>.
+
+=item *
+
+a hashref of unqualified table name keys and moniker values
+
+=item *
+
+a coderef for a translator function taking a L<table
+object|DBIx::Class::Schema::Loader::Table> argument (which stringifies to the
+unqualified table name) and returning a scalar moniker
+
+The function is also passed a coderef that can be called with either
+of the hashref forms to get the moniker mapped accordingly. This is
+useful if you need to handle some monikers specially, but want to use
+the hashref form for the rest.
+
+=back
+
+If the hash entry does not exist, or the function returns a false
 value, the code falls back to default behavior for that table name.
 
 The default behavior is to split on case transition and non-alphanumeric
@@ -481,6 +634,26 @@ together. Examples:
     stations_visited | StationVisited
     routeChange      | RouteChange
 
+=head2 moniker_part_map
+
+Map for overriding the monikerization of individual L</moniker_parts>.
+The keys are the moniker part to override, the value is either a
+hashref or coderef for mapping the corresponding part of the
+moniker. If a coderef is used, it gets called with the moniker part
+and the hash key the code ref was found under.
+
+For example:
+
+    moniker_part_map => {
+        schema => sub { ... },
+    },
+
+Given the table C<foo.bar>, the code ref would be called with the
+arguments C<foo> and C<schema>, plus a coderef similar to the one
+described in L</moniker_map>.
+
+L</moniker_map> takes precedence over this.
+
 =head2 col_accessor_map
 
 Same as moniker_map, but for column accessor names. If a coderef is
@@ -496,6 +669,7 @@ passed, the code is called with arguments of
       schema_class    => name of the schema class we are building,
       column_info     => hashref of column info (data_type, is_nullable, etc),
    }
+   coderef ref that can be called with a hashref map
 
 the L<table object|DBIx::Class::Schema::Loader::Table> 
stringifies to the unqualified table name. @@ -522,7 +696,7 @@ instance, you could have and relationships that would have been named C will now be named C except that in the table whose moniker is C it will be named C. -If it is a coderef, the argument passed will be a hashref of this form: +If it is a coderef, it will be passed a hashref of this form: { name => default relationship name, @@ -533,8 +707,14 @@ If it is a coderef, the argument passed will be a hashref of this form: remote_class => name of the DBIC class we are related to, remote_moniker => moniker of the DBIC class we are related to, remote_columns => columns in the other table in the relationship, + # for type => "many_to_many" only: + link_class => name of the DBIC class for the link table + link_moniker => moniker of the DBIC class for the link table + link_rel_name => name of the relationship to the link table } +In addition it is passed a coderef that can be called with a hashref map. + DBICSL will try to use the value returned as the relationship name. =head2 inflect_plural @@ -594,7 +774,7 @@ load certain components for specified Result classes. For example: 'InflateColumn::DateTime', ], } - + You may use this in conjunction with L. =head2 result_roles @@ -614,7 +794,7 @@ certain roles for specified Result classes. For example: ], RouteChange => 'YourApp::Role::TripEvent', } - + You may use this in conjunction with L. =head2 use_namespaces @@ -682,6 +862,14 @@ made to Loader-generated code. Again, you should be using version control on your schema classes. Be careful with this option. +=head2 omit_version + +Omit the package version from the signature comment. + +=head2 omit_timestamp + +Omit the creation timestamp from the signature comment. + =head2 custom_column_info Hook for adding extra attributes to the @@ -747,7 +935,8 @@ L = C or greater is required with this option. Set to true to prepend the L to table names for C<< __PACKAGE__->table >> calls, and to some other things like Oracle sequences. -This attribute is automatically set to true for multi db_schema configurations. +This attribute is automatically set to true for multi db_schema configurations, +unless explicitly set to false by the user. =head2 use_moose @@ -832,7 +1021,7 @@ L. =cut -# ensure that a peice of object data is a valid arrayref, creating +# ensure that a piece of object data is a valid arrayref, creating # an empty one or encapsulating whatever's there. sub _ensure_arrayref { my $self = shift; @@ -885,7 +1074,7 @@ sub new { } $self->result_components_map($self->{result_component_map}) } - + if (defined $self->{result_role_map}) { if (defined $self->result_roles_map) { croak "Specify only one of result_roles_map or result_role_map"; @@ -951,6 +1140,7 @@ sub new { $self->{class_to_table} = {}; $self->{classes} = {}; $self->{_upgrading_classes} = {}; + $self->{generated_classes} = []; $self->{schema_class} ||= ( ref $self->{schema} || $self->{schema} ); $self->{schema} ||= $self->{schema_class}; @@ -962,6 +1152,10 @@ sub new { if $self->{dump_overwrite}; $self->{dynamic} = ! 
$self->{dump_directory}; + + croak "dry_run can only be used with static schema generation" + if $self->dynamic and $self->dry_run; + $self->{temp_directory} ||= File::Temp::tempdir( 'dbicXXXX', TMPDIR => 1, CLEANUP => 1, @@ -1052,7 +1246,7 @@ sub new { if (defined $self->db_schema) { if (ref $self->db_schema eq 'ARRAY') { - if (@{ $self->db_schema } > 1) { + if (@{ $self->db_schema } > 1 && not defined $self->{qualify_objects}) { $self->{qualify_objects} = 1; } elsif (@{ $self->db_schema } == 0) { @@ -1060,7 +1254,7 @@ sub new { } } elsif (not ref $self->db_schema) { - if ($self->db_schema eq '%') { + if ($self->db_schema eq '%' && not defined $self->{qualify_objects}) { $self->{qualify_objects} = 1; } @@ -1083,6 +1277,13 @@ sub new { } } + if (not defined $self->moniker_part_separator) { + $self->moniker_part_separator(''); + } + if (not defined $self->moniker_part_map) { + $self->moniker_part_map({}), + } + return $self; } @@ -1251,7 +1452,7 @@ sub _validate_classes { foreach my $c (@classes) { # components default to being under the DBIx::Class namespace unless they - # are preceeded with a '+' + # are preceded with a '+' if ( $key =~ m/component/ && $c !~ s/^\+// ) { $c = 'DBIx::Class::' . $c; } @@ -1274,8 +1475,10 @@ sub _find_file_in_inc { foreach my $prefix (@INC) { my $fullpath = File::Spec->catfile($prefix, $file); + # abs_path pure-perl fallback warns for non-existent files + local $SIG{__WARN__} = sigwarn_silencer(qr/^stat\(.*\Q$file\E\)/); return $fullpath if -f $fullpath - # abs_path throws on Windows for nonexistant files + # abs_path throws on Windows for nonexistent files and (try { Cwd::abs_path($fullpath) }) ne ((try { Cwd::abs_path(File::Spec->catfile($self->dump_directory, $file)) }) || ''); } @@ -1515,8 +1718,8 @@ sub _load_tables { my $moniker_parts = [ @{ $self->moniker_parts } ]; - my $have_schema = 1 if any { $_ eq 'schema' } @{ $self->moniker_parts }; - my $have_database = 1 if any { $_ eq 'database' } @{ $self->moniker_parts }; + my $have_schema = any { $_ eq 'schema' } @{ $self->moniker_parts }; + my $have_database = any { $_ eq 'database' } @{ $self->moniker_parts }; unshift @$moniker_parts, 'schema' if $use_schema && !$have_schema; unshift @$moniker_parts, 'database' if $use_database && !$have_database; @@ -1530,9 +1733,9 @@ sub _load_tables { # check if there are still clashes my %by_moniker; - + while (my ($t, $m) = each %new_monikers) { - push @{ $by_moniker{$m} }, $t; + push @{ $by_moniker{$m} }, $t; } foreach my $m (grep @{ $by_moniker{$_} } > 1, keys %by_moniker) { @@ -1566,6 +1769,8 @@ sub _load_tables { # The relationship loader needs a working schema local $self->{quiet} = 1; local $self->{dump_directory} = $self->{temp_directory}; + local $self->{generated_classes} = []; + local $self->{dry_run} = 0; $self->_reload_classes(\@tables); $self->_load_relationships(\@tables); @@ -1600,7 +1805,9 @@ sub _reload_classes { $self->_dump_to_dir(map { $self->classes->{$_->sql_name} } @tables); unshift @INC, $self->dump_directory; - + + return if $self->dry_run; + my @to_register; my %have_source = map { $_ => $self->schema->source($_) } $self->schema->sources; @@ -1608,7 +1815,7 @@ sub _reload_classes { for my $table (@tables) { my $moniker = $self->monikers->{$table->sql_name}; my $class = $self->classes->{$table->sql_name}; - + { no warnings 'redefine'; local *Class::C3::reinitialize = sub {}; # to speed things up, reinitialized below @@ -1696,6 +1903,8 @@ sub get_dump_filename { sub _ensure_dump_subdirs { my ($self, $class) = (@_); + return if 
$self->dry_run; + my @name_parts = split(/::/, $class); pop @name_parts; # we don't care about the very last element, # which is a filename @@ -1778,7 +1987,7 @@ sub _dump_to_dir { my $result_base_class = $self->result_base_class || 'DBIx::Class::Core'; foreach my $src_class (@classes) { - my $src_text = + my $src_text = qq|use utf8;\n| . qq|package $src_class;\n\n| . qq|# Created by DBIx::Class::Schema::Loader\n| @@ -1832,8 +2041,8 @@ sub _dump_to_dir { sub _sig_comment { my ($self, $version, $ts) = @_; return qq|\n\n# Created by DBIx::Class::Schema::Loader| - . qq| v| . $version - . q| @ | . $ts + . (defined($version) ? q| v| . $version : '') + . (defined($ts) ? q| @ | . $ts : '') . qq|\n# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:|; } @@ -1843,7 +2052,7 @@ sub _write_classfile { my $filename = $self->_get_dump_filename($class); $self->_ensure_dump_subdirs($class); - if (-f $filename && $self->really_erase_my_files) { + if (-f $filename && $self->really_erase_my_files && !$self->dry_run) { warn "Deleting existing file '$filename' due to " . "'really_erase_my_files' setting\n" unless $self->quiet; unlink($filename); @@ -1867,7 +2076,7 @@ sub _write_classfile { if (-f $old_filename) { $custom_content = ($self->_parse_generated_file ($old_filename))[4]; - unlink $old_filename; + unlink $old_filename unless $self->dry_run; } } @@ -1950,9 +2159,13 @@ sub _write_classfile { } } + push @{$self->generated_classes}, $class; + + return if $self->dry_run; + $text .= $self->_sig_comment( - $self->version_to_dump, - POSIX::strftime('%Y-%m-%d %H:%M:%S', localtime) + $self->omit_version ? undef : $self->version_to_dump, + $self->omit_timestamp ? undef : POSIX::strftime('%Y-%m-%d %H:%M:%S', localtime) ); open(my $fh, '>:encoding(UTF-8)', $filename) @@ -1978,7 +2191,7 @@ sub _default_moose_custom_content { if (not $is_schema) { return qq|\n__PACKAGE__->meta->make_immutable;|; } - + return qq|\n__PACKAGE__->meta->make_immutable(inline_constructor => 0);|; } @@ -2005,13 +2218,16 @@ sub _parse_generated_file { qr{^(# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:)([A-Za-z0-9/+]{22})\r?\n}; my ($md5, $ts, $ver, $gen); + local $_; while(<$fh>) { if(/$mark_re/) { my $pre_md5 = $1; $md5 = $2; # Pull out the version and timestamp from the line above - ($ver, $ts) = $gen =~ m/^# Created by DBIx::Class::Schema::Loader v(.*?) @ (.*?)\r?\Z/m; + ($ver, $ts) = $gen =~ m/^# Created by DBIx::Class::Schema::Loader( v[\d.]+)?( @ [\d-]+ [\d:]+)?\r?\Z/m; + $ver =~ s/^ v// if $ver; + $ts =~ s/^ @ // if $ts; $gen .= $pre_md5; croak "Checksum mismatch in '$fn', the auto-generated part of the file has been modified outside of this loader. 
Aborting.\nIf you want to overwrite these modifications, set the 'overwrite_modifications' loader option.\n" @@ -2253,10 +2469,32 @@ sub _run_user_map { my $default_ident = $default_code->( $ident, @extra ); my $new_ident; if( $map && ref $map eq 'HASH' ) { - $new_ident = $map->{ $ident }; + if (my @parts = try{ @{ $ident } }) { + my $part_map = $map; + while (@parts) { + my $part = shift @parts; + last unless exists $part_map->{ $part }; + if ( !ref $part_map->{ $part } && !@parts ) { + $new_ident = $part_map->{ $part }; + last; + } + elsif ( ref $part_map->{ $part } eq 'HASH' ) { + $part_map = $part_map->{ $part }; + } + } + } + if( !$new_ident && !ref $map->{ $ident } ) { + $new_ident = $map->{ $ident }; + } } elsif( $map && ref $map eq 'CODE' ) { - $new_ident = $map->( $ident, $default_ident, @extra ); + my $cb = sub { + my ($cb_map) = @_; + croak "reentered map must be a hashref" + unless 'HASH' eq ref($cb_map); + return $self->_run_user_map($cb_map, $default_code, $ident, @extra); + }; + $new_ident = $map->( $ident, $default_ident, @extra, $cb ); } $new_ident ||= $default_ident; @@ -2303,6 +2541,11 @@ sub _make_column_accessor_name { return $accessor; } +sub _table_is_view { + #my ($self, $table) = @_; + return 0; +} + # Set up metadata (cols, pks, etc) sub _setup_src_meta { my ($self, $table) = @_; @@ -2313,6 +2556,9 @@ sub _setup_src_meta { my $table_class = $self->classes->{$table->sql_name}; my $table_moniker = $self->monikers->{$table->sql_name}; + $self->_dbic_stmt($table_class, 'table_class', 'DBIx::Class::ResultSource::View') + if $self->_table_is_view($table); + $self->_dbic_stmt($table_class, 'table', $table->dbic_name); my $cols = $self->_table_columns($table); @@ -2325,7 +2571,8 @@ sub _setup_src_meta { my $context = { table_class => $table_class, table_moniker => $table_moniker, - table_name => $table, + table_name => $table, # bugwards compatibility, RT#84050 + table => $table, full_table_name => $table->dbic_name, schema_class => $schema_class, column_info => $info, @@ -2469,7 +2716,8 @@ sub _default_table2moniker { my $v = $self->_get_naming_v('monikers'); - my @name_parts = map $table->$_, @{ $self->moniker_parts }; + my @moniker_parts = @{ $self->moniker_parts }; + my @name_parts = map $table->$_, @moniker_parts; my $name_idx = firstidx { $_ eq 'name' } @{ $self->moniker_parts }; @@ -2478,6 +2726,16 @@ sub _default_table2moniker { foreach my $i (0 .. 
$#name_parts) { my $part = $name_parts[$i]; + my $moniker_part = $self->_run_user_map( + $self->moniker_part_map->{$moniker_parts[$i]}, + sub { '' }, + $part, $moniker_parts[$i], + ); + if (length $moniker_part) { + push @all_parts, $moniker_part; + next; + } + if ($i != $name_idx || $v >= 8) { $part = $self->_to_identifier('monikers', $part, '_', 1); } @@ -2505,10 +2763,10 @@ sub _default_table2moniker { @part_parts = split /\s+/, $inflected; } - push @all_parts, map ucfirst, @part_parts; + push @all_parts, join '', map ucfirst, @part_parts; } - return join '', @all_parts; + return join $self->moniker_part_separator, @all_parts; } sub _table2moniker { @@ -2704,12 +2962,12 @@ sub _make_pod { elsif ($method eq 'add_unique_constraint') { $self->_pod($class, '=head1 UNIQUE CONSTRAINTS') unless $self->{_uniqs_started}{$class}; - + my ($name, $cols) = @_; $self->_pod($class, "=head2 C<$name>"); $self->_pod($class, '=over 4'); - + foreach my $col (@$cols) { $self->_pod($class, "=item \* L"); } @@ -2722,7 +2980,7 @@ sub _make_pod { elsif ($method eq 'set_primary_key') { $self->_pod($class, "=head1 PRIMARY KEY"); $self->_pod($class, '=over 4'); - + foreach my $col (@_) { $self->_pod($class, "=item \* L"); } @@ -2777,7 +3035,7 @@ sub __table_comment { if (my $code = $self->can('_table_comment')) { return $self->_filter_comment($self->$code(@_)); } - + return ''; } @@ -2881,6 +3139,11 @@ Returns a hashref of table to class mappings. In some cases it will contain multiple entries per table for the original and normalized table names, as above in L. +=head2 generated_classes + +Returns an arrayref of classes that were actually generated (i.e. not +skipped because there were no changes). + =head1 NON-ENGLISH DATABASES If you use the loader on a database with table and column names in a language