*\.DS_Store
*~
.#*
-*#
\ No newline at end of file
+*#
use Getopt::Long();
my $getopt = Getopt::Long::Parser->new(
- config => [qw/gnu_getopt bundling_override no_ignore_case pass_through/]
+ config => [qw/gnu_getopt bundling_override no_ignore_case pass_through/]
);
my $args = {
- skip_author_deps => undef,
+ skip_author_deps => undef,
};
$getopt->getoptions($args, 'skip_author_deps');
if (@ARGV) {
- warn "\nIgnoring unrecognized option(s): @ARGV\n\n";
+ warn "\nIgnoring unrecognized option(s): @ARGV\n\n";
}
use FindBin;
if ($Module::Install::AUTHOR && ! $args->{skip_author_deps}) {
eval { require Module::Install::ReadmeFromPod }
- or die "\nYou need Module::Install::ReadmeFromPod installed to run this Makefile.PL in author mode:\n\n$@\n";
+ or die "\nYou need Module::Install::ReadmeFromPod installed to run this Makefile.PL in author mode:\n\n$@\n";
warn "\n*** AUTHOR MODE: some optional dependencies converted to hard requires.\n\n";
- server link support for Oracle and MSSQL
- Relationships
- - While scanning for many-to-many, scan for implied rels as well (if
- foo->belongs_to('bar') and baz->belongs_to('bar'), does that impliy
- foo->might_have('baz') and the reverse?)
+ - While scanning for many-to-many, scan for implied rels as well (if
+    foo->belongs_to('bar') and baz->belongs_to('bar'), does that imply
+ foo->might_have('baz') and the reverse?)
- Backends
- SQLite
For example:
- relationship_attrs => {
- has_many => { cascade_delete => 1, cascade_copy => 1 },
- might_have => { cascade_delete => 1, cascade_copy => 1 },
- },
+ relationship_attrs => {
+ has_many => { cascade_delete => 1, cascade_copy => 1 },
+ might_have => { cascade_delete => 1, cascade_copy => 1 },
+ },
use this to turn L<DBIx::Class> cascades to on on your
L<has_many|DBIx::Class::Relationship/has_many> and
Same as moniker_map, but for column accessor names. If a coderef is
passed, the code is called with arguments of
- the name of the column in the underlying database,
- default accessor name that DBICSL would ordinarily give this column,
- {
- table_class => name of the DBIC class we are building,
- table_moniker => calculated moniker for this table (after moniker_map if present),
- table => table object of interface DBIx::Class::Schema::Loader::Table,
- full_table_name => schema-qualified name of the database table (RDBMS specific),
- schema_class => name of the schema class we are building,
- column_info => hashref of column info (data_type, is_nullable, etc),
- }
- coderef ref that can be called with a hashref map
+ the name of the column in the underlying database,
+ default accessor name that DBICSL would ordinarily give this column,
+ {
+ table_class => name of the DBIC class we are building,
+ table_moniker => calculated moniker for this table (after moniker_map if present),
+ table => table object of interface DBIx::Class::Schema::Loader::Table,
+ full_table_name => schema-qualified name of the database table (RDBMS specific),
+ schema_class => name of the schema class we are building,
+ column_info => hashref of column info (data_type, is_nullable, etc),
+ }
+      a coderef that can be called with a hashref map
the L<table object|DBIx::Class::Schema::Loader::Table> stringifies to the
unqualified table name.
loads the given components into every Result class, this option allows you to
load certain components for specified Result classes. For example:
- result_components_map => {
- StationVisited => '+YourApp::Schema::Component::StationVisited',
- RouteChange => [
- '+YourApp::Schema::Component::RouteChange',
- 'InflateColumn::DateTime',
- ],
- }
+ result_components_map => {
+ StationVisited => '+YourApp::Schema::Component::StationVisited',
+ RouteChange => [
+ '+YourApp::Schema::Component::RouteChange',
+ 'InflateColumn::DateTime',
+ ],
+ }
You may use this in conjunction with L</components>.
applies the given roles to every Result class, this option allows you to apply
certain roles for specified Result classes. For example:
- result_roles_map => {
- StationVisited => [
- 'YourApp::Role::Building',
- 'YourApp::Role::Destination',
- ],
- RouteChange => 'YourApp::Role::TripEvent',
- }
+ result_roles_map => {
+ StationVisited => [
+ 'YourApp::Role::Building',
+ 'YourApp::Role::Destination',
+ ],
+ RouteChange => 'YourApp::Role::TripEvent',
+ }
You may use this in conjunction with L</result_roles>.
For example:
- custom_column_info => sub {
- my ($table, $column_name, $column_info) = @_;
+ custom_column_info => sub {
+ my ($table, $column_name, $column_info) = @_;
- if ($column_name eq 'dog' && $column_info->{default_value} eq 'snoopy') {
- return { is_snoopy => 1 };
- }
- },
+ if ($column_name eq 'dog' && $column_info->{default_value} eq 'snoopy') {
+ return { is_snoopy => 1 };
+ }
+ },
This attribute can also be used to set C<inflate_datetime> on a non-datetime
column so it also receives the L</datetime_timezone> and/or L</datetime_locale>.
return unless -e $filename;
my ($old_gen, $old_md5, $old_ver, $old_ts, $old_custom) =
- $self->_parse_generated_file($filename);
+ $self->_parse_generated_file($filename);
return unless $old_ver;
}
$self->_ext_stmt($class,
- qq|# These lines were loaded from '$real_inc_path' found in \@INC.\n|
- .qq|# They are now part of the custom portion of this file\n|
- .qq|# for you to hand-edit. If you do not either delete\n|
- .qq|# this section or remove that file from \@INC, this section\n|
- .qq|# will be repeated redundantly when you re-create this\n|
- .qq|# file again via Loader! See skip_load_external to disable\n|
- .qq|# this feature.\n|
+ qq|# These lines were loaded from '$real_inc_path' found in \@INC.\n|
+ .qq|# They are now part of the custom portion of this file\n|
+ .qq|# for you to hand-edit. If you do not either delete\n|
+ .qq|# this section or remove that file from \@INC, this section\n|
+ .qq|# will be repeated redundantly when you re-create this\n|
+ .qq|# file again via Loader! See skip_load_external to disable\n|
+ .qq|# this feature.\n|
);
chomp $code;
$self->_ext_stmt($class, $code);
$self->_ext_stmt($class,
- qq|# End of lines loaded from '$real_inc_path' |
+ qq|# End of lines loaded from '$real_inc_path'|
);
}
chomp $code;
$self->_ext_stmt($class, $code);
$self->_ext_stmt($class,
- qq|# End of lines loaded from '$old_real_inc_path' |
+ qq|# End of lines loaded from '$old_real_inc_path'|
);
}
}
}
sub _moose_metaclass {
- return undef unless $INC{'Class/MOP.pm'}; # if CMOP is not loaded the class could not have loaded in the 1st place
+ return undef unless $INC{'Class/MOP.pm'}; # if CMOP is not loaded the class could not have loaded in the 1st place
- my $class = $_[1];
+ my $class = $_[1];
- my $mc = try { Class::MOP::class_of($class) }
- or return undef;
+ my $mc = try { Class::MOP::class_of($class) }
+ or return undef;
- return $mc->isa('Moose::Meta::Class') ? $mc : undef;
+ return $mc->isa('Moose::Meta::Class') ? $mc : undef;
}
# We use this instead of ensure_class_loaded when there are package symbols we
}
}
else {
- $src_text .= qq|use base '$result_base_class';\n|;
+ $src_text .= qq|use base '$result_base_class';\n|;
}
$self->_write_classfile($src_class, $src_text);
my $compare_to;
if ($old_md5) {
- $compare_to = $text . $self->_sig_comment($old_ver, $old_ts);
- if (Digest::MD5::md5_base64(encode 'UTF-8', $compare_to) eq $old_md5) {
- return unless $self->_upgrading_from && $is_schema;
- }
+ $compare_to = $text . $self->_sig_comment($old_ver, $old_ts);
+ if (Digest::MD5::md5_base64(encode 'UTF-8', $compare_to) eq $old_md5) {
+ return unless $self->_upgrading_from && $is_schema;
+ }
}
push @{$self->generated_classes}, $class;
return if $self->dry_run;
$text .= $self->_sig_comment(
- $self->omit_version ? undef : $self->version_to_dump,
- $self->omit_timestamp ? undef : POSIX::strftime('%Y-%m-%d %H:%M:%S', localtime)
+ $self->omit_version ? undef : $self->version_to_dump,
+ $self->omit_timestamp ? undef : POSIX::strftime('%Y-%m-%d %H:%M:%S', localtime)
);
open(my $fh, '>:raw:encoding(UTF-8)', $filename)
push @roles, @{ $self->result_roles_map->{$table_moniker} }
if exists $self->result_roles_map->{$table_moniker};
- for my $class ($base, @components,
- ($self->use_moose ? 'Moose::Object' : ()), @roles) {
+ for my $class (
+ $base, @components, @roles,
+ ($self->use_moose ? 'Moose::Object' : ()),
+ ) {
$self->ensure_class_loaded($class);
push @methods, @{ Class::Inspector->methods($class) || [] };
sub { $self->_default_column_accessor_name( shift ) },
$column_name,
$column_context_info,
- );
+ );
return $accessor;
}
$self->moniker_map,
sub { $self->_default_table2moniker( shift ) },
$table
- );
+ );
}
sub _load_relationships {
looks_like_number($s) ? $s : qq{'$s'};
" $_: $s"
- } sort keys %$attrs,
+ } sort keys %$attrs,
);
if (my $comment = $self->__column_comment($self->class_to_table->{$class}, $col_counter, $name)) {
$self->_pod( $class, $comment );
my $self = shift;
my $quote_char = $self->dbh->get_info(29)
- || $self->schema->storage->sql_maker->quote_char
- || q{"};
+ || $self->schema->storage->sql_maker->quote_char
+ || q{"};
# For our usage as regex matches, concatenating multiple quote_char
# values works fine (e.g. s/[\Q<>\E]// if quote_char was [ '<', '>' ])
sub _build_name_sep {
my $self = shift;
return $self->dbh->get_info(41)
- || $self->schema->storage->sql_maker->name_sep
- || '.';
+ || $self->schema->storage->sql_maker->name_sep
+ || '.';
}
# Override this in vendor modules to do things at the end of ->new()
if (ref $system_schema) {
$matches = 1
if $schema_name =~ $system_schema
- && $schema !~ $system_schema;
+ && $schema !~ $system_schema;
}
else {
$matches = 1
# if (lc($data_type) eq 'varchar') {
# $result->{$col}{size}[1] = $info->{colmin};
# }
-
+
my ($default_type, $default) = @{$info}{qw/deflt_type deflt/};
next unless $default_type;
=head1 COLUMN NAME CASE ISSUES
By default column names from unquoted DDL will be generated in lowercase, for
-consistency with other backends.
+consistency with other backends.
Set the L<preserve_case|DBIx::Class::Schema::Loader::Base/preserve_case> option
to true if you would like to have column names in the internal case, which is
JOIN [$db].INFORMATION_SCHEMA.KEY_COLUMN_USAGE fk_kcu
ON fk_kcu.constraint_name = fk_tc.constraint_name
AND fk_kcu.table_name = fk_tc.table_name
- AND fk_kcu.table_schema = fk_tc.table_schema
+ AND fk_kcu.table_schema = fk_tc.table_schema
JOIN [$db].INFORMATION_SCHEMA.TABLE_CONSTRAINTS uk_tc
ON uk_tc.constraint_name = rc.unique_constraint_name
AND uk_tc.table_schema = rc.unique_constraint_schema
$delete_rule, $update_rule) = $sth->fetchrow_array) {
push @{ $rels{$fk}{local_columns} }, $self->_lc($col);
push @{ $rels{$fk}{remote_columns} }, $self->_lc($remote_col);
-
+
$rels{$fk}{remote_table} = DBIx::Class::Schema::Loader::Table::Sybase->new(
loader => $self,
name => $remote_table,
my $type_info = $dbh->type_info($type_num);
return undef if not $type_info;
-
+
my $type_name = $type_info->{TYPE_NAME};
# fix up truncated type names
ON fk.foreign_table_id = fkt.table_id
JOIN sysuser fku
ON fkt.creator = fku.user_id
-JOIN sysidx pki
+JOIN sysidx pki
ON fk.primary_table_id = pki.table_id AND fk.primary_index_id = pki.index_id
-JOIN sysidx fki
+JOIN sysidx fki
ON fk.foreign_table_id = fki.table_id AND fk.foreign_index_id = fki.index_id
JOIN sysidxcol fkic
ON fkt.table_id = fkic.table_id AND fki.index_id = fkic.index_id
local $self->dbh->{FetchHashKeyName} = 'NAME_lc';
my $sth = $self->dbh->prepare(
- "pragma table_info(" . $self->dbh->quote_identifier($table) . ")"
+ "pragma table_info(" . $self->dbh->quote_identifier($table) . ")"
);
$sth->execute;
my $cols = $sth->fetchall_hashref('name');
# set it is_auto_increment. This isn't 100%, but it's better than the
# alternatives.
while (my ($col_name, $info) = each %$result) {
- if ($cols{$col_name}{pk}) {
- $num_pk++;
- if (lc($cols{$col_name}{type}) eq 'integer') {
- $pk_col = $col_name;
+ if ($cols{$col_name}{pk}) {
+ $num_pk++;
+ if (lc($cols{$col_name}{type}) eq 'integer') {
+ $pk_col = $col_name;
+ }
}
- }
}
while (my ($col, $info) = each %$result) {
${ $info->{default_value} } = 'current_timestamp';
}
if ($num_pk == 1 and defined $pk_col and $pk_col eq $col) {
- $info->{is_auto_increment} = 1;
+ $info->{is_auto_increment} = 1;
}
}
foreign \s+ key \s* \( \s* $local_cols \s* \) \s* references \s* (?:\S+|".+?(?<!")") \s*
(?:\( \s* $remote_cols \s* \) \s*)?
(?:(?:
- on \s+ (?:delete|update) \s+ (?:set \s+ null|set \s+ default|cascade|restrict|no \s+ action)
+ on \s+ (?:delete|update) \s+ (?:set \s+ null|set \s+ default|cascade|restrict|no \s+ action)
|
- match \s* (?:\S+|".+?(?<!")")
+ match \s* (?:\S+|".+?(?<!")")
) \s*)*
((?:not)? \s* deferrable)?
/sxi;
if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
bless $self, $subclass;
$self->_rebless;
- }
+ }
}
}
local $self->dbh->{FetchHashKeyName} = 'NAME_lc';
my $sth = $self->dbh->prepare(<<"EOF");
-sp_pkeys @{[ $self->dbh->quote($table->name) ]},
+sp_pkeys @{[ $self->dbh->quote($table->name) ]},
@{[ $self->dbh->quote($table->schema) ]},
@{[ $self->dbh->quote($db) ]}
EOF
# Empty. POD only.
-=head1 NAME
-
+=head1 NAME
+
DBIx::Class::Schema::Loader::DBI::Writing - Loader subclass writing guide for DBI
=head1 SYNOPSIS
- package DBIx::Class::Schema::Loader::DBI::Foo;
+ package DBIx::Class::Schema::Loader::DBI::Foo;
- # THIS IS JUST A TEMPLATE TO GET YOU STARTED.
+ # THIS IS JUST A TEMPLATE TO GET YOU STARTED.
- use strict;
- use warnings;
- use base 'DBIx::Class::Schema::Loader::DBI';
- use mro 'c3';
+ use strict;
+ use warnings;
+ use base 'DBIx::Class::Schema::Loader::DBI';
+ use mro 'c3';
- sub _table_uniq_info {
- my ($self, $table) = @_;
+ sub _table_uniq_info {
+ my ($self, $table) = @_;
- # ... get UNIQUE info for $table somehow
- # and return a data structure that looks like this:
+ # ... get UNIQUE info for $table somehow
+ # and return a data structure that looks like this:
- return [
- [ 'keyname' => [ 'colname' ] ],
- [ 'keyname2' => [ 'col1name', 'col2name' ] ],
- [ 'keyname3' => [ 'colname' ] ],
- ];
+ return [
+ [ 'keyname' => [ 'colname' ] ],
+ [ 'keyname2' => [ 'col1name', 'col2name' ] ],
+ [ 'keyname3' => [ 'colname' ] ],
+ ];
- # Where the "keyname"'s are just unique identifiers, such as the
- # name of the unique constraint, or the names of the columns involved
- # concatenated if you wish.
- }
+ # Where the "keyname"'s are just unique identifiers, such as the
+ # name of the unique constraint, or the names of the columns involved
+ # concatenated if you wish.
+ }
- sub _table_comment {
- my ( $self, $table ) = @_;
- return 'Comment';
- }
+ sub _table_comment {
+ my ( $self, $table ) = @_;
+ return 'Comment';
+ }
- sub _column_comment {
- my ( $self, $table, $column_number ) = @_;
- return 'Col. comment';
- }
+ sub _column_comment {
+ my ( $self, $table, $column_number ) = @_;
+ return 'Col. comment';
+ }
- 1;
+ 1;
=head1 DETAILS
local $self->{db_schema} = [ $f_schema ] if $f_schema;
first {
- lc($_->name) eq lc($f_table)
+ lc($_->name) eq lc($f_table)
&& ((not $f_schema) || lc($_->schema) eq lc($f_schema))
} $self->_tables_list;
};
class names. The values are arrayrefs of hashes containing method name and
arguments, like so:
- {
- 'Some::Source::Class' => [
- { method => 'belongs_to', arguments => [
- 'col1', 'Another::Source::Class' ] },
- { method => 'has_many', arguments => [
- 'anothers', 'Yet::Another::Source::Class', 'col15' ] },
- ],
- 'Another::Source::Class' => [
- # ...
- ],
- # ...
- }
+ {
+ 'Some::Source::Class' => [
+ { method => 'belongs_to', arguments => [
+ 'col1', 'Another::Source::Class' ] },
+ { method => 'has_many', arguments => [
+ 'anothers', 'Yet::Another::Source::Class', 'col15' ] },
+ ],
+ 'Another::Source::Class' => [
+ # ...
+ ],
+ # ...
+ }
=cut
=head1 SYNOPSIS
- dbicdump <configuration_file>
- dbicdump [-I <lib-path>] [-o <loader_option>=<value> ] \
- <schema_class> <connect_info>
+ dbicdump <configuration_file>
+ dbicdump [-I <lib-path>] [-o <loader_option>=<value> ] \
+ <schema_class> <connect_info>
Examples:
- $ dbicdump schema.conf
+ $ dbicdump schema.conf
- $ dbicdump -o dump_directory=./lib \
- -o components='["InflateColumn::DateTime"]' \
- MyApp::Schema dbi:SQLite:./foo.db
+ $ dbicdump -o dump_directory=./lib \
+ -o components='["InflateColumn::DateTime"]' \
+ MyApp::Schema dbi:SQLite:./foo.db
- $ dbicdump -o dump_directory=./lib \
- -o components='["InflateColumn::DateTime"]' \
- MyApp::Schema dbi:SQLite:./foo.db '{ quote_char => "\"" }'
+ $ dbicdump -o dump_directory=./lib \
+ -o components='["InflateColumn::DateTime"]' \
+ MyApp::Schema dbi:SQLite:./foo.db '{ quote_char => "\"" }'
- $ dbicdump -Ilib -o dump_directory=./lib \
- -o components='["InflateColumn::DateTime"]' \
- -o preserve_case=1 \
- MyApp::Schema dbi:mysql:database=foo user pass '{ quote_char => "`" }'
+ $ dbicdump -Ilib -o dump_directory=./lib \
+ -o components='["InflateColumn::DateTime"]' \
+ -o preserve_case=1 \
+ MyApp::Schema dbi:mysql:database=foo user pass \
+ '{ quote_char => "`" }'
- $ dbicdump -o dump_directory=./lib \
- -o components='["InflateColumn::DateTime"]' \
- MyApp::Schema 'dbi:mysql:database=foo;host=domain.tld;port=3306' user pass
+ $ dbicdump -o dump_directory=./lib \
+ -o components='["InflateColumn::DateTime"]' \
+ MyApp::Schema 'dbi:mysql:database=foo;host=domain.tld;port=3306' \
+ user pass
On Windows that would be:
- $ dbicdump -o dump_directory=.\lib ^
- -o components="[q{InflateColumn::DateTime}]" ^
- -o preserve_case=1 ^
- MyApp::Schema dbi:mysql:database=foo user pass "{ quote_char => q{`} }"
-
+ $ dbicdump -o dump_directory=.\lib ^
+ -o components="[q{InflateColumn::DateTime}]" ^
+ -o preserve_case=1 ^
+ MyApp::Schema dbi:mysql:database=foo user pass ^
+ "{ quote_char => q{`} }"
+
Configuration files must have schema_class and connect_info sections,
an example of a general config file is as follows:
schema_class MyApp::Schema
lib /extra/perl/libs
-
+
# connection string
<connect_info>
dsn dbi:mysql:example
user root
pass secret
</connect_info>
-
+
# dbic loader options
<loader_options>
dump_directory ./lib
my $configuration_file = shift @ARGV;
- my $configurations =
- Config::Any->load_files( {
- use_ext => 1,
- flatten_to_hash => 1,
- files => [$configuration_file] } );
-
+ my $configurations = Config::Any->load_files({
+ use_ext => 1,
+ flatten_to_hash => 1,
+ files => [$configuration_file]
+ });
+
my $c = (values %$configurations)[0];
-
+
unless (keys %{$c->{connect_info}} && $c->{schema_class}) {
pod2usage(1);
}
}
lib->import($_) for @libs;
-
+
my ($dsn, $user, $pass, $options) =
map { $c->{connect_info}->{$_} } qw/dsn user pass options/;
$options ||= {};
$c->{loader_options}->{dump_directory} ||=
$loader_options->{dump_directory};
-
+
make_schema_at(
$c->{schema_class},
$c->{loader_options} || {},
else {
my ($schema_class, @loader_connect_info) = @ARGV
or pod2usage(1);
-
+
my $dsn = shift @loader_connect_info;
-
+
my ($user, $pass) = $dsn =~ /sqlite/i ? ('', '')
: splice @loader_connect_info, 0, 2;
-
+
my @extra_connect_info_opts = map parse_value($_), @loader_connect_info;
-
+
make_schema_at(
$schema_class,
$loader_options,
my $tester = dbixcsl_common_tests->new(
vendor => 'SQLite',
auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT',
- dsn => "dbi:$class:dbname=$tdir/sqlite_test",
+ dsn => "dbi:$class:dbname=$tdir/sqlite_test.db",
user => '',
password => '',
connect_info_opts => {
$tester->run_tests();
END {
- unlink "$tdir/sqlite_test" unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP};
+ unlink "$tdir/sqlite_test.db" unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP};
}
# http://www.postgresql.org/docs/7.4/interactive/datatype.html
#
# Numeric Types
- boolean => { data_type => 'boolean' },
- bool => { data_type => 'boolean' },
+ boolean => { data_type => 'boolean' },
+ bool => { data_type => 'boolean' },
'bool default false'
=> { data_type => 'boolean', default_value => \'false' },
'bool default true'
'bool default 1::bool'
=> { data_type => 'boolean', default_value => \'true' },
- bigint => { data_type => 'bigint' },
- int8 => { data_type => 'bigint' },
- bigserial => { data_type => 'bigint', is_auto_increment => 1 },
- serial8 => { data_type => 'bigint', is_auto_increment => 1 },
- integer => { data_type => 'integer' },
- int => { data_type => 'integer' },
- int4 => { data_type => 'integer' },
- serial => { data_type => 'integer', is_auto_increment => 1 },
- serial4 => { data_type => 'integer', is_auto_increment => 1 },
- smallint => { data_type => 'smallint' },
- int2 => { data_type => 'smallint' },
-
- money => { data_type => 'money' },
-
- 'double precision' => { data_type => 'double precision' },
- float8 => { data_type => 'double precision' },
- real => { data_type => 'real' },
- float4 => { data_type => 'real' },
+ bigint => { data_type => 'bigint' },
+ int8 => { data_type => 'bigint' },
+ bigserial => { data_type => 'bigint', is_auto_increment => 1 },
+ serial8 => { data_type => 'bigint', is_auto_increment => 1 },
+ integer => { data_type => 'integer' },
+ int => { data_type => 'integer' },
+ int4 => { data_type => 'integer' },
+ serial => { data_type => 'integer', is_auto_increment => 1 },
+ serial4 => { data_type => 'integer', is_auto_increment => 1 },
+ smallint => { data_type => 'smallint' },
+ int2 => { data_type => 'smallint' },
+
+ money => { data_type => 'money' },
+
+ 'double precision' => { data_type => 'double precision' },
+ float8 => { data_type => 'double precision' },
+ real => { data_type => 'real' },
+ float4 => { data_type => 'real' },
'float(24)' => { data_type => 'real' },
'float(25)' => { data_type => 'double precision' },
'float(53)' => { data_type => 'double precision' },
float => { data_type => 'double precision' },
- numeric => { data_type => 'numeric' },
- decimal => { data_type => 'numeric' },
- 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] },
- 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] },
+ numeric => { data_type => 'numeric' },
+ decimal => { data_type => 'numeric' },
+ 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] },
+ 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] },
# Bit String Types
- 'bit varying(2)' => { data_type => 'varbit', size => 2 },
- 'varbit(2)' => { data_type => 'varbit', size => 2 },
- 'varbit' => { data_type => 'varbit' },
- bit => { data_type => 'bit', size => 1 },
- 'bit(3)' => { data_type => 'bit', size => 3 },
+ 'bit varying(2)' => { data_type => 'varbit', size => 2 },
+ 'varbit(2)' => { data_type => 'varbit', size => 2 },
+ 'varbit' => { data_type => 'varbit' },
+ bit => { data_type => 'bit', size => 1 },
+ 'bit(3)' => { data_type => 'bit', size => 3 },
# Network Types
- inet => { data_type => 'inet' },
- cidr => { data_type => 'cidr' },
- macaddr => { data_type => 'macaddr' },
+ inet => { data_type => 'inet' },
+ cidr => { data_type => 'cidr' },
+ macaddr => { data_type => 'macaddr' },
# Geometric Types
- point => { data_type => 'point' },
- line => { data_type => 'line' },
- lseg => { data_type => 'lseg' },
- box => { data_type => 'box' },
- path => { data_type => 'path' },
- polygon => { data_type => 'polygon' },
- circle => { data_type => 'circle' },
+ point => { data_type => 'point' },
+ line => { data_type => 'line' },
+ lseg => { data_type => 'lseg' },
+ box => { data_type => 'box' },
+ path => { data_type => 'path' },
+ polygon => { data_type => 'polygon' },
+ circle => { data_type => 'circle' },
# Character Types
- 'character varying(2)' => { data_type => 'varchar', size => 2 },
- 'varchar(2)' => { data_type => 'varchar', size => 2 },
- 'character(2)' => { data_type => 'char', size => 2 },
- 'char(2)' => { data_type => 'char', size => 2 },
+ 'character varying(2)' => { data_type => 'varchar', size => 2 },
+ 'varchar(2)' => { data_type => 'varchar', size => 2 },
+ 'character(2)' => { data_type => 'char', size => 2 },
+ 'char(2)' => { data_type => 'char', size => 2 },
# check that default null is correctly rewritten
'char(3) default null' => { data_type => 'char', size => 3,
default_value => \'null' },
- 'character' => { data_type => 'char', size => 1 },
- 'char' => { data_type => 'char', size => 1 },
- text => { data_type => 'text' },
+ 'character' => { data_type => 'char', size => 1 },
+ 'char' => { data_type => 'char', size => 1 },
+ text => { data_type => 'text' },
# varchar with no size has unlimited size, we rewrite to 'text'
- varchar => { data_type => 'text',
+ varchar => { data_type => 'text',
original => { data_type => 'varchar' } },
# check default null again (to make sure ref is safe)
'varchar(3) default null' => { data_type => 'varchar', size => 3,
default_value => \'null' },
# Datetime Types
- date => { data_type => 'date' },
- interval => { data_type => 'interval' },
- 'interval(2)' => { data_type => 'interval', size => 2 },
- time => { data_type => 'time' },
- 'time(2)' => { data_type => 'time', size => 2 },
- 'time without time zone' => { data_type => 'time' },
- 'time(2) without time zone' => { data_type => 'time', size => 2 },
- 'time with time zone' => { data_type => 'time with time zone' },
- 'time(2) with time zone' => { data_type => 'time with time zone', size => 2 },
- timestamp => { data_type => 'timestamp' },
- 'timestamp default now()'
- => { data_type => 'timestamp', default_value => \'current_timestamp',
+ date => { data_type => 'date' },
+ interval => { data_type => 'interval' },
+ 'interval(2)' => { data_type => 'interval', size => 2 },
+ time => { data_type => 'time' },
+ 'time(2)' => { data_type => 'time', size => 2 },
+ 'time without time zone' => { data_type => 'time' },
+ 'time(2) without time zone' => { data_type => 'time', size => 2 },
+ 'time with time zone' => { data_type => 'time with time zone' },
+ 'time(2) with time zone' => { data_type => 'time with time zone', size => 2 },
+ timestamp => { data_type => 'timestamp' },
+ 'timestamp default now()' => { data_type => 'timestamp',
+ default_value => \'current_timestamp',
original => { default_value => \'now()' } },
- 'timestamp(2)' => { data_type => 'timestamp', size => 2 },
- 'timestamp without time zone' => { data_type => 'timestamp' },
- 'timestamp(2) without time zone' => { data_type => 'timestamp', size => 2 },
+ 'timestamp(2)' => { data_type => 'timestamp', size => 2 },
+ 'timestamp without time zone' => { data_type => 'timestamp' },
+ 'timestamp(2) without time zone' => { data_type => 'timestamp', size => 2 },
- 'timestamp with time zone' => { data_type => 'timestamp with time zone' },
- 'timestamp(2) with time zone' => { data_type => 'timestamp with time zone', size => 2 },
+ 'timestamp with time zone' => { data_type => 'timestamp with time zone' },
+ 'timestamp(2) with time zone' => { data_type => 'timestamp with time zone', size => 2 },
# Blob Types
- bytea => { data_type => 'bytea' },
+ bytea => { data_type => 'bytea' },
# Enum Types
- pg_loader_test_enum => { data_type => 'enum', extra => { custom_type_name => 'pg_loader_test_enum',
- list => [ qw/foo bar baz/] } },
+ pg_loader_test_enum => { data_type => 'enum',
+ extra => { custom_type_name => 'pg_loader_test_enum',
+ list => [ qw/foo bar baz/] } },
},
pre_create => [
q{
delete $uniqs{primary};
- is_deeply ((values %uniqs)[0], ['four_id'],
- 'unique constraint is correct in schema name with dash');
+ is_deeply(
+ (values %uniqs)[0], ['four_id'],
+ 'unique constraint is correct in schema name with dash'
+ );
lives_and {
ok $rsrc = $test_schema->source('PgLoaderTest6');
delete $uniqs{primary};
- is_deeply ((values %uniqs)[0], ['six_id'],
- 'unique constraint is correct in schema name with dot');
+ is_deeply(
+ (values %uniqs)[0], ['six_id'],
+ 'unique constraint is correct in schema name with dot'
+ );
lives_and {
ok $test_schema->source('PgLoaderTest6')
# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
BEGIN {
- if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
- unshift @INC, $_ for split /:/, $lib_dirs;
- }
+ if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
+ unshift @INC, $_ for split /:/, $lib_dirs;
+ }
}
use lib qw(t/lib);
my (%dsns, $common_version);
for (qw/MSSQL MSSQL_ODBC MSSQL_ADO/) {
- next unless $ENV{"DBICTEST_${_}_DSN"};
-
- (my $dep_group = lc "rdbms_$_") =~ s/mssql$/mssql_sybase/;
- if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) {
- diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group)
- . " to test with $_";
- next;
- }
-
- $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"};
- $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"};
- $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"};
-
- require DBI;
- my $dbh = DBI->connect (@{$dsns{$_}}{qw/dsn user password/}, { RaiseError => 1, PrintError => 0} );
- my $srv_ver = eval {
- $dbh->get_info(18)
- ||
- $dbh->selectrow_hashref('master.dbo.xp_msver ProductVersion')->{Character_Value}
- } || 0;
-
- my ($maj_srv_ver) = $srv_ver =~ /^(\d+)/;
-
- if (! defined $common_version or $common_version > $maj_srv_ver ) {
- $common_version = $maj_srv_ver;
- }
+ next unless $ENV{"DBICTEST_${_}_DSN"};
+
+ (my $dep_group = lc "rdbms_$_") =~ s/mssql$/mssql_sybase/;
+ if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) {
+ diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group)
+ . " to test with $_";
+ next;
+ }
+
+ $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"};
+ $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"};
+ $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"};
+
+ require DBI;
+ my $dbh = DBI->connect (@{$dsns{$_}}{qw/dsn user password/}, { RaiseError => 1, PrintError => 0} );
+ my $srv_ver = eval {
+ $dbh->get_info(18)
+ ||
+ $dbh->selectrow_hashref('master.dbo.xp_msver ProductVersion')->{Character_Value}
+ } || 0;
+
+ my ($maj_srv_ver) = $srv_ver =~ /^(\d+)/;
+
+ if (! defined $common_version or $common_version > $maj_srv_ver ) {
+ $common_version = $maj_srv_ver;
+ }
}
plan skip_all => 'You need to set the DBICTEST_MSSQL_DSN, _USER and _PASS and/or the DBICTEST_MSSQL_ODBC_DSN, _USER and _PASS environment variables'
- unless %dsns;
+ unless %dsns;
my $mssql_2008_new_data_types = {
- date => { data_type => 'date' },
- time => { data_type => 'time' },
- 'time(0)'=> { data_type => 'time', size => 0 },
- 'time(1)'=> { data_type => 'time', size => 1 },
- 'time(2)'=> { data_type => 'time', size => 2 },
- 'time(3)'=> { data_type => 'time', size => 3 },
- 'time(4)'=> { data_type => 'time', size => 4 },
- 'time(5)'=> { data_type => 'time', size => 5 },
- 'time(6)'=> { data_type => 'time', size => 6 },
- 'time(7)'=> { data_type => 'time' },
- datetimeoffset => { data_type => 'datetimeoffset' },
- 'datetimeoffset(0)' => { data_type => 'datetimeoffset', size => 0 },
- 'datetimeoffset(1)' => { data_type => 'datetimeoffset', size => 1 },
- 'datetimeoffset(2)' => { data_type => 'datetimeoffset', size => 2 },
- 'datetimeoffset(3)' => { data_type => 'datetimeoffset', size => 3 },
- 'datetimeoffset(4)' => { data_type => 'datetimeoffset', size => 4 },
- 'datetimeoffset(5)' => { data_type => 'datetimeoffset', size => 5 },
- 'datetimeoffset(6)' => { data_type => 'datetimeoffset', size => 6 },
- 'datetimeoffset(7)' => { data_type => 'datetimeoffset' },
- datetime2 => { data_type => 'datetime2' },
- 'datetime2(0)' => { data_type => 'datetime2', size => 0 },
- 'datetime2(1)' => { data_type => 'datetime2', size => 1 },
- 'datetime2(2)' => { data_type => 'datetime2', size => 2 },
- 'datetime2(3)' => { data_type => 'datetime2', size => 3 },
- 'datetime2(4)' => { data_type => 'datetime2', size => 4 },
- 'datetime2(5)' => { data_type => 'datetime2', size => 5 },
- 'datetime2(6)' => { data_type => 'datetime2', size => 6 },
- 'datetime2(7)' => { data_type => 'datetime2' },
-
- hierarchyid => { data_type => 'hierarchyid' },
+ date => { data_type => 'date' },
+ time => { data_type => 'time' },
+ 'time(0)'=> { data_type => 'time', size => 0 },
+ 'time(1)'=> { data_type => 'time', size => 1 },
+ 'time(2)'=> { data_type => 'time', size => 2 },
+ 'time(3)'=> { data_type => 'time', size => 3 },
+ 'time(4)'=> { data_type => 'time', size => 4 },
+ 'time(5)'=> { data_type => 'time', size => 5 },
+ 'time(6)'=> { data_type => 'time', size => 6 },
+ 'time(7)'=> { data_type => 'time' },
+ datetimeoffset => { data_type => 'datetimeoffset' },
+ 'datetimeoffset(0)' => { data_type => 'datetimeoffset', size => 0 },
+ 'datetimeoffset(1)' => { data_type => 'datetimeoffset', size => 1 },
+ 'datetimeoffset(2)' => { data_type => 'datetimeoffset', size => 2 },
+ 'datetimeoffset(3)' => { data_type => 'datetimeoffset', size => 3 },
+ 'datetimeoffset(4)' => { data_type => 'datetimeoffset', size => 4 },
+ 'datetimeoffset(5)' => { data_type => 'datetimeoffset', size => 5 },
+ 'datetimeoffset(6)' => { data_type => 'datetimeoffset', size => 6 },
+ 'datetimeoffset(7)' => { data_type => 'datetimeoffset' },
+ datetime2 => { data_type => 'datetime2' },
+ 'datetime2(0)' => { data_type => 'datetime2', size => 0 },
+ 'datetime2(1)' => { data_type => 'datetime2', size => 1 },
+ 'datetime2(2)' => { data_type => 'datetime2', size => 2 },
+ 'datetime2(3)' => { data_type => 'datetime2', size => 3 },
+ 'datetime2(4)' => { data_type => 'datetime2', size => 4 },
+ 'datetime2(5)' => { data_type => 'datetime2', size => 5 },
+ 'datetime2(6)' => { data_type => 'datetime2', size => 6 },
+ 'datetime2(7)' => { data_type => 'datetime2' },
+
+ hierarchyid => { data_type => 'hierarchyid' },
};
my $tester = dbixcsl_common_tests->new(
local $SIG{__WARN__} = sub {
push @warns, $_[0] unless $_[0] =~ /\bcollides\b/;
};
-
+
make_schema_at(
'MSSQLMultiDatabase',
{
numeric => { data_type => 'numeric' },
decimal => { data_type => 'numeric' },
dec => { data_type => 'numeric' },
- 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] },
- 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] },
- 'dec(6,3)' => { data_type => 'numeric', size => [6,3] },
+ 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] },
+ 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] },
+ 'dec(6,3)' => { data_type => 'numeric', size => [6,3] },
# Boolean Type
# XXX this should map to 'boolean'
push @warns, $_[0] unless $_[0] =~ /\bcollides\b/
|| $_[0] =~ /unreferencable/;
};
-
+
make_schema_at(
'InformixMultiDatabase',
{
}
if (my $dbh1 = try { $schema->storage->dbh }) {
-
+
try {
$dbh1->do('DROP TABLE informix_loader_test5');
$dbh1->do('DROP TABLE informix_loader_test4');
# Takes a $schema as input, runs 4 basic tests
sub test_schema {
- my ($testname, $schema) = @_;
+ my ($testname, $schema) = @_;
- warnings_are ( sub {
- $schema = $schema->clone if !ref $schema;
- isa_ok($schema, 'DBIx::Class::Schema', $testname);
+ warnings_are {
+ $schema = $schema->clone if !ref $schema;
+ isa_ok($schema, 'DBIx::Class::Schema', $testname);
- my $rel_foo_rs = $schema->resultset('Bar')->search({ barid => 3})->search_related('fooref');
- isa_ok($rel_foo_rs, 'DBIx::Class::ResultSet', $testname);
+ my $rel_foo_rs = $schema->resultset('Bar')->search({ barid => 3})->search_related('fooref');
+ isa_ok($rel_foo_rs, 'DBIx::Class::ResultSet', $testname);
- my $rel_foo = $rel_foo_rs->next;
- isa_ok($rel_foo, "DBICTest::Schema::_${testname}::Foo", $testname);
+ my $rel_foo = $rel_foo_rs->next;
+ isa_ok($rel_foo, "DBICTest::Schema::_${testname}::Foo", $testname);
- is($rel_foo->footext, 'Foo record associated with the Bar with barid 3', "$testname correct object");
+ is($rel_foo->footext, 'Foo record associated with the Bar with barid 3', "$testname correct object");
- my $foo_rs = $schema->resultset('Foo');
- my $foo_new = $foo_rs->create({footext => "${testname}_foo"});
- is ($foo_rs->search({footext => "${testname}_foo"})->count, 1, "$testname object created") || die;
- }, [], "No warnings during $testname invocations");
+ my $foo_rs = $schema->resultset('Foo');
+ my $foo_new = $foo_rs->create({footext => "${testname}_foo"});
+ is ($foo_rs->search({footext => "${testname}_foo"})->count, 1, "$testname object created") || die;
+ } [], "No warnings during $testname invocations";
}
my @invocations = (
my $cref = shift @invocations;
my $schema = do {
- local $SIG{__WARN__} = sigwarn_silencer(
- qr/Deleting existing file .+ due to 'really_erase_my_files' setting/
- );
- $cref->();
+ local $SIG{__WARN__} = sigwarn_silencer(
+ qr/Deleting existing file .+ due to 'really_erase_my_files' setting/
+ );
+ $cref->();
};
test_schema($style, $schema);
rmtree($dump_path, 1, 1);
lives_ok {
- warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) }
- [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
+ warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) }
+ [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
} 'no death with dump_directory set' or diag "Dump failed: $@";
is_deeply(
DBICTest::Schema::1->_loader_invoked(undef);
SKIP: {
- skip "ActiveState perl produces additional warnings", 1
- if ($^O eq 'MSWin32');
+ skip "ActiveState perl produces additional warnings", 1
+ if ($^O eq 'MSWin32');
- warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) }
- [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
+ warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) }
+ [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
- is_deeply(
- [ sort @{ DBICTest::Schema::1->loader->generated_classes } ],
- [ ],
- 'no classes generated on second dump'
- );
+ is_deeply(
+ [ sort @{ DBICTest::Schema::1->loader->generated_classes } ],
+ [ ],
+ 'no classes generated on second dump'
+ );
- rmtree($dump_path, 1, 1);
+ rmtree($dump_path, 1, 1);
}
lives_ok {
- warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) }
- [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
+ warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) }
+ [ qr|^Dumping manual schema|, qr|^Schema dump completed| ];
} 'no death with dump_directory set (overwrite1)' or diag "Dump failed: $@";
is_deeply(
DBICTest::Schema::2->_loader_invoked(undef);
lives_ok {
- warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) }
- [
- qr/^Dumping manual schema/,
- qr|^Deleting .+Schema/2.+ due to 'really_erase_my_files'|,
- qr|^Deleting .+Schema/2/Result/Foo.+ due to 'really_erase_my_files'|,
- qr|^Deleting .+Schema/2/Result/Bar.+ due to 'really_erase_my_files'|,
- qr/^Schema dump completed/
- ];
+ warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) }
+ [
+ qr/^Dumping manual schema/,
+ qr|^Deleting .+Schema/2.+ due to 'really_erase_my_files'|,
+ qr|^Deleting .+Schema/2/Result/Foo.+ due to 'really_erase_my_files'|,
+ qr|^Deleting .+Schema/2/Result/Bar.+ due to 'really_erase_my_files'|,
+ qr/^Schema dump completed/
+ ];
} 'no death with dump_directory set (overwrite2)' or diag "Dump failed: $@";
is_deeply(
# test loading external content
$t->dump_test(
- classname => 'DBICTest::Schema::_no_skip_load_external',
- regexes => {
- Foo => [
- qr/package DBICTest::Schema::_no_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s
- ],
- },
+ classname => 'DBICTest::Schema::_no_skip_load_external',
+ regexes => {
+ Foo => [
+ qr/package DBICTest::Schema::_no_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s
+ ],
+ },
);
# test skipping external content
$t->dump_test(
- classname => 'DBICTest::Schema::_skip_load_external',
- options => {
- skip_load_external => 1,
- },
- neg_regexes => {
- Foo => [
- qr/package DBICTest::Schema::_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s
- ],
- },
+ classname => 'DBICTest::Schema::_skip_load_external',
+ options => {
+ skip_load_external => 1,
+ },
+ neg_regexes => {
+ Foo => [
+ qr/package DBICTest::Schema::_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s
+ ],
+ },
);
$t->cleanup;
# test config_file
{
- my $config_file = File::Temp->new (UNLINK => 1);
-
- print $config_file "{ skip_relationships => 1 }\n";
- close $config_file;
-
- $t->dump_test(
- classname => 'DBICTest::Schema::_config_file',
- options => { config_file => "$config_file" },
- neg_regexes => {
- Foo => [
- qr/has_many/,
- ],
- },
- );
+ my $config_file = File::Temp->new (UNLINK => 1);
+
+ print $config_file "{ skip_relationships => 1 }\n";
+ close $config_file;
+
+ $t->dump_test(
+ classname => 'DBICTest::Schema::_config_file',
+ options => { config_file => "$config_file" },
+ neg_regexes => {
+ Foo => [
+ qr/has_many/,
+ ],
+ },
+ );
}
# proper exception
$t->dump_test(
- classname => 'DBICTest::Schema::_clashing_monikers',
- test_db_class => 'make_dbictest_db_clashing_monikers',
- error => qr/tables (?:"bar", "bars"|"bars", "bar") reduced to the same source moniker 'Bar'/,
+ classname => 'DBICTest::Schema::_clashing_monikers',
+ test_db_class => 'make_dbictest_db_clashing_monikers',
+ error => qr/tables (?:"bar", "bars"|"bars", "bar") reduced to the same source moniker 'Bar'/,
);
# test out the POD and "use utf8;"
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- custom_column_info => sub {
- my ($table, $col, $info) = @_;
- return +{ extra => { is_footext => 1 } } if $col eq 'footext';
- },
- result_base_class => 'My::ResultBaseClass',
- additional_classes => 'TestAdditional',
- additional_base_classes => 'TestAdditionalBase',
- left_base_classes => 'TestLeftBase',
- components => [ 'TestComponent', '+TestComponentFQN' ],
- },
- regexes => {
- schema => [
- qr/^use utf8;\n/,
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_classes/,
- ],
- Foo => [
- qr/^use utf8;\n/,
- qr/package DBICTest::DumpMore::1::Foo;/,
- qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Foo\n\n=cut\n\nuse strict;\nuse warnings;\n\n/,
- qr/\n=head1 BASE CLASS: L<My::ResultBaseClass>\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/,
- qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L<TestAdditional>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L<TestAdditionalBase>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L<TestLeftBase>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L<DBIx::Class::TestComponent>\n\n=item \* L<TestComponentFQN>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 TABLE: C<foo>\n\n=cut\n\n__PACKAGE__->table\("foo"\);\n\n/,
- qr/\n=head1 ACCESSORS\n\n/,
- qr/\n=head2 fooid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/,
- qr/\n=head2 footext\n\n data_type: 'text'\n default_value: 'footext'\n extra: \{is_footext => 1\}\n is_nullable: 1\n\n/,
- qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/fooid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("fooid"\);\n/,
- qr/\n=head1 RELATIONS\n\n/,
- qr/\n=head2 bars\n\nType: has_many\n\nRelated object: L<DBICTest::DumpMore::1::Bar>\n\n=cut\n\n/,
- qr/1;\n$/,
- ],
- Bar => [
- qr/^use utf8;\n/,
- qr/package DBICTest::DumpMore::1::Bar;/,
- qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Bar\n\n=cut\n\nuse strict;\nuse warnings;\n\n/,
- qr/\n=head1 BASE CLASS: L<My::ResultBaseClass>\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/,
- qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L<TestAdditional>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L<TestAdditionalBase>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L<TestLeftBase>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L<DBIx::Class::TestComponent>\n\n=item \* L<TestComponentFQN>\n\n=back\n\n=cut\n\n/,
- qr/\n=head1 TABLE: C<bar>\n\n=cut\n\n__PACKAGE__->table\("bar"\);\n\n/,
- qr/\n=head1 ACCESSORS\n\n/,
- qr/\n=head2 barid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/,
- qr/\n=head2 fooref\n\n data_type: 'integer'\n is_foreign_key: 1\n is_nullable: 1\n\n/,
- qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/barid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("barid"\);\n/,
- qr/\n=head1 RELATIONS\n\n/,
- qr/\n=head2 fooref\n\nType: belongs_to\n\nRelated object: L<DBICTest::DumpMore::1::Foo>\n\n=cut\n\n/,
- qr/\n1;\n$/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ custom_column_info => sub {
+ my ($table, $col, $info) = @_;
+ return +{ extra => { is_footext => 1 } } if $col eq 'footext';
+ },
+ result_base_class => 'My::ResultBaseClass',
+ additional_classes => 'TestAdditional',
+ additional_base_classes => 'TestAdditionalBase',
+ left_base_classes => 'TestLeftBase',
+ components => [ 'TestComponent', '+TestComponentFQN' ],
+ },
+ regexes => {
+ schema => [
+ qr/^use utf8;\n/,
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_classes/,
+ ],
+ Foo => [
+ qr/^use utf8;\n/,
+ qr/package DBICTest::DumpMore::1::Foo;/,
+ qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Foo\n\n=cut\n\nuse strict;\nuse warnings;\n\n/,
+ qr/\n=head1 BASE CLASS: L<My::ResultBaseClass>\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/,
+ qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L<TestAdditional>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L<TestAdditionalBase>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L<TestLeftBase>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L<DBIx::Class::TestComponent>\n\n=item \* L<TestComponentFQN>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 TABLE: C<foo>\n\n=cut\n\n__PACKAGE__->table\("foo"\);\n\n/,
+ qr/\n=head1 ACCESSORS\n\n/,
+ qr/\n=head2 fooid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/,
+ qr/\n=head2 footext\n\n data_type: 'text'\n default_value: 'footext'\n extra: \{is_footext => 1\}\n is_nullable: 1\n\n/,
+ qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/fooid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("fooid"\);\n/,
+ qr/\n=head1 RELATIONS\n\n/,
+ qr/\n=head2 bars\n\nType: has_many\n\nRelated object: L<DBICTest::DumpMore::1::Bar>\n\n=cut\n\n/,
+ qr/1;\n$/,
+ ],
+ Bar => [
+ qr/^use utf8;\n/,
+ qr/package DBICTest::DumpMore::1::Bar;/,
+ qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Bar\n\n=cut\n\nuse strict;\nuse warnings;\n\n/,
+ qr/\n=head1 BASE CLASS: L<My::ResultBaseClass>\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/,
+ qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L<TestAdditional>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L<TestAdditionalBase>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L<TestLeftBase>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L<DBIx::Class::TestComponent>\n\n=item \* L<TestComponentFQN>\n\n=back\n\n=cut\n\n/,
+ qr/\n=head1 TABLE: C<bar>\n\n=cut\n\n__PACKAGE__->table\("bar"\);\n\n/,
+ qr/\n=head1 ACCESSORS\n\n/,
+ qr/\n=head2 barid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/,
+ qr/\n=head2 fooref\n\n data_type: 'integer'\n is_foreign_key: 1\n is_nullable: 1\n\n/,
+ qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/barid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("barid"\);\n/,
+ qr/\n=head1 RELATIONS\n\n/,
+ qr/\n=head2 fooref\n\nType: belongs_to\n\nRelated object: L<DBICTest::DumpMore::1::Foo>\n\n=cut\n\n/,
+ qr/\n1;\n$/,
+ ],
+ },
);
$t->append_to_class('DBICTest::DumpMore::1::Foo',q{# XXX This is my custom content XXX});
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- regexes => {
- schema => [
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_classes/,
- ],
- Foo => [
- qr/package DBICTest::DumpMore::1::Foo;/,
- qr/->set_primary_key/,
- qr/1;\n# XXX This is my custom content XXX/,
- ],
- Bar => [
- qr/package DBICTest::DumpMore::1::Bar;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ regexes => {
+ schema => [
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_classes/,
+ ],
+ Foo => [
+ qr/package DBICTest::DumpMore::1::Foo;/,
+ qr/->set_primary_key/,
+ qr/1;\n# XXX This is my custom content XXX/,
+ ],
+ Bar => [
+ qr/package DBICTest::DumpMore::1::Bar;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- really_erase_my_files => 1
- },
- regexes => {
- schema => [
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_classes/,
- ],
- Foo => [
- qr/package DBICTest::DumpMore::1::Foo;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- Bar => [
- qr/package DBICTest::DumpMore::1::Bar;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- },
- neg_regexes => {
- Foo => [
- qr/# XXX This is my custom content XXX/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ really_erase_my_files => 1
+ },
+ regexes => {
+ schema => [
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_classes/,
+ ],
+ Foo => [
+ qr/package DBICTest::DumpMore::1::Foo;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ Bar => [
+ qr/package DBICTest::DumpMore::1::Bar;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ },
+ neg_regexes => {
+ Foo => [
+ qr/# XXX This is my custom content XXX/,
+ ],
+ },
);
# test namespaces
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_namespaces => 1,
- generate_pod => 0
- },
- neg_regexes => {
- 'Result/Foo' => [
- qr/^=/m,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_namespaces => 1,
+ generate_pod => 0
+ },
+ neg_regexes => {
+ 'Result/Foo' => [
+ qr/^=/m,
+ ],
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => 'foo_schema',
- qualify_objects => 1,
- use_namespaces => 1
- },
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/Foo' => [
- qr/^\Q__PACKAGE__->table("foo_schema.foo");\E/m,
- # the has_many relname should not have the schema in it!
- qr/^__PACKAGE__->has_many\(\n "bars"/m,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => 'foo_schema',
+ qualify_objects => 1,
+ use_namespaces => 1
+ },
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
- },
+ regexes => {
+ 'Result/Foo' => [
+ qr/^\Q__PACKAGE__->table("foo_schema.foo");\E/m,
+ # the has_many relname should not have the schema in it!
+ qr/^__PACKAGE__->has_many\(\n "bars"/m,
+ ],
+ },
);
# test qualify_objects
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => [ 'foo_schema', 'bar_schema' ],
- qualify_objects => 0,
- use_namespaces => 1,
- },
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/Foo' => [
- # the table name should not include the db schema
- qr/^\Q__PACKAGE__->table("foo");\E/m,
- ],
- 'Result/Bar' => [
- # the table name should not include the db schema
- qr/^\Q__PACKAGE__->table("bar");\E/m,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => [ 'foo_schema', 'bar_schema' ],
+ qualify_objects => 0,
+ use_namespaces => 1,
+ },
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
- },
+ regexes => {
+ 'Result/Foo' => [
+ # the table name should not include the db schema
+ qr/^\Q__PACKAGE__->table("foo");\E/m,
+ ],
+ 'Result/Bar' => [
+ # the table name should not include the db schema
+ qr/^\Q__PACKAGE__->table("bar");\E/m,
+ ],
+ },
);
# test moniker_parts
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => 'my_schema',
- moniker_parts => ['_schema', 'name'],
- qualify_objects => 1,
- use_namespaces => 1,
- },
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/MySchemaFoo' => [
- qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
- # the has_many relname should not have the schema in it, but the class should
- qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchemaBar"/m,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => 'my_schema',
+ moniker_parts => ['_schema', 'name'],
+ qualify_objects => 1,
+ use_namespaces => 1,
+ },
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
- },
+ regexes => {
+ 'Result/MySchemaFoo' => [
+ qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
+ # the has_many relname should not have the schema in it, but the class should
+ qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchemaBar"/m,
+ ],
+ },
);
# test moniker_part_separator
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => 'my_schema',
- moniker_parts => ['_schema', 'name'],
- moniker_part_separator => '::',
- qualify_objects => 1,
- use_namespaces => 1,
- },
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/MySchema/Foo' => [
- qr/^package DBICTest::DumpMore::1::Result::MySchema::Foo;/m,
- qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
- # the has_many relname should not have the schema in it, but the class should
- qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchema::Bar"/m,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => 'my_schema',
+ moniker_parts => ['_schema', 'name'],
+ moniker_part_separator => '::',
+ qualify_objects => 1,
+ use_namespaces => 1,
+ },
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
- },
+ regexes => {
+ 'Result/MySchema/Foo' => [
+ qr/^package DBICTest::DumpMore::1::Result::MySchema::Foo;/m,
+ qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
+ # the has_many relname should not have the schema in it, but the class should
+ qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchema::Bar"/m,
+ ],
+ },
);
# test moniker_part_separator + moniker_map + recursive constraints
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => 'my_schema',
- moniker_parts => ['_schema', 'name'],
- moniker_part_separator => '::',
- qualify_objects => 1,
- use_namespaces => 1,
- moniker_map => {
- my_schema => { foo => "MySchema::Floop" },
- },
- constraint => [ [ qr/my_schema/ => qr/foo|bar/ ] ],
- exclude => [ [ qr/my_schema/ => qr/bar/ ] ],
- },
- generated_results => [qw(MySchema::Floop)],
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/MySchema/Floop' => [
- qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m,
- qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
- ],
- },
- neg_regexes => {
- 'Result/MySchema/Floop' => [
- # the bar table should not be loaded, so no relationship should exist
- qr/^__PACKAGE__->has_many\(\n "bars"/m,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => 'my_schema',
+ moniker_parts => ['_schema', 'name'],
+ moniker_part_separator => '::',
+ qualify_objects => 1,
+ use_namespaces => 1,
+ moniker_map => {
+ my_schema => { foo => "MySchema::Floop" },
+ },
+ constraint => [ [ qr/my_schema/ => qr/foo|bar/ ] ],
+ exclude => [ [ qr/my_schema/ => qr/bar/ ] ],
+ },
+ generated_results => [qw(MySchema::Floop)],
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
- },
+ regexes => {
+ 'Result/MySchema/Floop' => [
+ qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m,
+ qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
+ ],
+ },
+ neg_regexes => {
+ 'Result/MySchema/Floop' => [
+ # the bar table should not be loaded, so no relationship should exist
+ qr/^__PACKAGE__->has_many\(\n "bars"/m,
+ ],
+ },
);
# test moniker_map + moniker_part_map
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- db_schema => 'my_schema',
- moniker_parts => ['_schema', 'name'],
- moniker_part_separator => '::',
- moniker_part_map => {
- _schema => {
- my_schema => 'OtherSchema',
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ db_schema => 'my_schema',
+ moniker_parts => ['_schema', 'name'],
+ moniker_part_separator => '::',
+ moniker_part_map => {
+ _schema => {
+ my_schema => 'OtherSchema',
+ },
},
- },
- moniker_map => {
- my_schema => {
- foo => 'MySchema::Floop',
+ moniker_map => {
+ my_schema => {
+ foo => 'MySchema::Floop',
+ },
},
+ qualify_objects => 1,
+ use_namespaces => 1,
},
- qualify_objects => 1,
- use_namespaces => 1,
- },
- warnings => [
- qr/^db_schema is not supported on SQLite/,
- ],
- regexes => {
- 'Result/MySchema/Floop' => [
- qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m,
- qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
- # the has_many relname should not have the schema in it, but the class should
- qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::OtherSchema::Bar"/m,
- ],
- 'Result/OtherSchema/Bar' => [
- qr/^package DBICTest::DumpMore::1::Result::OtherSchema::Bar;/m,
- qr/^\Q__PACKAGE__->table("my_schema.bar");\E/m,
- # the has_many relname should not have the schema in it, but the class should
- qr/^__PACKAGE__->belongs_to\(\n "fooref",\n "DBICTest::DumpMore::1::Result::MySchema::Floop"/m,
+ warnings => [
+ qr/^db_schema is not supported on SQLite/,
],
+ regexes => {
+ 'Result/MySchema/Floop' => [
+ qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m,
+ qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m,
+ # the has_many relname should not have the schema in it, but the class should
+ qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::OtherSchema::Bar"/m,
+ ],
+ 'Result/OtherSchema/Bar' => [
+ qr/^package DBICTest::DumpMore::1::Result::OtherSchema::Bar;/m,
+ qr/^\Q__PACKAGE__->table("my_schema.bar");\E/m,
+ # the has_many relname should not have the schema in it, but the class should
+ qr/^__PACKAGE__->belongs_to\(\n "fooref",\n "DBICTest::DumpMore::1::Result::MySchema::Floop"/m,
+ ],
- },
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_namespaces => 1
- },
- regexes => {
- schema => [
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_namespaces/,
- ],
- 'Result/Foo' => [
- qr/package DBICTest::DumpMore::1::Result::Foo;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- 'Result/Bar' => [
- qr/package DBICTest::DumpMore::1::Result::Bar;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_namespaces => 1
+ },
+ regexes => {
+ schema => [
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_namespaces/,
+ ],
+ 'Result/Foo' => [
+ qr/package DBICTest::DumpMore::1::Result::Foo;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ 'Result/Bar' => [
+ qr/package DBICTest::DumpMore::1::Result::Bar;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_namespaces => 1,
- result_namespace => 'Res',
- resultset_namespace => 'RSet',
- default_resultset_class => 'RSetBase',
- },
- regexes => {
- schema => [
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_namespaces/,
- qr/result_namespace => "Res"/,
- qr/resultset_namespace => "RSet"/,
- qr/default_resultset_class => "RSetBase"/,
- ],
- 'Res/Foo' => [
- qr/package DBICTest::DumpMore::1::Res::Foo;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- 'Res/Bar' => [
- qr/package DBICTest::DumpMore::1::Res::Bar;/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_namespaces => 1,
+ result_namespace => 'Res',
+ resultset_namespace => 'RSet',
+ default_resultset_class => 'RSetBase',
+ },
+ regexes => {
+ schema => [
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_namespaces/,
+ qr/result_namespace => "Res"/,
+ qr/resultset_namespace => "RSet"/,
+ qr/default_resultset_class => "RSetBase"/,
+ ],
+ 'Res/Foo' => [
+ qr/package DBICTest::DumpMore::1::Res::Foo;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ 'Res/Bar' => [
+ qr/package DBICTest::DumpMore::1::Res::Bar;/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_namespaces => 1,
- result_namespace => '+DBICTest::DumpMore::1::Res',
- resultset_namespace => 'RSet',
- default_resultset_class => 'RSetBase',
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- regexes => {
- schema => [
- qr/package DBICTest::DumpMore::1;/,
- qr/->load_namespaces/,
- qr/result_namespace => "\+DBICTest::DumpMore::1::Res"/,
- qr/resultset_namespace => "RSet"/,
- qr/default_resultset_class => "RSetBase"/,
- qr/use base 'My::SchemaBaseClass'/,
- ],
- 'Res/Foo' => [
- qr/package DBICTest::DumpMore::1::Res::Foo;/,
- qr/use base 'My::ResultBaseClass'/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- 'Res/Bar' => [
- qr/package DBICTest::DumpMore::1::Res::Bar;/,
- qr/use base 'My::ResultBaseClass'/,
- qr/->set_primary_key/,
- qr/1;\n$/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_namespaces => 1,
+ result_namespace => '+DBICTest::DumpMore::1::Res',
+ resultset_namespace => 'RSet',
+ default_resultset_class => 'RSetBase',
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ regexes => {
+ schema => [
+ qr/package DBICTest::DumpMore::1;/,
+ qr/->load_namespaces/,
+ qr/result_namespace => "\+DBICTest::DumpMore::1::Res"/,
+ qr/resultset_namespace => "RSet"/,
+ qr/default_resultset_class => "RSetBase"/,
+ qr/use base 'My::SchemaBaseClass'/,
+ ],
+ 'Res/Foo' => [
+ qr/package DBICTest::DumpMore::1::Res::Foo;/,
+ qr/use base 'My::ResultBaseClass'/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ 'Res/Bar' => [
+ qr/package DBICTest::DumpMore::1::Res::Bar;/,
+ qr/use base 'My::ResultBaseClass'/,
+ qr/->set_primary_key/,
+ qr/1;\n$/,
+ ],
+ },
);
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_namespaces => 1,
- result_base_class => 'My::MissingResultBaseClass',
- },
- error => qr/My::MissingResultBaseClass.*is not installed/,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_namespaces => 1,
+ result_base_class => 'My::MissingResultBaseClass',
+ },
+ error => qr/My::MissingResultBaseClass.*is not installed/,
);
# test quote_char in connect_info for dbicdump
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- extra_connect_info => [
- '',
- '',
- { quote_char => '"' },
- ],
+ classname => 'DBICTest::DumpMore::1',
+ extra_connect_info => [
+ '',
+ '',
+ { quote_char => '"' },
+ ],
);
# test fix for RT#70507 (end comment and 1; gets lost if left with actual
$t->dump_test(
classname => 'DBICTest::DumpMore::omit_version',
options => {
- omit_version => 1,
+ omit_version => 1,
},
regexes => {
- Foo => [
- qr/^\# Created by DBIx::Class::Schema::Loader @ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d$/m,
- ],
+ Foo => [
+ qr/^\# Created by DBIx::Class::Schema::Loader @ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d$/m,
+ ],
},
);
$t->dump_test(
classname => 'DBICTest::DumpMore::omit_timestamp',
options => {
- omit_timestamp => 1,
+ omit_timestamp => 1,
},
regexes => {
- Foo => [
- qr/^\# Created by DBIx::Class::Schema::Loader v[\d.]+$/m,
- ],
+ Foo => [
+ qr/^\# Created by DBIx::Class::Schema::Loader v[\d.]+$/m,
+ ],
},
);
$t->dump_test(
classname => 'DBICTest::DumpMore::omit_both',
options => {
- omit_version => 1,
- omit_timestamp => 1,
+ omit_version => 1,
+ omit_timestamp => 1,
},
# A positive regex here would match the top comment
neg_regexes => {
- Foo => [
- qr/^\# Created by DBIx::Class::Schema::Loader.+$/m,
- ],
+ Foo => [
+ qr/^\# Created by DBIx::Class::Schema::Loader.+$/m,
+ ],
},
);
'dynamic schema in 0.04006 mode warning';
contains $warning, 'DBIx::Class::Schema::Loader::Manual::UpgradingFromV4',
'warning refers to upgrading doc';
-
+
run_v4_tests($res);
}
# first dump a fresh use_moose=1 schema
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_moose => 1,
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- result_roles => ['TestRole', 'TestRole2'],
- },
- regexes => {
- schema => [
- qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
- ],
- Foo => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/=head1 L<Moose> ROLES APPLIED\n\n=over 4\n\n=item \* L<TestRole>\n\n=item \* L<TestRole2>\n\n=back\n\n=cut\n\n/,
- qr/\nwith 'TestRole', 'TestRole2';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- ],
- Bar => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/=head1 L<Moose> ROLES APPLIED\n\n=over 4\n\n=item \* L<TestRole>\n\n=item \* L<TestRole2>\n\n=back\n\n=cut\n\n/,
- qr/\nwith 'TestRole', 'TestRole2';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_moose => 1,
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ result_roles => ['TestRole', 'TestRole2'],
+ },
+ regexes => {
+ schema => [
+ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
+ ],
+ Foo => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/=head1 L<Moose> ROLES APPLIED\n\n=over 4\n\n=item \* L<TestRole>\n\n=item \* L<TestRole2>\n\n=back\n\n=cut\n\n/,
+ qr/\nwith 'TestRole', 'TestRole2';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ ],
+ Bar => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/=head1 L<Moose> ROLES APPLIED\n\n=over 4\n\n=item \* L<TestRole>\n\n=item \* L<TestRole2>\n\n=back\n\n=cut\n\n/,
+ qr/\nwith 'TestRole', 'TestRole2';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ ],
+ },
);
$t->cleanup;
# check protect_overloads works as expected
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_moose => 1,
- only_autoclean => 1,
- },
- regexes => {
- schema => [
- qr/\nuse namespace::autoclean;\n/,
- ],
- Foo => [
- qr/\nuse namespace::autoclean;\n/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_moose => 1,
+ only_autoclean => 1,
+ },
+ regexes => {
+ schema => [
+ qr/\nuse namespace::autoclean;\n/,
+ ],
+ Foo => [
+ qr/\nuse namespace::autoclean;\n/,
+ ],
+ },
);
$t->cleanup;
# now upgrade a fresh non-moose schema to use_moose=1
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_moose => 0,
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- regexes => {
- schema => [
- qr/\nuse base 'My::SchemaBaseClass';\n/,
- ],
- Foo => [
- qr/\nuse base 'My::ResultBaseClass';\n/,
- ],
- Bar => [
- qr/\nuse base 'My::ResultBaseClass';\n/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_moose => 0,
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ regexes => {
+ schema => [
+ qr/\nuse base 'My::SchemaBaseClass';\n/,
+ ],
+ Foo => [
+ qr/\nuse base 'My::ResultBaseClass';\n/,
+ ],
+ Bar => [
+ qr/\nuse base 'My::ResultBaseClass';\n/,
+ ],
+ },
);
# check that changed custom content is upgraded for Moose bits
$t->append_to_class('DBICTest::DumpMore::1::Foo', q{# XXX This is my custom content XXX});
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_moose => 1,
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- regexes => {
- schema => [
- qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
- ],
- Foo => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- qr/# XXX This is my custom content XXX/,
- ],
- Bar => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_moose => 1,
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ regexes => {
+ schema => [
+ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
+ ],
+ Foo => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ qr/# XXX This is my custom content XXX/,
+ ],
+ Bar => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ ],
+ },
);
$t->cleanup;
# check with a fresh non-moose schema that Moose custom content added to a use_moose=0 schema is not repeated
$t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- regexes => {
- schema => [
- qr/\nuse base 'My::SchemaBaseClass';\n/,
- ],
- Foo => [
- qr/\nuse base 'My::ResultBaseClass';\n/,
- ],
- Bar => [
- qr/\nuse base 'My::ResultBaseClass';\n/,
- ],
- },
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ regexes => {
+ schema => [
+ qr/\nuse base 'My::SchemaBaseClass';\n/,
+ ],
+ Foo => [
+ qr/\nuse base 'My::ResultBaseClass';\n/,
+ ],
+ Bar => [
+ qr/\nuse base 'My::ResultBaseClass';\n/,
+ ],
+ },
);
# add Moose custom content then check it is not repeated
$t->append_to_class('DBICTest::DumpMore::1::Foo', qq{use Moose;\n__PACKAGE__->meta->make_immutable;\n1;\n});
for my $supply_use_moose (1, 0) {
- $t->dump_test(
- classname => 'DBICTest::DumpMore::1',
- options => {
- $supply_use_moose ? (use_moose => 1) : (),
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- regexes => {
- schema => [
- qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
- ],
- Foo => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- ],
- Bar => [
- qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
- qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
- ],
- },
- neg_regexes => {
- Foo => [
-# qr/\nuse Moose;\n.*\nuse Moose;/s, # TODO
- qr/\n__PACKAGE__->meta->make_immutable;\n.*\n__PACKAGE__->meta->make_immutable;/s,
- ],
- },
- );
+ $t->dump_test(
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ $supply_use_moose ? (use_moose => 1) : (),
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ regexes => {
+ schema => [
+ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/,
+ ],
+ Foo => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ ],
+ Bar => [
+ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/,
+ qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/,
+ ],
+ },
+ neg_regexes => {
+ Foo => [
+ # qr/\nuse Moose;\n.*\nuse Moose;/s, # TODO
+ qr/\n__PACKAGE__->meta->make_immutable;\n.*\n__PACKAGE__->meta->make_immutable;/s,
+ ],
+ },
+ );
}
# check that a moose schema can *not* be downgraded
$t->dump_test (
- classname => 'DBICTest::DumpMore::1',
- options => {
- use_moose => 0,
- result_base_class => 'My::ResultBaseClass',
- schema_base_class => 'My::SchemaBaseClass',
- },
- error => qr/\QIt is not possible to "downgrade" a schema that was loaded with use_moose => 1\E/,
+ classname => 'DBICTest::DumpMore::1',
+ options => {
+ use_moose => 0,
+ result_base_class => 'My::ResultBaseClass',
+ schema_base_class => 'My::SchemaBaseClass',
+ },
+ error => qr/\QIt is not possible to "downgrade" a schema that was loaded with use_moose => 1\E/,
);
done_testing;
}
$original_class_data{$class} = $text;
- if ($class =~ /::1$/) {
+ if ($class =~ /::1$/) {
$text = "No Gotcha!";
- }
+ }
else {
- $text .= q{my $foo = "Kilroy was here";};
- }
+ $text .= q{my $foo = "Kilroy was here";};
+ }
return $text;
},
);
'correct number of result files passed to filter';
my $foo = slurp_file "$dump_path/DBICTest/Schema/1/Result/Foo.pm";
-ok(! -e "$dump_path/DBICTest/Schema/1.pm",
- "No package means no file written");
-ok($original_class_data{"DBICTest::Schema::1"},
- "Even though we processed the missing class");
-like($foo, qr/# Created by .* THE FIRST PART/s,
- "We get the whole autogenerated text");
-like($foo, qr/Kilroy was here/, "Can insert text");
+ok ! -e "$dump_path/DBICTest/Schema/1.pm",
+ "No package means no file written";
+ok $original_class_data{"DBICTest::Schema::1"},
+ "Even though we processed the missing class";
+like $foo, qr/# Created by .* THE FIRST PART/s,
+ "We get the whole autogenerated text";
+like $foo, qr/Kilroy was here/, "Can insert text";
DBICTest::Schema::2->connect($make_dbictest_db::dsn);
my $regular = schema_with();
is( ref($regular->source('Bar')->relationship_info('fooref')), 'HASH',
'regularly-made schema has fooref rel',
- );
+);
my $skip_rel = schema_with( skip_relationships => 1 );
is_deeply( $skip_rel->source('Bar')->relationship_info('fooref'), undef,
'skip_relationships blocks generation of fooref rel',
- );
+);
# test hashref as rel_name_map
my $hash_relationship = schema_with(
is( ref($hash_relationship->source('Foo')->relationship_info('got_bars')),
'HASH',
'single level hash in rel_name_map picked up correctly'
- );
+);
is( ref($hash_relationship->source('Bar')->relationship_info('got_fooref')),
'HASH',
'double level hash in rel_name_map picked up correctly'
- );
+);
# test coderef as rel_name_map
my $code_relationship = schema_with(
is_deeply(
$args,
{
- name => 'bars',
- type => 'has_many',
+ name => 'bars',
+ type => 'has_many',
local_class =>
"DBICTest::Schema::${schema_counter}::Result::Foo",
- local_moniker => 'Foo',
- local_columns => ['fooid'],
+ local_moniker => 'Foo',
+ local_columns => ['fooid'],
remote_class =>
"DBICTest::Schema::${schema_counter}::Result::Bar",
- remote_moniker => 'Bar',
- remote_columns => ['fooref'],
- },
- 'correct args for Foo passed'
- );
+ remote_moniker => 'Bar',
+ remote_columns => ['fooref'],
+ },
+ 'correct args for Foo passed'
+ );
}
- elsif ($args->{local_moniker} eq 'Bar') {
+ elsif ($args->{local_moniker} eq 'Bar') {
is_deeply(
$args,
{
- name => 'fooref',
- type => 'belongs_to',
+ name => 'fooref',
+ type => 'belongs_to',
local_class =>
"DBICTest::Schema::${schema_counter}::Result::Bar",
- local_moniker => 'Bar',
- local_columns => ['fooref'],
+ local_moniker => 'Bar',
+ local_columns => ['fooref'],
remote_class =>
"DBICTest::Schema::${schema_counter}::Result::Foo",
- remote_moniker => 'Foo',
- remote_columns => ['fooid'],
- },
- 'correct args for Foo passed'
- );
- }
+ remote_moniker => 'Foo',
+ remote_columns => ['fooid'],
+ },
+ 'correct args for Foo passed'
+ );
+ }
else {
fail( 'correct args passed to rel_name_map' );
diag "args were: ", explain $args;
}
- return $orig->({
- Bar => { fooref => 'fooref_caught' },
- Foo => { bars => 'bars_caught' },
- });
+ return $orig->({
+ Bar => { fooref => 'fooref_caught' },
+ Foo => { bars => 'bars_caught' },
+ });
}
- );
+);
is( ref($code_relationship->source('Foo')->relationship_info('bars_caught')),
'HASH',
'rel_name_map overrode local_info correctly'
- );
+);
is( ref($code_relationship->source('Bar')->relationship_info('fooref_caught')),
'HASH',
'rel_name_map overrode remote_info correctly'
- );
+);
throws_ok {
schema_with( rel_name_map => sub { $_[-1]->(sub{}) } ),
} qr/relationship_attrs/, 'throws error for invalid (arrayref) relationship_attrs';
{
- my $nodelete = schema_with( relationship_attrs =>
- {
- all => { cascade_delete => 0 },
- belongs_to => { cascade_delete => 1 },
- },
- );
+ my $nodelete = schema_with( relationship_attrs => {
+ all => { cascade_delete => 0 },
+ belongs_to => { cascade_delete => 1 },
+ });
my $bars_info = $nodelete->source('Foo')->relationship_info('bars');
#use Data::Dumper;
#die Dumper([ $nodelete->source('Foo')->relationships() ]);
my $fooref_info = $nodelete->source('Bar')->relationship_info('fooref');
is( ref($fooref_info), 'HASH',
- 'fooref rel is present',
- );
+ 'fooref rel is present',
+ );
is( $bars_info->{attrs}->{cascade_delete}, 0,
- 'relationship_attrs settings seem to be getting through to the generated rels',
- );
+ 'relationship_attrs settings seem to be getting through to the generated rels',
+ );
is( $fooref_info->{attrs}->{cascade_delete}, 1,
- 'belongs_to in relationship_attrs overrides all def',
- );
+ 'belongs_to in relationship_attrs overrides all def',
+ );
}
# test relationship_attrs coderef
like $p{remote_source}->result_class,
qr/^DBICTest::Schema::\d+::Result::Bar\z/,
'correct remote source';
-
+
$p{attrs}{snoopy} = 1;
return $p{attrs};
like $p{remote_source}->result_class,
qr/^DBICTest::Schema::\d+::Result::Foo\z/,
'correct remote source';
-
+
$p{attrs}{scooby} = 1;
return $p{attrs};
sub run_test_sequence {
my %p = @_;
- die "specify a $_ test param" for grep !$p{$_},
+ die "specify a $_ test param" for grep !$p{$_},
qw( testname schema_opts schema_class foo_class );
- my $schema;
+ my $schema;
lives_ok { $schema = make_schema_with(%p) } "($p{testname}) get schema";
SKIP: {
lives_ok {
$foo_rs = $schema->resultset('Foo')->search();
} "($p{testname}) get a ResultSet for Foo";
-
+
# get a foo
my $foo;
lives_ok {
$foo = $foo_rs->first();
} "($p{testname}) get the first foo";
-
+
ok(defined $foo, "($p{testname}) \$foo is defined");
-
+
SKIP: {
skip 'foo is not defined', 3 unless defined $foo;
-
+
isa_ok $foo, $p{foo_class};
-
+
# call the file-defined method
my $biz;
lives_ok {
$biz = $foo->biz();
} "($p{testname}) call the file-defined Foo->biz method";
-
+
SKIP: {
skip 'no point in checking value if method was not found', 1 unless defined $biz;
-
+
ok(
- $biz eq 'foo bar biz baz boz noz schnozz',
+ $biz eq 'foo bar biz baz boz noz schnozz',
"($p{testname}) biz() method returns correct string"
);
}
}
}
}
-
+
sub make_schema_with {
my %p = @_;
return DBIx::Class::Schema::Loader::make_schema_at(
make_schema_at(
'DBICTest::Schema::_test_schema_base',
{
- naming => 'current',
- schema_base_class => 'TestSchemaBaseClass',
+ naming => 'current',
+ schema_base_class => 'TestSchemaBaseClass',
schema_components => ['TestSchemaComponent'],
},
[ $make_dbictest_db::dsn ],
# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
BEGIN {
- if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
- unshift @INC, $_ for split /:/, $lib_dirs;
- }
+ if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
+ unshift @INC, $_ for split /:/, $lib_dirs;
+ }
}
my ($dsn, $user, $pass);
for (qw/MSSQL_ODBC MSSQL_ADO MSSQL/) {
- next unless $ENV{"DBICTEST_${_}_DSN"};
+ next unless $ENV{"DBICTEST_${_}_DSN"};
- $dsn = $ENV{"DBICTEST_${_}_DSN"};
- $user = $ENV{"DBICTEST_${_}_USER"};
- $pass = $ENV{"DBICTEST_${_}_PASS"};
+ $dsn = $ENV{"DBICTEST_${_}_DSN"};
+ $user = $ENV{"DBICTEST_${_}_USER"};
+ $pass = $ENV{"DBICTEST_${_}_PASS"};
- last;
+ last;
}
plan skip_all => 'perl 5.8 required for this test'
dbixcsl_common_tests->new(
vendor => 'SQLite',
auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY',
- dsn => "dbi:$class:dbname=$tdir/sqlite_test",
+ dsn => "dbi:$class:dbname=$tdir/sqlite_test.db",
user => '',
password => '',
)->run_tests;
END {
- unlink "$tdir/sqlite_test" if $ENV{SCHEMA_LOADER_TESTS_BACKCOMPAT};
+ unlink "$tdir/sqlite_test.db" if $ENV{SCHEMA_LOADER_TESTS_BACKCOMPAT};
}
my ($table, $col) = @_;
return (
qq{ CREATE SEQUENCE ${table}_${col}_seq START WITH 1 INCREMENT BY 1},
- qq{
+ qq{
CREATE OR REPLACE TRIGGER ${table}_${col}_trigger
BEFORE INSERT ON ${table}
FOR EACH ROW
DBICTest::Schema::1->_loader_invoked(undef);
SKIP: {
- my @warnings_regexes = (
- qr|Dumping manual schema|,
- qr|Schema dump completed|,
- );
+ my @warnings_regexes = (
+ qr|Dumping manual schema|,
+ qr|Schema dump completed|,
+ );
- skip "ActiveState perl produces additional warnings", scalar @warnings_regexes
- if ($^O eq 'MSWin32');
+ skip "ActiveState perl produces additional warnings", scalar @warnings_regexes
+ if ($^O eq 'MSWin32');
- my @warn_output;
- {
- local $SIG{__WARN__} = sub { push(@warn_output, @_) };
- DBICTest::Schema::1->connect($make_dbictest_db::dsn);
- }
+ my @warn_output;
+ {
+ local $SIG{__WARN__} = sub { push(@warn_output, @_) };
+ DBICTest::Schema::1->connect($make_dbictest_db::dsn);
+ }
- like(shift @warn_output, $_) foreach (@warnings_regexes);
+ like(shift @warn_output, $_) foreach (@warnings_regexes);
- rmtree($dump_path, 1, 1);
+ rmtree($dump_path, 1, 1);
}
eval { DBICTest::Schema::2->connect($make_dbictest_db::dsn) };
my $file_regexes = $tdata{regexes};
my $file_neg_regexes = $tdata{neg_regexes} || {};
my $schema_regexes = delete $file_regexes->{schema};
-
+
my $schema_path = $DUMP_PATH . '/' . $schema_class;
$schema_path =~ s{::}{/}g;
dump_file_like($schema_path . '.pm', @$schema_regexes);
# Only MySQL uses this
$self->{innodb} ||= '';
-
+
$self->{verbose} = $ENV{TEST_VERBOSE} || 0;
return bless $self => $class;
eval qq{
package $schema_class;
use base qw/DBIx::Class::Schema::Loader/;
-
+
__PACKAGE__->loader_options(\%loader_opts);
__PACKAGE__->connection(\@connect_info);
};
my $rs_rel17 = $obj16->search_related('loader_test17_loader16_ones');
isa_ok($rs_rel17->first, $class17);
is($rs_rel17->first->id, 3);
-
+
# XXX test m:m 18 <- 20 -> 19
-
+
# XXX test double-fk m:m 21 <- 22 -> 21
# test double multi-col fk 26 -> 25
my $class11 = $classes->{loader_test11};
my $rsobj11 = $conn->resultset($moniker11);
- isa_ok( $rsobj10, "DBIx::Class::ResultSet" );
+ isa_ok( $rsobj10, "DBIx::Class::ResultSet" );
isa_ok( $rsobj11, "DBIx::Class::ResultSet" );
my $obj10 = $rsobj10->create({ subject => 'xyzzy' });
SKIP: {
skip 'Previous eval block failed', 3
unless ($@ eq '');
-
+
my $results = $rsobj10->search({ subject => 'xyzzy' });
is( $results->count(), 1,
'One $rsobj10 returned from search' );
my $class13 = $classes->{loader_test13};
my $rsobj13 = $conn->resultset($moniker13);
- isa_ok( $rsobj12, "DBIx::Class::ResultSet" );
+ isa_ok( $rsobj12, "DBIx::Class::ResultSet" );
isa_ok( $rsobj13, "DBIx::Class::ResultSet" );
my $obj13 = $rsobj13->find(1);
my $class15 = $classes->{loader_test15};
my $rsobj15 = $conn->resultset($moniker15);
- isa_ok( $rsobj14, "DBIx::Class::ResultSet" );
+ isa_ok( $rsobj14, "DBIx::Class::ResultSet" );
isa_ok( $rsobj15, "DBIx::Class::ResultSet" );
my $obj15 = $rsobj15->find(1);
$make_auto_inc->(qw/loader_test1 id/),
q{ INSERT INTO loader_test1 (dat) VALUES('foo') },
- q{ INSERT INTO loader_test1 (dat) VALUES('bar') },
- q{ INSERT INTO loader_test1 (dat) VALUES('baz') },
+ q{ INSERT INTO loader_test1 (dat) VALUES('bar') },
+ q{ INSERT INTO loader_test1 (dat) VALUES('baz') },
- qq{
+ qq{
CREATE TABLE loader_test2 (
id $self->{auto_inc_pk},
dat VARCHAR(32) NOT NULL,
},
$make_auto_inc->(qw/loader_test2 id/),
- q{ INSERT INTO loader_test2 (dat, dat2) VALUES('aaa', 'zzz') },
- q{ INSERT INTO loader_test2 (dat, dat2) VALUES('bbb', 'yyy') },
- q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ccc', 'xxx') },
- q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ddd', 'www') },
+ q{ INSERT INTO loader_test2 (dat, dat2) VALUES('aaa', 'zzz') },
+ q{ INSERT INTO loader_test2 (dat, dat2) VALUES('bbb', 'yyy') },
+ q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ccc', 'xxx') },
+ q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ddd', 'www') },
qq{
CREATE TABLE LOADER_TEST23 (
) $self->{innodb}
},
- q{ INSERT INTO loader_test3 (id,dat) VALUES(1,'aaa') },
- q{ INSERT INTO loader_test3 (id,dat) VALUES(2,'bbb') },
- q{ INSERT INTO loader_test3 (id,dat) VALUES(3,'ccc') },
- q{ INSERT INTO loader_test3 (id,dat) VALUES(4,'ddd') },
+ q{ INSERT INTO loader_test3 (id,dat) VALUES(1,'aaa') },
+ q{ INSERT INTO loader_test3 (id,dat) VALUES(2,'bbb') },
+ q{ INSERT INTO loader_test3 (id,dat) VALUES(3,'ccc') },
+ q{ INSERT INTO loader_test3 (id,dat) VALUES(4,'ddd') },
qq{
CREATE TABLE loader_test4 (
},
q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(123,1,'aaa') },
- q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(124,2,'bbb') },
+ q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(124,2,'bbb') },
q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(125,3,'ccc') },
q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(126,4,'ddd') },
q{ INSERT INTO loader_test22 (parent, child) VALUES (11,13)},
q{ INSERT INTO loader_test22 (parent, child) VALUES (13,17)},
- qq{
+ qq{
CREATE TABLE loader_test25 (
id1 INTEGER NOT NULL,
id2 INTEGER NOT NULL,
dat VARCHAR(8)
) $self->{innodb}
},
-
+
q{ INSERT INTO loader_test14 (id,dat) VALUES (123,'aaa') },
qq{
LOADER_TEST23
LoAdEr_test24
/;
-
+
my @tables_auto_inc = (
[ qw/loader_test1 id/ ],
[ qw/loader_test2 id/ ],
loader_test11
loader_test10
/;
-
+
my @tables_advanced_auto_inc = (
[ qw/loader_test10 id10/ ],
[ qw/loader_test11 id11/ ],
our @EXPORT_OK = '$tdir';
die "/t does not exist, this can't be right...\n"
- unless -d 't';
+ unless -d 't';
my $tbdir = 't/var';
unless (-d $tbdir) {
- mkdir $tbdir or die "Unable to create $tbdir: $!\n";
+ mkdir $tbdir or die "Unable to create $tbdir: $!\n";
}
our $tdir = tempdir(DIR => $tbdir);
my $self;
if( ref($_[0]) eq 'HASH') {
- my $args = shift;
- $self = { (%$args) };
+ my $args = shift;
+ $self = { (%$args) };
}
else {
- $self = { @_ };
+ $self = { @_ };
}
# Only MySQL uses this
{
my @loader_warnings;
local $SIG{__WARN__} = sub { push(@loader_warnings, @_); };
- eval qq{
- package @{[SCHEMA_CLASS]};
- use base qw/DBIx::Class::Schema::Loader/;
+ eval qq{
+ package @{[SCHEMA_CLASS]};
+ use base qw/DBIx::Class::Schema::Loader/;
- __PACKAGE__->loader_options(\%loader_opts);
- __PACKAGE__->connection(\@\$connect_info);
- };
+ __PACKAGE__->loader_options(\%loader_opts);
+ __PACKAGE__->connection(\@\$connect_info);
+ };
ok(!$@, "Loader initialization") or diag $@;
foreach my $ucname (keys %uniq1) {
my $cols_arrayref = $uniq1{$ucname};
if(@$cols_arrayref == 1 && $cols_arrayref->[0] eq 'dat') {
- $uniq1_test = 1;
- last;
+ $uniq1_test = 1;
+ last;
}
}
ok($uniq1_test, "Unique constraint");
my $uniq2_test = 0;
foreach my $ucname (keys %uniq2) {
my $cols_arrayref = $uniq2{$ucname};
- if(@$cols_arrayref == 2
- && $cols_arrayref->[0] eq 'dat2'
- && $cols_arrayref->[1] eq 'dat') {
+ if (@$cols_arrayref == 2
+ && $cols_arrayref->[0] eq 'dat2'
+ && $cols_arrayref->[1] eq 'dat'
+ ) {
$uniq2_test = 2;
last;
}
is $obj5->i_d2, 1, 'Find on multi-col PK';
}
else {
- my $obj5 = $rsobj5->find({id1 => 1, id2 => 1});
+ my $obj5 = $rsobj5->find({id1 => 1, id2 => 1});
is $obj5->id2, 1, 'Find on multi-col PK';
}
ok($class6->column_info('loader_test2_id')->{is_foreign_key}, 'Foreign key detected');
ok($class6->column_info('id')->{is_foreign_key}, 'Foreign key detected');
- my $id2_info = try { $class6->column_info('id2') } ||
- $class6->column_info('Id2');
+ my $id2_info = try { $class6->column_info('id2') } ||
+ $class6->column_info('Id2');
ok($id2_info->{is_foreign_key}, 'Foreign key detected');
unlike slurp_file $conn->_loader->get_dump_filename($class6),
-qr/\n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
- \s+ "(\w+?)"
- .*?
- \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
- \s+ "\1"/xs,
-'did not create two relationships with the same name';
+ qr{
+ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
+ \s+ "(\w+?)"
+ .*?
+ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
+ \s+ "\1"
+ }xs,
+ 'did not create two relationships with the same name';
unlike slurp_file $conn->_loader->get_dump_filename($class8),
-qr/\n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
- \s+ "(\w+?)"
- .*?
- \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
- \s+ "\1"/xs,
-'did not create two relationships with the same name';
+ qr{
+ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
+ \s+ "(\w+?)"
+ .*?
+ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\(
+ \s+ "\1"
+ }xs,
+ 'did not create two relationships with the same name';
# check naming of ambiguous relationships
my $rel_info = $class6->relationship_info('lovely_loader_test7') || {};
# test outer join for nullable referring columns:
is $class32->column_info('rel2')->{is_nullable}, 1,
- 'is_nullable detection';
+ 'is_nullable detection';
ok($class32->column_info('rel1')->{is_foreign_key}, 'Foreign key detected');
ok($class32->column_info('rel2')->{is_foreign_key}, 'Foreign key detected');
my $obj30 = try { $rsobj30->find(123) } || $rsobj30->search({ id => 123 })->single;
isa_ok( $obj30->loader_test2, $class2);
- ok($rsobj30->result_source->column_info('loader_test2')->{is_foreign_key},
- 'Foreign key detected');
+ ok $rsobj30->result_source->column_info('loader_test2')->{is_foreign_key},
+ 'Foreign key detected';
}
$conn->storage->disconnect; # for Firebird
ok eval {
my %opts = (
- naming => 'current',
- constraint => $self->CONSTRAINT,
- dump_directory => DUMP_DIR,
- debug => ($ENV{SCHEMA_LOADER_TESTS_DEBUG}||0)
+ naming => 'current',
+ constraint => $self->CONSTRAINT,
+ dump_directory => DUMP_DIR,
+ debug => ($ENV{SCHEMA_LOADER_TESTS_DEBUG}||0)
);
my $guard = $conn->txn_scope_guard;
q{ INSERT INTO loader_test22 (parent, child) VALUES (11,13)},
q{ INSERT INTO loader_test22 (parent, child) VALUES (13,17)},
- qq{
+ qq{
CREATE TABLE loader_test25 (
id1 INTEGER NOT NULL,
id2 INTEGER NOT NULL,
sub DESTROY {
my $self = shift;
unless ($ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) {
- $self->drop_tables if $self->{_created};
- rmtree DUMP_DIR
+ $self->drop_tables if $self->{_created};
+ rmtree DUMP_DIR
}
}
no strict 'refs';
@{$schema_class . '::ISA'} = ('DBIx::Class::Schema::Loader');
$schema_class->loader_options(
- quiet => 1,
- %{$tdata{options}},
+ quiet => 1,
+ %{$tdata{options}},
);
my @warns;
my $check_warns = $tdata{warnings};
is(@$warns, @$check_warns, "$schema_class warning count")
- or diag @$warns;
+ or diag @$warns;
for(my $i = 0; $i <= $#$check_warns; $i++) {
like(($warns->[$i] || ''), $check_warns->[$i], "$schema_class warning $i");
our @EXPORT_OK = '$tdir';
die "/t does not exist, this can't be right...\n"
- unless -d 't';
+ unless -d 't';
my $tbdir = 't/var';
unless (-d $tbdir) {
- mkdir $tbdir or die "Unable to create $tbdir: $!\n";
+ mkdir $tbdir or die "Unable to create $tbdir: $!\n";
}
our $tdir = tempdir(DIR => $tbdir);
foo_id INTEGER NOT NULL REFERENCES foo(foo_id),
bar_id INTEGER NOT NULL REFERENCES bar(bar_id),
PRIMARY KEY (foo_id, bar_id)
- )|,
+ )|,
q|INSERT INTO foo (foo_id) VALUES (1)|,
q|INSERT INTO foo (foo_id) VALUES (2)|,
q|INSERT INTO bar (bar_id) VALUES (1)|,
q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (1,2)|,
q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (2,1)|,
q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (2,2)|,
- );
+);
END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; }
--- /dev/null
+use DBIx::Class::Schema::Loader::Optional::Dependencies
+ -skip_all_without => 'test_whitespace';
+
+use warnings;
+use strict;
+
+use Test::More;
+use File::Glob 'bsd_glob';
+use lib 't/lib';
+
+# FIXME - temporary workaround for RT#82032, RT#82033
+# also add all scripts (no extension) and some extra extensions
+# we want to check
+{
+ no warnings 'redefine';
+ my $is_pm = sub {
+ $_[0] !~ /\./ || $_[0] =~ /\.(?:pm|pod|skip|bash|sql|json|proto)$/i || $_[0] =~ /::/;
+ };
+
+ *Test::EOL::_is_perl_module = $is_pm;
+ *Test::NoTabs::_is_perl_module = $is_pm;
+}
+
+my @pl_targets = qw/t xt lib script maint/;
+Test::EOL::all_perl_files_ok({ trailing_whitespace => 1 }, @pl_targets);
+Test::NoTabs::all_perl_files_ok(@pl_targets);
+
+# check some non-"perl files" in the root separately
+# use .gitignore as a guide of what to skip
+# (or do not test at all if no .gitignore is found)
+if (open(my $gi, '<', '.gitignore')) {
+ my $skipnames;
+ while (my $ln = <$gi>) {
+ next if $ln =~ /^\s*$/;
+ chomp $ln;
+ $ln =~ s{^/}{};
+ $skipnames->{$_}++ for bsd_glob($ln);
+ }
+
+ # that we want to check anyway
+ delete $skipnames->{'META.yml'};
+
+ for my $fn (bsd_glob('*')) {
+ next if $skipnames->{$fn};
+ next unless -f $fn;
+ Test::EOL::eol_unix_ok($fn, { trailing_whitespace => 1 });
+ Test::NoTabs::notabs_ok($fn);
+ }
+}
+
+# FIXME - Test::NoTabs and Test::EOL declare 'no_plan' which conflicts with done_testing
+# https://github.com/schwern/test-more/issues/14
+#done_testing;