### This may look crazy, but it in fact tangibly ( by 50(!)% ) shortens
# the skip-test time when everything requested is unavailable
-use if $ENV{RELEASE_TESTING} => 'warnings';
-use if $ENV{RELEASE_TESTING} => 'strict';
+BEGIN {
+ if ( $ENV{RELEASE_TESTING} ) {
+ require warnings and warnings->import;
+ require strict and strict->import;
+ }
+}
sub croak {
require Carp;
# POD is generated automatically by calling _gen_pod from the
# Makefile.PL in $AUTHOR mode
-# NOTE: the rationale for 2 JSON::Any versions is that
-# we need the newer only to work around JSON::XS, which
-# itself is an optional dep
-my $min_json_any = {
- 'JSON::Any' => '1.23',
-};
-my $test_and_dist_json_any = {
- 'JSON::Any' => '1.31',
-};
-
+# *DELIBERATELY* not making a group for these - they must disappear
+# forever as optdeps in the first place
my $moose_basic = {
'Moose' => '0.98',
'MooseX::Types' => '0.21',
};
my $dbic_reqs = {
+
+ # NOTE: the rationale for 2 JSON::Any versions is that
+ # we need the newer only to work around JSON::XS, which
+ # itself is an optional dep
+ _json_any => {
+ req => {
+ 'JSON::Any' => '1.23',
+ },
+ },
+
+ _json_xs_compatible_json_any => {
+ req => {
+ 'JSON::Any' => '1.31',
+ },
+ },
+
+ # a common placeholder for engines with IC::DT support based off DT::F::S
+ _ic_dt_strptime_based => {
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::Strptime' => '1.2',
+ },
+ },
+ }
+ },
+
+ _rdbms_generic_odbc => {
+ req => {
+ 'DBD::ODBC' => 0,
+ }
+ },
+
+ _rdbms_generic_ado => {
+ req => {
+ 'DBD::ADO' => 0,
+ }
+ },
+
+ # must list any dep used by adhoc testing
+ # this prevents the "skips due to forgotten deps" issue
+ test_adhoc => {
+ req => {
+ 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
+ 'Class::DBI' => '3.000005',
+ 'Date::Simple' => '3.03',
+ 'YAML' => '0',
+ 'Class::Unload' => '0.07',
+ 'Time::Piece' => '0',
+ 'Time::Piece::MySQL' => '0',
+ 'DBD::mysql' => '4.023',
+ },
+ },
+
replicated => {
req => $moose_basic,
pod => {
},
},
+ config_file_reader => {
+ pod => {
+ title => 'Generic config reader',
+ desc => 'Modules required for generic config file parsing, currently Config::Any (rarely used at runtime)',
+ },
+ req => {
+ 'Config::Any' => '0.20',
+ },
+ },
+
admin => {
+ include => [qw( _json_any config_file_reader )],
req => {
%$moose_basic,
- %$min_json_any,
'MooseX::Types::Path::Class' => '0.05',
'MooseX::Types::JSON' => '0.02',
},
},
},
+ ic_file => {
+ req => {
+ 'Path::Class' => '0.18',
+ },
+ pod => {
+ title => 'DBIx::Class::InflateColumn::File (Deprecated)',
+ desc => 'Modules required for the deprecated L<DBIx::Class::InflateColumn::File>',
+ },
+ },
+
+ ic_dt => {
+ req => {
+ 'DateTime' => '0.55',
+ 'DateTime::TimeZone::OlsonDB' => 0,
+ },
+ pod => {
+ title => 'InflateColumn::DateTime support',
+ desc =>
+ 'Modules required for L<DBIx::Class::InflateColumn::DateTime>. '
+ . 'Note that this group does not require much on its own, but '
+ . 'instead is augmented by various RDBMS-specific groups. See the '
+      . 'documentation of each C<rdbms_*> group for details',
+ },
+ },
+
id_shortener => {
req => {
'Math::BigInt' => '1.80',
},
},
- test_component_accessor => {
+ cdbicompat => {
req => {
- 'Class::Unload' => '0.07',
+ 'Class::Data::Inheritable' => '0',
+ 'Class::Trigger' => '0',
+ 'DBIx::ContextualFetch' => '0',
+ 'Clone' => '0.32',
+ },
+ pod => {
+ title => 'DBIx::Class::CDBICompat support',
+ desc => 'Modules required for L<DBIx::Class::CDBICompat>'
},
},
},
test_prettydebug => {
- req => $min_json_any,
+ include => '_json_any',
},
test_admin_script => {
- include => 'admin_script',
+ include => [qw( admin_script _json_xs_compatible_json_any )],
req => {
- %$test_and_dist_json_any,
'JSON' => 0,
'JSON::PP' => 0,
'Cpanel::JSON::XS' => 0,
},
},
- test_dt => {
- req => {
- 'DateTime' => '0.55',
- 'DateTime::Format::Strptime' => '1.2',
- },
- },
-
- test_dt_sqlite => {
- include => 'test_dt',
- req => {
- # t/36datetime.t
- # t/60core.t
- 'DateTime::Format::SQLite' => '0',
- },
- },
-
- test_dt_mysql => {
- include => 'test_dt',
- req => {
- # t/inflate/datetime_mysql.t
- # (doesn't need Mysql itself)
- 'DateTime::Format::MySQL' => '0',
- },
- },
-
- test_dt_pg => {
- include => 'test_dt',
- req => {
- # t/inflate/datetime_pg.t
- # (doesn't need PG itself)
- 'DateTime::Format::Pg' => '0.16004',
- },
- },
-
- test_cdbicompat => {
- include => 'test_dt',
- req => {
- 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
- 'Time::Piece::MySQL' => '0',
- 'Date::Simple' => '3.03',
+ binary_data => {
+ pod => {
+ title => 'Binary datatype support (certain RDBMS)',
+ desc =>
+ 'Some RDBMS engines require specific versions of the respective DBD '
+ . 'driver for binary data support. Note that this group does not '
+ . 'require anything on its own, but instead is augmented by various '
+        . 'RDBMS-specific groups. See the documentation of each rdbms_* group '
+ . 'for details',
},
},
- rdbms_generic_odbc => {
- req => {
- 'DBD::ODBC' => 0,
- }
- },
-
- rdbms_generic_ado => {
- req => {
- 'DBD::ADO' => 0,
- }
- },
-
# this is just for completeness as SQLite
# is a core dep of DBIC for testing
rdbms_sqlite => {
title => 'SQLite support',
desc => 'Modules required to connect to SQLite',
},
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::SQLite' => '0',
+ },
+ },
+ },
+ },
+
+ # centralize the specification, as we have ICDT tests which can
+ # test the full behavior of RDBMS-specific ICDT on top of bare SQLite
+ _ic_dt_pg_base => {
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::Pg' => '0.16004',
+ },
+ },
+ },
+ },
+
+ ic_dt_pg => {
+ include => [qw( ic_dt _ic_dt_pg_base )],
},
rdbms_pg => {
+ include => '_ic_dt_pg_base',
req => {
- # when changing this list make sure to adjust xt/optional_deps.t
'DBD::Pg' => 0,
},
pod => {
title => 'PostgreSQL support',
desc => 'Modules required to connect to PostgreSQL',
},
+ augment => {
+ binary_data => {
+ req => {
+ 'DBD::Pg' => '2.009002'
+ },
+ }
+ },
+ },
+
+ _rdbms_mssql_common => {
+ include => '_ic_dt_strptime_based',
},
rdbms_mssql_odbc => {
- include => 'rdbms_generic_odbc',
+ include => [qw( _rdbms_generic_odbc _rdbms_mssql_common )],
pod => {
title => 'MSSQL support via DBD::ODBC',
desc => 'Modules required to connect to MSSQL via DBD::ODBC',
},
rdbms_mssql_sybase => {
+ include => '_rdbms_mssql_common',
req => {
'DBD::Sybase' => 0,
},
},
rdbms_mssql_ado => {
- include => 'rdbms_generic_ado',
+ include => [qw( _rdbms_generic_ado _rdbms_mssql_common )],
pod => {
title => 'MSSQL support via DBD::ADO (Windows only)',
desc => 'Modules required to connect to MSSQL via DBD::ADO. This particular DBD is available on Windows only',
},
},
+ _rdbms_msaccess_common => {
+ include => '_ic_dt_strptime_based',
+ },
+
rdbms_msaccess_odbc => {
- include => 'rdbms_generic_odbc',
+ include => [qw( _rdbms_generic_odbc _rdbms_msaccess_common )],
pod => {
title => 'MS Access support via DBD::ODBC',
desc => 'Modules required to connect to MS Access via DBD::ODBC',
},
rdbms_msaccess_ado => {
- include => 'rdbms_generic_ado',
+ include => [qw( _rdbms_generic_ado _rdbms_msaccess_common )],
pod => {
title => 'MS Access support via DBD::ADO (Windows only)',
desc => 'Modules required to connect to MS Access via DBD::ADO. This particular DBD is available on Windows only',
},
},
+ # centralize the specification, as we have ICDT tests which can
+ # test the full behavior of RDBMS-specific ICDT on top of bare SQLite
+ _ic_dt_mysql_base => {
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::MySQL' => '0',
+ },
+ },
+ },
+ },
+
+ ic_dt_mysql => {
+ include => [qw( ic_dt _ic_dt_mysql_base )],
+ },
+
rdbms_mysql => {
+ include => '_ic_dt_mysql_base',
req => {
'DBD::mysql' => 0,
},
title => 'Oracle support',
desc => 'Modules required to connect to Oracle',
},
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::Oracle' => '0',
+ },
+ },
+ },
},
rdbms_ase => {
+ include => '_ic_dt_strptime_based',
req => {
'DBD::Sybase' => 0,
},
},
},
+ _rdbms_db2_common => {
+ augment => {
+ ic_dt => {
+ req => {
+ 'DateTime::Format::DB2' => '0',
+ },
+ },
+ },
+ },
+
rdbms_db2 => {
+ include => '_rdbms_db2_common',
req => {
'DBD::DB2' => 0,
},
},
rdbms_db2_400 => {
- include => 'rdbms_generic_odbc',
+ include => [qw( _rdbms_generic_odbc _rdbms_db2_common )],
pod => {
title => 'DB2 on AS/400 support',
desc => 'Modules required to connect to DB2 on AS/400',
},
rdbms_informix => {
+ include => '_ic_dt_strptime_based',
req => {
'DBD::Informix' => 0,
},
},
},
+ _rdbms_sqlanywhere_common => {
+ include => '_ic_dt_strptime_based',
+ },
+
rdbms_sqlanywhere => {
+ include => '_rdbms_sqlanywhere_common',
req => {
'DBD::SQLAnywhere' => 0,
},
},
rdbms_sqlanywhere_odbc => {
- include => 'rdbms_generic_odbc',
+ include => [qw( _rdbms_generic_odbc _rdbms_sqlanywhere_common )],
pod => {
title => 'SQLAnywhere support via DBD::ODBC',
desc => 'Modules required to connect to SQLAnywhere via DBD::ODBC',
},
},
+ _rdbms_firebird_common => {
+ include => '_ic_dt_strptime_based',
+ },
+
rdbms_firebird => {
+ include => '_rdbms_firebird_common',
req => {
'DBD::Firebird' => 0,
},
},
rdbms_firebird_interbase => {
+ include => '_rdbms_firebird_common',
req => {
'DBD::InterBase' => 0,
},
},
rdbms_firebird_odbc => {
- include => 'rdbms_generic_odbc',
+ include => [qw( _rdbms_generic_odbc _rdbms_firebird_common )],
pod => {
title => 'Firebird support via DBD::ODBC',
desc => 'Modules required to connect to Firebird via DBD::ODBC',
},
},
+ test_rdbms_sqlite => {
+ include => 'rdbms_sqlite',
+ req => {
+ ###
+ ### IMPORTANT - do not raise this dependency
+ ### even though many bugfixes are present in newer versions, the general DBIC
+ ### rule is to bend over backwards for available DBDs (given upgrading them is
+ ### often *not* easy or even possible)
+ ###
+ 'DBD::SQLite' => '1.29',
+ },
+ },
+
test_rdbms_pg => {
include => 'rdbms_pg',
env => [
DBICTEST_PG_USER => 0,
DBICTEST_PG_PASS => 0,
],
- req => {
- # the order does matter because the rdbms support group might require
- # a different version that the test group
- #
- # when changing this list make sure to adjust xt/optional_deps.t
- 'DBD::Pg' => '2.009002', # specific version to test bytea
- },
},
test_rdbms_mssql_odbc => {
DBICTEST_ORA_PASS => 0,
],
req => {
- 'DateTime::Format::Oracle' => '0',
'DBD::Oracle' => '1.24',
},
},
},
dist_dir => {
+ # we need to run the dbicadmin so we can self-generate its POD
+ # also we do not want surprises in case JSON::XS is in the path
+ # so make sure we get an always-working JSON::Any
+ include => [qw(
+ admin_script
+ _json_xs_compatible_json_any
+ id_shortener
+ deploy
+ test_pod
+ test_podcoverage
+ test_whitespace
+ test_strictures
+ )],
req => {
- %$test_and_dist_json_any,
'ExtUtils::MakeMaker' => '6.64',
+ 'Module::Install' => '1.06',
'Pod::Inherit' => '0.91',
},
},
sub req_group_list {
+{ map
{ $_ => $_[0]->_groups_to_reqs($_) }
- keys %$dbic_reqs
+ grep { $_ !~ /^_/ } keys %$dbic_reqs
}
}
my ($self, $groups) = @_;
my $reqs = $self->_groups_to_reqs($groups);
- my $mods_missing = $self->modreq_missing_for($groups);
+
+ my $mods_missing = $reqs->{missing_envvars}
+ ? $self->_list_physically_missing_modules( $reqs->{modreqs} )
+ : $self->modreq_missing_for($groups)
+ ;
return '' if
! $mods_missing
or return '';
join ' ', map
- { $reqs->{modreqs}{$_} ? qq("$_~>=$reqs->{modreqs}{$_}") : $_ }
+ { $reqs->{modreqs}{$_} ? "$_~$reqs->{modreqs}{$_}" : $_ }
sort { lc($a) cmp lc($b) } keys %$modreq_errors
;
}
+my $tb;
+sub skip_without {
+ my ($self, $groups) = @_;
+
+ $tb ||= do { local $@; eval { Test::Builder->new } }
+ or croak "Calling skip_without() before loading Test::Builder makes no sense";
+
+ if ( my $err = $self->req_missing_for($groups) ) {
+ my ($fn, $ln) = (caller(0))[1,2];
+ $tb->skip("block in $fn around line $ln requires $err");
+
+ BEGIN { ${^WARNING_BITS} = "" }
+
+ last SKIP;
+ }
+
+ 1;
+}
+
sub die_unless_req_ok_for {
if (my $err = shift->req_missing_for(shift) ) {
die "Unable to continue due to missing requirements: $err\n";
join '/', @res;
}
+my $groupname_re = qr/ [a-z_] [0-9_a-z]* /x;
+my $modname_re = qr/ [A-Z_a-z] [0-9A-Z_a-z]* (?:::[0-9A-Z_a-z]+)* /x;
+my $modver_re = qr/ [0-9]+ (?: \. [0-9]+ )? /x;
+
+# Expand includes from a random group in a specific order:
+# nonvariable groups first, then their includes, then the variable groups,
+# then their includes.
+# This allows reliably marking the rest of the mod reqs as variable (this is
+# also why variable includes are currently not allowed)
+sub __expand_includes {
+ my ($groups, $seen) = @_;
+
+ # !! DIFFERENT !! behavior and return depending on invocation mode
+ # (easier to recurse this way)
+ my $is_toplevel = $seen
+ ? 0
+ : !! ($seen = {})
+ ;
+ my ($res_per_type, $missing_envvars);
-### Private OO API
-our %req_unavailability_cache;
-
-# this method is just a lister and envvar/metadata checker - it does not try to load anything
-my $processed_groups = {};
-sub _groups_to_reqs {
- my ($self, $groups) = @_;
-
- $groups = [ $groups || () ]
- unless ref $groups eq 'ARRAY';
-
- croak "@{[ (caller(1))[3] ]}() expects a requirement group name or arrayref of group names"
- unless @$groups;
-
- my $ret = {
- modreqs => {},
- modreqs_fully_documented => 1,
- };
-
- for my $group ( grep { ! $processed_groups->{$_} } @$groups ) {
-
- $group =~ /\A [A-Za-z][0-9A-Z_a-z]* \z/x
- or croak "Invalid requirement group name '$group': only ascii alphanumerics and _ are allowed";
+ # breadth-first evaluation, with non-variable includes on top
+ for my $g (@$groups) {
- croak "Requirement group '$group' is not defined" unless defined $dbic_reqs->{$group};
+ croak "Invalid requirement group name '$g': only ascii alphanumerics and _ are allowed"
+ if $g !~ qr/ \A $groupname_re \z/x;
- my $group_reqs = $dbic_reqs->{$group}{req};
+ my $r = $dbic_reqs->{$g}
+ or croak "Requirement group '$g' is not defined";
- # sanity-check
- for (keys %$group_reqs) {
+ # always do this check *before* the $seen check
+ croak "Group '$g' with variable effective_modreqs can not be specified as an 'include'"
+ if ( $r->{env} and ! $is_toplevel );
- $_ =~ /\A [A-Z_a-z][0-9A-Z_a-z]* (?:::[0-9A-Z_a-z]+)* \z /x
- or croak "Requirement '$_' in group '$group' is not a valid module name";
+ next if $seen->{$g}++;
- # !!!DO NOT CHANGE!!!
- # remember - version.pm may not be available on the system
- croak "Requirement '$_' in group '$group' specifies an invalid version '$group_reqs->{$_}' (only plain non-underscored floating point decimals are supported)"
- if ( ($group_reqs->{$_}||0) !~ / \A [0-9]+ (?: \. [0-9]+ )? \z /x );
- }
+ my $req_type = 'static';
- # check if we have all required envvars if such names are defined
- my ($some_envs_required, $some_envs_missing);
- if (my @e = @{$dbic_reqs->{$group}{env} || [] }) {
+ if ( my @e = @{$r->{env}||[]} ) {
- croak "Unexpected 'env' attribute under group '$group' (only allowed in test_* groups)"
- unless $group =~ /^test_/;
+ croak "Unexpected 'env' attribute under group '$g' (only allowed in test_* groups)"
+ unless $g =~ /^test_/;
- croak "Unexpected *odd* list in 'env' under group '$group'"
+ croak "Unexpected *odd* list in 'env' under group '$g'"
if @e % 2;
- my @group_envnames_list;
-
# deconstruct the whole thing
+ my (@group_envnames_list, $some_envs_required, $some_required_missing);
while (@e) {
push @group_envnames_list, my $envname = shift @e;
$some_envs_required ||= 1;
- $some_envs_missing ||= (
+ $some_required_missing ||= (
! defined $ENV{$envname}
or
! length $ENV{$envname}
);
}
- croak "None of the envvars in group '$group' declared as required, making the requirement moot"
+ croak "None of the envvars in group '$g' declared as required, making the requirement moot"
unless $some_envs_required;
- push @{$ret->{missing_envvars}}, \@group_envnames_list if $some_envs_missing;
+ if ($some_required_missing) {
+ push @{$missing_envvars->{$g}}, \@group_envnames_list;
+ $req_type = 'variable';
+ }
}
- # get the reqs for includes if any
- my $inc_reqs;
- if (my $incs = $dbic_reqs->{$group}{include}) {
- $incs = [ $incs ] unless ref $incs eq 'ARRAY';
+ push @{$res_per_type->{"base_${req_type}"}}, $g;
- croak "Malformed 'include' for group '$group': must be another existing group name or arrayref of existing group names"
- unless @$incs;
+ if (my $i = $dbic_reqs->{$g}{include}) {
+ $i = [ $i ] unless ref $i eq 'ARRAY';
- local $processed_groups->{$group} = 1;
+ croak "Malformed 'include' for group '$g': must be another existing group name or arrayref of existing group names"
+ unless @$i;
- my $subreqs = $self->_groups_to_reqs($incs);
+ push @{$res_per_type->{"incs_${req_type}"}}, @$i;
+ }
+ }
- croak "Includes with variable effective_modreqs not yet supported"
- if $subreqs->{effective_modreqs_differ};
+ my @ret = map {
+ @{ $res_per_type->{"base_${_}"} || [] },
+ ( $res_per_type->{"incs_${_}"} ? __expand_includes( $res_per_type->{"incs_${_}"}, $seen ) : () ),
+ } qw(static variable);
+
+ return ! $is_toplevel ? @ret : do {
+ my $rv = {};
+ $rv->{$_} = {
+ idx => 1 + keys %$rv,
+ missing_envvars => $missing_envvars->{$_},
+ } for @ret;
+ $rv->{$_}{user_requested} = 1 for @$groups;
+ $rv;
+ };
+}
- $inc_reqs = $subreqs->{modreqs};
+### Private OO API
+our %req_unavailability_cache;
+# this method is just a lister and envvar/metadata checker - it does not try to load anything
+sub _groups_to_reqs {
+ my ($self, $want) = @_;
+
+ $want = [ $want || () ]
+ unless ref $want eq 'ARRAY';
+
+ croak "@{[ (caller(1))[3] ]}() expects a requirement group name or arrayref of group names"
+ unless @$want;
+
+ my $ret = {
+ modreqs => {},
+ modreqs_fully_documented => 1,
+ };
+
+ my $groups;
+ for my $piece (@$want) {
+ if ($piece =~ qr/ \A $groupname_re \z /x) {
+ push @$groups, $piece;
+ }
+ elsif ( my ($mod, $ver) = $piece =~ qr/ \A ($modname_re) \>\= ($modver_re) \z /x ) {
+ croak "Ad hoc module specification lists '$mod' twice"
+ if exists $ret->{modreqs}{$mod};
+
+ croak "Ad hoc module specification '${mod} >= $ver' (or greater) not listed in the test_adhoc optdep group" if (
+ ! defined $dbic_reqs->{test_adhoc}{req}{$mod}
+ or
+ $dbic_reqs->{test_adhoc}{req}{$mod} < $ver
+ );
+
+ $ret->{modreqs}{$mod} = $ver;
+ $ret->{modreqs_fully_documented} = 0;
+ }
+ else {
+ croak "Unsupported argument '$piece' supplied to @{[ (caller(1))[3] ]}()"
+ }
+ }
+
+ my $all_groups = __expand_includes($groups);
+
+ # pre-assemble list of augmentations, perform basic sanity checks
+# Note that below we *DO NOT* respect the source/target relationship, but
+ # instead always default to augment the "later" group
+ # This is done so that the "stable/variable" boundary keeps working as
+ # expected
+ my $augmentations;
+ for my $requesting_group (keys %$all_groups) {
+ if (my $ag = $dbic_reqs->{$requesting_group}{augment}) {
+ for my $target_group (keys %$ag) {
+
+ croak "Group '$requesting_group' claims to augment a non-existent group '$target_group'"
+ unless $dbic_reqs->{$target_group};
+
+ croak "Augmentation combined with variable effective_modreqs currently unsupported for group '$requesting_group'"
+ if $dbic_reqs->{$requesting_group}{env};
+
+ croak "Augmentation of group '$target_group' with variable effective_modreqs unsupported (requested by '$requesting_group')"
+ if $dbic_reqs->{$target_group}{env};
+
+ if (my @foreign = grep { $_ ne 'req' } keys %{$ag->{$target_group}} ) {
+          croak "Only 'req' augmentations are currently supported (group '$requesting_group' attempts to alter '$foreign[0]' of group '$target_group')";
+ }
+
+ $ret->{augments}{$target_group} = 1;
+
+ # no augmentation for stuff that hasn't been selected
+ if ( $all_groups->{$target_group} and my $ar = $ag->{$target_group}{req} ) {
+ push @{$augmentations->{
+ ( $all_groups->{$requesting_group}{idx} < $all_groups->{$target_group}{idx} )
+ ? $target_group
+ : $requesting_group
+ }}, $ar;
+ }
+ }
+ }
+ }
+
+ for my $group (sort { $all_groups->{$a}{idx} <=> $all_groups->{$b}{idx} } keys %$all_groups ) {
+
+ my $group_reqs = $dbic_reqs->{$group}{req};
+
+ # sanity-check
+ for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) {
+ for (keys %$req_bag) {
+
+ $_ =~ / \A $modname_re \z /x
+ or croak "Requirement '$_' in group '$group' is not a valid module name";
+
+ # !!!DO NOT CHANGE!!!
+ # remember - version.pm may not be available on the system
+ croak "Requirement '$_' in group '$group' specifies an invalid version '$req_bag->{$_}' (only plain non-underscored floating point decimals are supported)"
+ if ( ($req_bag->{$_}||0) !~ qr/ \A $modver_re \z /x );
+ }
+ }
+
+ if (my $e = $all_groups->{$group}{missing_envvars}) {
+ push @{$ret->{missing_envvars}}, @$e;
}
# assemble into the final ret
for my $type (
'modreqs',
- $some_envs_missing ? () : 'effective_modreqs'
+ ( $ret->{missing_envvars} ? () : 'effective_modreqs' ),
) {
- for my $req_bag ($group_reqs, $inc_reqs||()) {
+ for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) {
for my $mod (keys %$req_bag) {
$ret->{$type}{$mod} = $req_bag->{$mod}||0 if (
}
}
- $ret->{effective_modreqs_differ} ||= !!$some_envs_missing;
-
- $ret->{modreqs_fully_documented} &&= !!$dbic_reqs->{$group}{pod};
+ $ret->{modreqs_fully_documented} &&= !!$dbic_reqs->{$group}{pod}
+ if $all_groups->{$group}{user_requested};
$ret->{release_testing_mandatory} ||= !!$dbic_reqs->{$group}{release_testing_mandatory};
}
}
-# this method tries to load specified modreqs and returns a hashref of
+# this method tries to find/load specified modreqs and returns a hashref of
# module/loaderror pairs for anything that failed
sub _errorlist_for_modreqs {
# args supposedly already went through _groups_to_reqs and are therefore sanitized
$ret;
}
+# Unlike the above DO NOT try to load anything
+# This is executed when some needed envvars are not available
+# which in turn means a module load will never be reached anyway
+# This is important because some modules (especially DBDs) can be
+# *really* fickle when a require() is attempted, with pretty confusing
+# side-effects (especially on windows)
+sub _list_physically_missing_modules {
+ my ($self, $modreqs) = @_;
+
+ # in case there is a coderef in @INC there is nothing we can definitively prove
+ # so short circuit directly
+ return '' if grep { length ref $_ } @INC;
+
+ my @definitely_missing;
+ for my $mod (keys %$modreqs) {
+ (my $fn = $mod . '.pm') =~ s|::|/|g;
+
+ push @definitely_missing, $mod unless grep
+ # this should work on any combination of slashes
+ { $_ and -d $_ and -f "$_/$fn" and -r "$_/$fn" }
+ @INC
+ ;
+ }
+
+ join ' ', map
+ { $modreqs->{$_} ? "$_~$modreqs->{$_}" : $_ }
+ sort { lc($a) cmp lc($b) } @definitely_missing
+ ;
+}
+
# This is to be called by the author only (automatically in Makefile.PL)
sub _gen_pod {
"\n\n---------------------------------------------------------------------\n"
;
- # do not ask for a recent version, use 1.x API calls
- # this *may* execute on a smoker with old perl or whatnot
- require File::Path;
-
(my $modfn = __PACKAGE__ . '.pm') =~ s|::|/|g;
(my $podfn = "$pod_dir/$modfn") =~ s/\.pm$/\.pod/;
- (my $dir = $podfn) =~ s|/[^/]+$||;
- File::Path::mkpath([$dir]);
+ require DBIx::Class::_Util;
+ DBIx::Class::_Util::mkdir_p( DBIx::Class::_Util::parent_dir( $podfn ) );
my $sqltver = $class->req_list_for('deploy')->{'SQL::Translator'}
or die "Hrmm? No sqlt dep?";
...
- my %DBIC_DEPLOY_DEPS = %{ eval {
+ my %DBIC_DEPLOY_AND_ORACLE_DEPS = %{ eval {
require $class;
- $class->req_list_for('deploy');
+ $class->req_list_for([qw( deploy rdbms_oracle ic_dt )]);
} || {} };
\$EUMM_ARGS{PREREQ_PM} = {
- \%DBIC_DEPLOY_DEPS,
+ \%DBIC_DEPLOY_AND_ORACLE_DEPS,
\%{ \$EUMM_ARGS{PREREQ_PM} || {} },
};
#@@
push @chunks, '=head1 CURRENT REQUIREMENT GROUPS';
+ my $standalone_info;
+
for my $group (sort keys %$dbic_reqs) {
- my $p = $dbic_reqs->{$group}{pod}
- or next;
- my $modlist = $class->modreq_list_for($group);
+ my $info = $standalone_info->{$group} ||= $class->_groups_to_reqs($group);
- next unless keys %$modlist;
+ next unless (
+ $info->{modreqs_fully_documented}
+ and
+ ( $info->{augments} or $info->{modreqs} )
+ );
+
+ my $p = $dbic_reqs->{$group}{pod};
push @chunks, (
"=head2 $p->{title}",
- "$p->{desc}",
+ "=head3 $group",
+ $p->{desc},
'=over',
- ( map { "=item * $_" . ($modlist->{$_} ? " >= $modlist->{$_}" : '') } (sort keys %$modlist) ),
- '=back',
- "Requirement group: B<$group>",
);
+
+ if ( keys %{ $info->{modreqs}||{} } ) {
+ push @chunks, map
+ { "=item * $_" . ($info->{modreqs}{$_} ? " >= $info->{modreqs}{$_}" : '') }
+ ( sort keys %{ $info->{modreqs} } )
+ ;
+ }
+ else {
+ push @chunks, '=item * No standalone requirements',
+ }
+
+ push @chunks, '=back';
+
+ for my $ag ( sort keys %{ $info->{augments} || {} } ) {
+ my $ag_info = $standalone_info->{$ag} ||= $class->_groups_to_reqs($ag);
+
+ my $newreqs = $class->modreq_list_for([ $group, $ag ]);
+ for (keys %$newreqs) {
+ delete $newreqs->{$_} if (
+ ( defined $info->{modreqs}{$_} and $info->{modreqs}{$_} == $newreqs->{$_} )
+ or
+ ( defined $ag_info->{modreqs}{$_} and $ag_info->{modreqs}{$_} == $newreqs->{$_} )
+ );
+ }
+
+ if (keys %$newreqs) {
+ push @chunks, (
+ "Combined with L</$ag> additionally requires:",
+ '=over',
+ ( map
+ { "=item * $_" . ($newreqs->{$_} ? " >= $newreqs->{$_}" : '') }
+ ( sort keys %$newreqs )
+ ),
+ '=back',
+ );
+ }
+ }
}
the returned string could look like:
EOC
- push @chunks, qq{ "SQL::Translator~>=$sqltver" (see $class documentation for details)};
+ push @chunks, qq{ "SQL::Translator~$sqltver" (see $class documentation for details)};
push @chunks, <<'EOC';
The author is expected to prepend the necessary text to this message before
the returned string could look like:
EOC
- push @chunks, qq{ "SQL::Translator~>=$sqltver"};
+ push @chunks, qq{ "SQL::Translator~$sqltver"};
push @chunks, <<'EOC';
See also L</-list_missing>.
+=head2 skip_without
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=back
+
+A convenience wrapper around L<skip|Test::More/SKIP>. It takes neither
+a reason (it is generated by L</req_missing_for>) nor an amount of skipped tests
+(it is always C<1>, thus mandating unconditional use of
+L<done_testing|Test::More/done_testing>). Most useful in combination with ad hoc
+requirement specifications:
+EOC
+
+ push @chunks, <<EOC;
+ SKIP: {
+    $class->skip_without([qw( deploy YAML>=0.90 )]);
+
+ ...
+ }
+EOC
+
+ push @chunks, <<'EOC';
+
=head2 die_unless_req_ok_for
=over