# functionality is moved to scripts. More about the problem (and the
# WONTFIX "explanation") here: https://github.com/travis-ci/travis-ci/issues/497
#
+# the entire run times out after 50 minutes, or after 5 minutes without
+# console output
#
# Smoke all branches except for blocked* and wip/*
language: perl
perl:
+ - "5.8"
- "5.20-extras"
env:
- CLEANTEST=false
- - CLEANTEST=true
+ - CLEANTEST=true NUMTHREADS=1
matrix:
fast_finish: true
include:
- # this particular perl is quite widespread
- - perl: 5.8.8_thr_mb
+ # CLEANTEST of minimum supported with non-tracing poisoning
+ - perl: 5.8.3_nt_mb
env:
- CLEANTEST=true
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.8.8
+ - DBIC_TRACE_PROFILE=console_monochrome
+ - BREWVER=5.8.3
+ - BREWOPTS="-Dusemorebits"
- # so is this one (test a sane CPAN.pm)
- - perl: 5.12.4_thr_mb
+ # Full Test of minimum supported without threads with non-tracing poisoning
+ - perl: 5.8.3_nt
env:
- - CLEANTEST=true
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.12.4
+ - CLEANTEST=false
+ - POISON_ENV=true
+ - BREWVER=5.8.3
- # this is the perl suse ships
- - perl: 5.10.0_thr_dbg
+ # Full Test of minimum supported with threads with non-tracing poisoning
+ - perl: 5.8.5_thr
+ env:
+ - CLEANTEST=false
+ - POISON_ENV=true
+ - DBIC_TRACE_PROFILE=console
+ - BREWVER=5.8.5
+ - BREWOPTS="-Duseithreads"
+
+ # CLEANTEST of solaris-like perl with non-tracing poisoning
+ - perl: 5.8.4_nt
env:
- CLEANTEST=true
- - BREWOPTS="-DDEBUGGING -Duseithreads"
- - BREWVER=5.10.0
+ - POISON_ENV=true
+ - DBIC_TRACE_PROFILE=console
+ - BREWVER=5.8.4
- # CLEANTEST of minimum supported
- - perl: 5.8.3_nt_mb
+ # CLEANTEST: this particular perl is quite widespread
+ - perl: 5.8.8_thr_mb
env:
- CLEANTEST=true
- - BREWOPTS="-Dusemorebits"
- - BREWVER=5.8.3
+ - BREWVER=5.8.8
+ - BREWOPTS="-Duseithreads -Dusemorebits"
- # Full Test of minimum supported with threads
- - perl: 5.8.5_thr
+ # CLEANTEST: this is the perl suse ships
+ - perl: 5.10.0_thr_dbg
env:
- - CLEANTEST=false
- - BREWOPTS="-Duseithreads"
- - BREWVER=5.8.5
- - DBIC_TRACE_PROFILE=console
+ - CLEANTEST=true
+ - BREWVER=5.10.0
+ - BREWOPTS="-DDEBUGGING -Duseithreads"
- # Full Test of minimum supported without threads
- - perl: 5.8.3_nt
+ # CLEANTEST: this one is in a number of debian-based LTS (test a sane CPAN.pm)
+ - perl: 5.14.2_thr_mb
env:
- - CLEANTEST=false
- - BREWOPTS=""
- - BREWVER=5.8.3
- - DBIC_TRACE_PROFILE=console_monochrome
+ - CLEANTEST=true
+ - BREWVER=5.14.2
+ - BREWOPTS="-Duseithreads -Dusemorebits"
###
# some permutations of tracing and envvar poisoning
- - perl: 5.16.3_thr_mb
+ - perl: 5.12.3_thr
env:
- - CLEANTEST=false
- - POISON_ENV=true
+ - CLEANTEST=true
- DBIC_TRACE=1
- - DBIC_MULTICREATE_DEBUG=0
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.16.3
-
- - perl: 5.20-extras
- env:
- - CLEANTEST=false
+ - DBIC_MULTICREATE_DEBUG=1
+ - DBIC_STORAGE_RETRY_DEBUG=1
- POISON_ENV=true
- DBIC_TRACE_PROFILE=console
+ - BREWVER=5.12.3
+ - BREWOPTS="-Duseithreads"
- - perl: 5.8
+ - perl: 5.16.3_thr_mb
env:
- - CLEANTEST=true
- - POISON_ENV=true
+ - CLEANTEST=false
- DBIC_TRACE=1
- - DBIC_TRACE_PROFILE=console
+ - POISON_ENV=true
+ - BREWVER=5.16.3
+ - BREWOPTS="-Duseithreads -Dusemorebits"
- perl: 5.18-extras
env:
- CLEANTEST=false
- - POISON_ENV=true
- DBIC_TRACE=1
+ - POISON_ENV=true
- DBIC_TRACE_PROFILE=console_monochrome
- - DBIC_MULTICREATE_DEBUG=0
###
# Start of the allow_failures block
- # old threaded with blead CPAN
- - perl: devcpan_5.8.7_thr
+ # threaded oldest possible with blead CPAN with non-tracing poisoning
+ - perl: devcpan_5.8.1_thr_mb
env:
- CLEANTEST=true
- - BREWOPTS="-Duseithreads"
- - BREWVER=5.8.7
- DEVREL_DEPS=true
+ - POISON_ENV=true
+ - BREWVER=5.8.1
+ - BREWOPTS="-Duseithreads -Dusemorebits"
- # 5.10.0 threaded with blead CPAN
- - perl: devcpan_5.10.0_thr_mb
+ # 5.8.4 threaded with blead CPAN with non-tracing poisoning
+ - perl: devcpan_5.8.4_thr
env:
- CLEANTEST=true
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.10.0
- DEVREL_DEPS=true
+ - POISON_ENV=true
+ - BREWVER=5.8.4
+ - BREWOPTS="-Duseithreads"
- # 5.12.2 with blead CPAN
- - perl: devcpan_5.12.2_thr
+ # 5.10.0 threaded with blead CPAN
+ - perl: devcpan_5.10.0_thr_mb
env:
- CLEANTEST=true
- - BREWOPTS="-Duseithreads"
- - BREWVER=5.12.2
- DEVREL_DEPS=true
+ - BREWVER=5.10.0
+ - BREWOPTS="-Duseithreads -Dusemorebits"
- # recentish threaded stable with blead CPAN
- - perl: devcpan_5.18.2_thr_mb
+ # 5.12.1 with blead CPAN
+ - perl: devcpan_5.12.1_thr
env:
- - CLEANTEST=false
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.18.2
+ - CLEANTEST=true
- DEVREL_DEPS=true
+ - BREWVER=5.12.1
+ - BREWOPTS="-Duseithreads"
- # bleadperl with stock CPAN, full depchain test
+ # bleadperl with stock CPAN, full depchain test with non-tracing poisoning
- perl: bleadperl
env:
- CLEANTEST=true
+ - POISON_ENV=true
- BREWVER=blead
- # bleadperl with blead CPAN
+ # bleadperl with blead CPAN with non-tracing poisoning
- perl: devcpan_bleadperl_thr_mb
env:
- CLEANTEST=false
- - BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=blead
- DEVREL_DEPS=true
+ - POISON_ENV=true
+ - BREWVER=blead
+ - BREWOPTS="-Duseithreads -Dusemorebits"
# which ones of the above can fail
allow_failures:
# these run with various dev snapshots - allowed to fail
- - perl: devcpan_5.8.7_thr
+ - perl: devcpan_5.8.1_thr_mb
+ - perl: devcpan_5.8.4_thr
- perl: devcpan_5.10.0_thr_mb
- - perl: devcpan_5.12.2_thr
- - perl: devcpan_5.18.2_thr_mb
+ - perl: devcpan_5.12.1_thr
- perl: bleadperl
- perl: devcpan_bleadperl_thr_mb
-# sourcing the files is *EXTREMELY* important - otherwise
-# no envvars will survive
-
-# the entire run times out after 50 minutes, or after 5 minutes without
-# console output
+###
+### For the following two phases -e is *set*
+###
before_install:
+ # common functions for all run phases below
+ #
+ # this is an exporter - sourcing it is crucial
+ # among other things it also sets -e
+ #
+ - source maint/travis-ci_scripts/common.bash
+
# Sets global envvars, downloads/configures debs based on CLEANTEST
# Sets extra DBICTEST_* envvars
#
+ # this is an exporter - sourcing it is crucial
+ #
- source maint/travis-ci_scripts/10_before_install.bash
install:
# Build and switch to a custom perl if requested
# Configure the perl env, preinstall some generic toolchain parts
+ # Possibly poison the environment
+ #
+ # this is an exporter - sourcing it is crucial
#
- source maint/travis-ci_scripts/20_install.bash
+###
+### From this point on -e is *unset*, rely on travis' error handling
+###
+ - set +e
+
before_script:
# Preinstall/install deps based on envvars/CLEANTEST
#
- - source maint/travis-ci_scripts/30_before_script.bash
+ # need to invoke the after_failure script manually
+ # because 'after_failure' runs only after 'script' fails
+ #
+ - maint/getstatus maint/travis-ci_scripts/30_before_script.bash || ( maint/travis-ci_scripts/50_after_failure.bash && /bin/false )
script:
# Run actual tests
#
- - source maint/travis-ci_scripts/40_script.bash
+ - maint/getstatus maint/travis-ci_scripts/40_script.bash
after_success:
# Check if we can assemble a dist properly if not in CLEANTEST
#
- - source maint/travis-ci_scripts/50_after_success.bash
+ - maint/getstatus maint/travis-ci_scripts/50_after_success.bash
after_failure:
- # No tasks yet
+ # Final sysinfo printout on fail
#
- #- source maint/travis-ci_scripts/50_after_failure.bash
+ - maint/getstatus maint/travis-ci_scripts/50_after_failure.bash
after_script:
# No tasks yet
#
- #- source maint/travis-ci_scripts/60_after_script.bash
-
- # if we do not unset this before we terminate the travis teardown will
- # mark the entire job as failed
- - set +e
+ #- maint/getstatus maint/travis-ci_scripts/60_after_script.bash
Revision history for DBIx::Class
+ * New Features
+ - DBIx::Class::Optional::Dependencies now properly understands
+ combinations of requirements and does the right thing with e.g.
+ ->req_list_for([qw( rdbms_oracle icdt )]) bringing in the Oracle
+ specific DateTime::Format dependencies
* Fixes
- Fix updating multiple CLOB/BLOB columns on Oracle
+ - Fix incorrect collapsing-parser source being generated in the
+ presence of unicode data among the collapse-points
+ - Fix endless loop on BareSourcelessResultClass->throw_exception(...)
+ - Fix hang in t/72pg.t when run against DBD::Pg 3.5.0. The ping()
+ implementation changes due to RT#100648 made an alarm() based
+ timeout lock-prone.
+
+ * Misc
+ - Skip tests in a way more intelligent and speedy manner when optional
+ dependencies are missing
+ - Make the Optional::Dependencies error messages cpanm-friendly
+ - Incompatibly change values (not keys) of the hash returned by
+ Optional::Dependencies::req_group_list (no known users in the wild)
+ - Depend on newer SQL::Abstract (fixing overly-aggressive parenthesis
+ opener: RT#99503)
+ - Depend on newer Moo, fixing some interoperability issues:
+ http://lists.scsys.co.uk/pipermail/dbix-class/2014-October/011787.html
+ - Fix intermittent failures in the LeakTracer on 5.18+
0.082801 2014-10-05 23:55 (UTC)
* Known Issues
use 5.008001;
use inc::Module::Install 1.06;
-BEGIN { makemaker_args( NORECURS => 1 ) } # needs to happen early for old EUMM
+BEGIN {
+ # needs to happen early for old EUMM
+ makemaker_args( NORECURS => 1 );
+
+ local @INC = ('lib', @INC);
+ require DBIx::Class::Optional::Dependencies;
+}
##
## DO NOT USE THIS HACK IN YOUR DISTS!!! (it makes #toolchain sad)
'Sub::Name' => '0.04',
# pure-perl (FatPack-able) libs
- 'Class::Accessor::Grouped' => '0.10010',
+ 'Class::Accessor::Grouped' => '0.10012',
'Class::C3::Componentised' => '1.0009',
'Class::Inspector' => '1.24',
'Config::Any' => '0.20',
'Data::Page' => '2.00',
'Devel::GlobalDestruction' => '0.09',
'Hash::Merge' => '0.12',
- 'Moo' => '1.004005',
+ 'Moo' => '1.006001',
'MRO::Compat' => '0.12',
'Module::Find' => '0.07',
'namespace::clean' => '0.24',
'Path::Class' => '0.18',
'Scope::Guard' => '0.03',
- 'SQL::Abstract' => '1.79',
+ 'SQL::Abstract' => '1.81',
'Try::Tiny' => '0.07',
# Technically this is not a core dependency - it is only required
'Test::Warn' => '0.21',
'Test::More' => '0.94',
- # needed for testing only, not for operation
- # we will move away from this dep eventually, perhaps to DBD::CSV or something
-###
-### IMPORTANT - do not raise this dependency
-### even though many bugfixes are present in newer versions, the general DBIC
-### rule is to bend over backwards for available DBDs (given upgrading them is
-### often *not* easy or even possible)
-###
- 'DBD::SQLite' => '1.29',
-
# this is already a dep of n::c, but just in case - used by t/55namespaces_cleaned.t
# remove and do a manual glob-collection if n::c is no longer a dep
'Package::Stash' => '0.28',
+
+ # needed for testing only, not for operation
+ # we will move away from this dep eventually, perhaps to DBD::CSV or something
+ %{ DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_sqlite') },
};
# if the user has this env var set and no SQLT installed, tests will fail
# envvar *and* is not running a full Makefile/make/maketest cycle - they get
# to keep the pieces
if ($ENV{DBICTEST_SQLT_DEPLOY}) {
- local @INC = ('lib', @INC);
- require DBIx::Class::Optional::Dependencies;
- my $dep_req = DBIx::Class::Optional::Dependencies->req_list_for('deploy');
- for (keys %$dep_req) {
- test_requires ($_ => $dep_req->{$_})
+ my $deploy_req = DBIx::Class::Optional::Dependencies->req_list_for('deploy');
+ for (keys %$deploy_req) {
+ test_requires ($_ => $deploy_req->{$_})
}
}
package DBIx::Class::Admin;
+use warnings;
+use strict;
+
# check deps
BEGIN {
- use DBIx::Class;
- die('The following modules are required for DBIx::Class::Admin ' . DBIx::Class::Optional::Dependencies->req_missing_for ('admin') )
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('admin');
+ require DBIx::Class::Optional::Dependencies;
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('admin') ) {
+ die "The following extra modules are required for DBIx::Class::Admin: $missing\n";
+ }
}
use JSON::Any qw(DWIW PP JSON CPANEL XS);
use strict;
use warnings;
-use base qw/DBIx::Class::Core DBIx::Class::DB/;
-
-# Modules CDBICompat needs that DBIx::Class does not.
-my @Extra_Modules = qw(
- Class::Trigger
- DBIx::ContextualFetch
- Clone
-);
-my @didnt_load;
-for my $module (@Extra_Modules) {
- push @didnt_load, $module unless eval qq{require $module};
+BEGIN {
+ require DBIx::Class::Optional::Dependencies;
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for('cdbicompat')) {
+ die "The following extra modules are required for DBIx::Class::CDBICompat: $missing\n";
+ }
}
-__PACKAGE__->throw_exception("@{[ join ', ', @didnt_load ]} are missing and are required for CDBICompat")
- if @didnt_load;
+use base qw/DBIx::Class::Core DBIx::Class::DB/;
__PACKAGE__->load_own_components(qw/
Constraints
use strict;
use warnings;
use Sub::Name ();
-use Storable 'dclone';
use List::Util ();
use base qw/DBIx::Class::Row/;
# Must do a complete deep copy else column groups
# might accidentally be shared.
- my $groups = dclone $class->_column_groups;
+ my $groups = DBIx::Class::_Util::deep_clone( $class->_column_groups );
if ($group eq 'Primary') {
$class->set_primary_key(@cols);
DBIx::Class::CDBICompat::Pager;
use strict;
+
+# even though fatalization has been proven over and over to be a universally
+# bad idea, this line has been part of the code from the beginning
+# leaving the compat layer as-is, something may in fact depend on that
use warnings FATAL => 'all';
*pager = \&page;
sub _extend_meta {
my ($class, $type, $rel, $val) = @_;
- my %hash = %{ Clone::clone($class->__meta_info || {}) };
+
+### Explicitly not using the deep cloner as Clone exhibits specific behavior
+### wrt CODE references - it simply passes them as-is to the new structure
+### (without deparse/eval cycles). There likely is code that relies on this
+### so we just let sleeping dogs lie.
+ my $hash = Clone::clone($class->__meta_info || {});
$val->{self_class} = $class;
$val->{type} = $type;
$val->{accessor} = $rel;
- $hash{$type}{$rel} = DBIx::Class::CDBICompat::Relationship->new($val);
- $class->__meta_info(\%hash);
+ $hash->{$type}{$rel} = DBIx::Class::CDBICompat::Relationship->new($val);
+ $class->__meta_info($hash);
}
DBIx::Class::CDBICompat::Retrieve;
use strict;
-use warnings FATAL => 'all';
+# even though fatalization has been proven over and over to be a universally
+# bad idea, this line has been part of the code from the beginning
+# leaving the compat layer as-is, something may in fact depend on that
+use warnings FATAL => 'all';
sub retrieve {
my $self = shift;
numbers => { -value => [1, 2, 3] },
});
-Alternatively:
+Or using the more generic (and more cumbersome) literal syntax:
$resultset->search({
- numbers => \[ '= ?', [numbers => [1, 2, 3]] ]
+ numbers => \[ '= ?', [ numbers => [1, 2, 3] ] ]
});
package DBIx::Class::Optional::Dependencies;
-use warnings;
-use strict;
-
-use Carp ();
+### This may look crazy, but it in fact tangibly (by 50(!)%) shortens
+# the skip-test time when everything requested is unavailable
+use if $ENV{RELEASE_TESTING} => 'warnings';
+use if $ENV{RELEASE_TESTING} => 'strict';
+
+sub croak {
+ require Carp;
+ Carp::croak(@_);
+};
+###
# NO EXTERNAL NON-5.8.1 CORE DEPENDENCIES EVER (e.g. C::A::G)
# This module is to be loaded by Makefile.PM on a pristine system
# POD is generated automatically by calling _gen_pod from the
# Makefile.PL in $AUTHOR mode
-# NOTE: the rationale for 2 JSON::Any versions is that
-# we need the newer only to work around JSON::XS, which
-# itself is an optional dep
-my $min_json_any = {
- 'JSON::Any' => '1.23',
-};
-my $test_and_dist_json_any = {
- 'JSON::Any' => '1.31',
-};
-
+# *DELIBERATELY* not making a group for these - they must disappear
+# forever as optdeps in the first place
my $moose_basic = {
'Moose' => '0.98',
'MooseX::Types' => '0.21',
'MooseX::Types::LoadableClass' => '0.011',
};
-my $replicated = {
- %$moose_basic,
-};
+my $dbic_reqs = {
-my $admin_basic = {
- %$moose_basic,
- %$min_json_any,
- 'MooseX::Types::Path::Class' => '0.05',
- 'MooseX::Types::JSON' => '0.02',
-};
+ # NOTE: the rationale for 2 JSON::Any versions is that
+ # we need the newer only to work around JSON::XS, which
+ # itself is an optional dep
+ _json_any => {
+ req => {
+ 'JSON::Any' => '1.23',
+ },
+ },
-my $admin_script = {
- %$moose_basic,
- %$admin_basic,
- 'Getopt::Long::Descriptive' => '0.081',
- 'Text::CSV' => '1.16',
-};
+ _json_xs_compatible_json_any => {
+ req => {
+ 'JSON::Any' => '1.31',
+ },
+ },
-my $datetime_basic = {
- 'DateTime' => '0.55',
- 'DateTime::Format::Strptime' => '1.2',
-};
+ # a common placeholder for engines with IC::DT support based off DT::F::S
+ _icdt_strptime_based => {
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::Strptime' => '1.2',
+ },
+ },
+ }
+ },
-my $id_shortener = {
- 'Math::BigInt' => '1.80',
- 'Math::Base36' => '0.07',
-};
+ _rdbms_generic_odbc => {
+ req => {
+ 'DBD::ODBC' => 0,
+ }
+ },
-my $rdbms_sqlite = {
- 'DBD::SQLite' => '0',
-};
-my $rdbms_pg = {
- 'DBD::Pg' => '0',
-};
-my $rdbms_mssql_odbc = {
- 'DBD::ODBC' => '0',
-};
-my $rdbms_mssql_sybase = {
- 'DBD::Sybase' => '0',
-};
-my $rdbms_mssql_ado = {
- 'DBD::ADO' => '0',
-};
-my $rdbms_msaccess_odbc = {
- 'DBD::ODBC' => '0',
-};
-my $rdbms_msaccess_ado = {
- 'DBD::ADO' => '0',
-};
-my $rdbms_mysql = {
- 'DBD::mysql' => '0',
-};
-my $rdbms_oracle = {
- 'DBD::Oracle' => '0',
- %$id_shortener,
-};
-my $rdbms_ase = {
- 'DBD::Sybase' => '0',
-};
-my $rdbms_db2 = {
- 'DBD::DB2' => '0',
-};
-my $rdbms_db2_400 = {
- 'DBD::ODBC' => '0',
-};
-my $rdbms_informix = {
- 'DBD::Informix' => '0',
-};
-my $rdbms_sqlanywhere = {
- 'DBD::SQLAnywhere' => '0',
-};
-my $rdbms_sqlanywhere_odbc = {
- 'DBD::ODBC' => '0',
-};
-my $rdbms_firebird = {
- 'DBD::Firebird' => '0',
-};
-my $rdbms_firebird_interbase = {
- 'DBD::InterBase' => '0',
-};
-my $rdbms_firebird_odbc = {
- 'DBD::ODBC' => '0',
-};
+ _rdbms_generic_ado => {
+ req => {
+ 'DBD::ADO' => 0,
+ }
+ },
+
+ # must list any dep used by adhoc testing
+ # this prevents the "skips due to forgotten deps" issue
+ test_adhoc => {
+ req => {
+ 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
+ 'Class::DBI' => '3.000005',
+ 'Date::Simple' => '3.03',
+ 'YAML' => '0',
+ 'Class::Unload' => '0.07',
+ 'Time::Piece' => '0',
+ 'Time::Piece::MySQL' => '0',
+ },
+ },
-my $reqs = {
replicated => {
- req => $replicated,
+ req => $moose_basic,
pod => {
title => 'Storage::Replicated',
desc => 'Modules required for L<DBIx::Class::Storage::DBI::Replicated>',
},
test_replicated => {
+ include => 'replicated',
req => {
- %$replicated,
- 'Test::Moose' => '0',
+ 'Test::Moose' => '0',
},
},
-
admin => {
+ include => '_json_any',
req => {
- %$admin_basic,
+ %$moose_basic,
+ 'MooseX::Types::Path::Class' => '0.05',
+ 'MooseX::Types::JSON' => '0.02',
},
pod => {
title => 'DBIx::Class::Admin',
},
admin_script => {
+ include => 'admin',
req => {
- %$admin_script,
+ 'Getopt::Long::Descriptive' => '0.081',
+ 'Text::CSV' => '1.16',
},
pod => {
title => 'dbicadmin',
},
},
+  # base InflateColumn::DateTime group - intentionally thin, the
+  # RDBMS-specific groups add their own DateTime::Format::* via 'augment'
+  icdt => {
+    req => {
+      'DateTime' => '0.55',
+    },
+    pod => {
+      title => 'InflateColumn::DateTime support',
+      desc =>
+        'Modules required for L<DBIx::Class::InflateColumn::DateTime>. '
+      . 'Note that this group does not require much on its own, but '
+      . 'instead is augmented by various RDBMS-specific groups. See the '
+      . 'documentation of each C<rdbms_*> group for details',
+    },
+  },
+
id_shortener => {
- req => $id_shortener,
+ req => {
+ 'Math::BigInt' => '1.80',
+ 'Math::Base36' => '0.07',
+ },
},
- test_component_accessor => {
+ cdbicompat => {
req => {
- 'Class::Unload' => '0.07',
+ 'Class::Data::Inheritable' => '0',
+ 'Class::Trigger' => '0',
+ 'DBIx::ContextualFetch' => '0',
+ 'Clone' => '0.32',
+ },
+ pod => {
+ title => 'DBIx::Class::CDBICompat support',
+ desc => 'Modules required for L<DBIx::Class::CDBICompat>'
},
},
req => {
'Test::Pod' => '1.42',
},
+ release_testing_mandatory => 1,
},
test_podcoverage => {
'Test::Pod::Coverage' => '1.08',
'Pod::Coverage' => '0.20',
},
+ release_testing_mandatory => 1,
},
test_whitespace => {
'Test::EOL' => '1.0',
'Test::NoTabs' => '0.9',
},
+ release_testing_mandatory => 1,
},
test_strictures => {
req => {
'Test::Strict' => '0.20',
},
+ release_testing_mandatory => 1,
},
test_prettydebug => {
- req => $min_json_any,
+ include => '_json_any',
},
test_admin_script => {
+ include => [qw( admin_script _json_xs_compatible_json_any )],
req => {
- %$admin_script,
- %$test_and_dist_json_any,
'JSON' => 0,
'JSON::PP' => 0,
'Cpanel::JSON::XS' => 0,
},
},
- test_dt => {
- req => $datetime_basic,
- },
-
- test_dt_sqlite => {
- req => {
- %$datetime_basic,
- # t/36datetime.t
- # t/60core.t
- 'DateTime::Format::SQLite' => '0',
- },
- },
-
- test_dt_mysql => {
- req => {
- %$datetime_basic,
- # t/inflate/datetime_mysql.t
- # (doesn't need Mysql itself)
- 'DateTime::Format::MySQL' => '0',
- },
- },
-
- test_dt_pg => {
- req => {
- %$datetime_basic,
- # t/inflate/datetime_pg.t
- # (doesn't need PG itself)
- 'DateTime::Format::Pg' => '0.16004',
- },
- },
-
- test_cdbicompat => {
- req => {
- 'Class::DBI::Plugin::DeepAbstractSearch' => '0',
- %$datetime_basic,
- 'Time::Piece::MySQL' => '0',
- 'Date::Simple' => '3.03',
- },
- },
# this is just for completeness as SQLite
# is a core dep of DBIC for testing
rdbms_sqlite => {
req => {
- %$rdbms_sqlite,
+ 'DBD::SQLite' => 0,
},
pod => {
title => 'SQLite support',
desc => 'Modules required to connect to SQLite',
},
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::SQLite' => '0',
+ },
+ },
+ },
+ },
+
+ # centralize the specification, as we have ICDT tests which can
+ # test the full behavior of RDBMS-specific ICDT on top of bare SQLite
+ # not _-prefixed so that it will show up under req_group_list
+ icdt_pg => {
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::Pg' => '0.16004',
+ },
+ },
+ },
},
rdbms_pg => {
+ include => 'icdt_pg',
req => {
# when changing this list make sure to adjust xt/optional_deps.t
- %$rdbms_pg,
+ 'DBD::Pg' => 0,
},
pod => {
title => 'PostgreSQL support',
},
},
+ _rdbms_mssql_common => {
+ include => '_icdt_strptime_based',
+ },
+
rdbms_mssql_odbc => {
- req => {
- %$rdbms_mssql_odbc,
- },
+ include => [qw( _rdbms_generic_odbc _rdbms_mssql_common )],
pod => {
title => 'MSSQL support via DBD::ODBC',
desc => 'Modules required to connect to MSSQL via DBD::ODBC',
},
rdbms_mssql_sybase => {
+ include => '_rdbms_mssql_common',
req => {
- %$rdbms_mssql_sybase,
+ 'DBD::Sybase' => 0,
},
pod => {
title => 'MSSQL support via DBD::Sybase',
},
rdbms_mssql_ado => {
- req => {
- %$rdbms_mssql_ado,
- },
+ include => [qw( _rdbms_generic_ado _rdbms_mssql_common )],
pod => {
title => 'MSSQL support via DBD::ADO (Windows only)',
desc => 'Modules required to connect to MSSQL via DBD::ADO. This particular DBD is available on Windows only',
},
},
+ _rdbms_msaccess_common => {
+ include => '_icdt_strptime_based',
+ },
+
rdbms_msaccess_odbc => {
- req => {
- %$rdbms_msaccess_odbc,
- },
+ include => [qw( _rdbms_generic_odbc _rdbms_msaccess_common )],
pod => {
title => 'MS Access support via DBD::ODBC',
desc => 'Modules required to connect to MS Access via DBD::ODBC',
},
rdbms_msaccess_ado => {
- req => {
- %$rdbms_msaccess_ado,
- },
+ include => [qw( _rdbms_generic_ado _rdbms_msaccess_common )],
pod => {
title => 'MS Access support via DBD::ADO (Windows only)',
desc => 'Modules required to connect to MS Access via DBD::ADO. This particular DBD is available on Windows only',
},
},
+ # centralize the specification, as we have ICDT tests which can
+ # test the full behavior of RDBMS-specific ICDT on top of bare SQLite
+ # not _-prefixed so that it will show up under req_group_list
+ icdt_mysql => {
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::MySQL' => '0',
+ },
+ },
+ },
+ },
+
rdbms_mysql => {
+ include => 'icdt_mysql',
req => {
- %$rdbms_mysql,
+ 'DBD::mysql' => 0,
},
pod => {
title => 'MySQL support',
},
rdbms_oracle => {
+ include => 'id_shortener',
req => {
- %$rdbms_oracle,
+ 'DBD::Oracle' => 0,
},
pod => {
title => 'Oracle support',
desc => 'Modules required to connect to Oracle',
},
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::Oracle' => '0',
+ },
+ },
+ },
},
rdbms_ase => {
+ include => '_icdt_strptime_based',
req => {
- %$rdbms_ase,
+ 'DBD::Sybase' => 0,
},
pod => {
title => 'Sybase ASE support',
},
},
+ _rdbms_db2_common => {
+ augment => {
+ icdt => {
+ req => {
+ 'DateTime::Format::DB2' => '0',
+ },
+ },
+ },
+ },
+
rdbms_db2 => {
+ include => '_rdbms_db2_common',
req => {
- %$rdbms_db2,
+ 'DBD::DB2' => 0,
},
pod => {
title => 'DB2 support',
},
rdbms_db2_400 => {
- req => {
- %$rdbms_db2_400,
- },
+ include => [qw( _rdbms_generic_odbc _rdbms_db2_common )],
pod => {
title => 'DB2 on AS/400 support',
desc => 'Modules required to connect to DB2 on AS/400',
},
rdbms_informix => {
+ include => '_icdt_strptime_based',
req => {
- %$rdbms_informix,
+ 'DBD::Informix' => 0,
},
pod => {
title => 'Informix support',
},
},
+  # common base for both SQLAnywhere variants; key must be spelled
+  # 'include' (was 'inclide') or the strptime-based ICDT augment is
+  # silently dropped, unlike every other _rdbms_*_common group
+  _rdbms_sqlanywhere_common => {
+    include => '_icdt_strptime_based',
+  },
+
rdbms_sqlanywhere => {
+ include => '_rdbms_sqlanywhere_common',
req => {
- %$rdbms_sqlanywhere,
+ 'DBD::SQLAnywhere' => 0,
},
pod => {
title => 'SQLAnywhere support',
},
rdbms_sqlanywhere_odbc => {
- req => {
- %$rdbms_sqlanywhere_odbc,
- },
+ include => [qw( _rdbms_generic_odbc _rdbms_sqlanywhere_common )],
pod => {
title => 'SQLAnywhere support via DBD::ODBC',
desc => 'Modules required to connect to SQLAnywhere via DBD::ODBC',
},
},
+ _rdbms_firebird_common => {
+ include => '_icdt_strptime_based',
+ },
+
rdbms_firebird => {
+ include => '_rdbms_firebird_common',
req => {
- %$rdbms_firebird,
+ 'DBD::Firebird' => 0,
},
pod => {
title => 'Firebird support',
},
rdbms_firebird_interbase => {
+ include => '_rdbms_firebird_common',
req => {
- %$rdbms_firebird_interbase,
+ 'DBD::InterBase' => 0,
},
pod => {
title => 'Firebird support via DBD::InterBase',
},
rdbms_firebird_odbc => {
- req => {
- %$rdbms_firebird_odbc,
- },
+ include => [qw( _rdbms_generic_odbc _rdbms_firebird_common )],
pod => {
title => 'Firebird support via DBD::ODBC',
desc => 'Modules required to connect to Firebird via DBD::ODBC',
},
},
-# the order does matter because the rdbms support group might require
-# a different version that the test group
- test_rdbms_pg => {
+ test_rdbms_sqlite => {
+ include => 'rdbms_sqlite',
req => {
- $ENV{DBICTEST_PG_DSN}
- ? (
- # when changing this list make sure to adjust xt/optional_deps.t
- %$rdbms_pg,
- 'DBD::Pg' => '2.009002',
- ) : ()
+ ###
+ ### IMPORTANT - do not raise this dependency
+ ### even though many bugfixes are present in newer versions, the general DBIC
+ ### rule is to bend over backwards for available DBDs (given upgrading them is
+ ### often *not* easy or even possible)
+ ###
+ 'DBD::SQLite' => '1.29',
},
},
- test_rdbms_mssql_odbc => {
+ test_rdbms_pg => {
+ include => 'rdbms_pg',
+ env => [
+ DBICTEST_PG_DSN => 1,
+ DBICTEST_PG_USER => 0,
+ DBICTEST_PG_PASS => 0,
+ ],
req => {
- $ENV{DBICTEST_MSSQL_ODBC_DSN}
- ? (
- %$rdbms_mssql_odbc,
- ) : ()
+ # the order does matter because the rdbms support group might require
+      # a different version than the test group
+ #
+ # when changing this list make sure to adjust xt/optional_deps.t
+ 'DBD::Pg' => '2.009002', # specific version to test bytea
},
},
+ test_rdbms_mssql_odbc => {
+ include => 'rdbms_mssql_odbc',
+ env => [
+ DBICTEST_MSSQL_ODBC_DSN => 1,
+ DBICTEST_MSSQL_ODBC_USER => 0,
+ DBICTEST_MSSQL_ODBC_PASS => 0,
+ ],
+ },
+
test_rdbms_mssql_ado => {
- req => {
- $ENV{DBICTEST_MSSQL_ADO_DSN}
- ? (
- %$rdbms_mssql_ado,
- ) : ()
- },
+ include => 'rdbms_mssql_ado',
+ env => [
+ DBICTEST_MSSQL_ADO_DSN => 1,
+ DBICTEST_MSSQL_ADO_USER => 0,
+ DBICTEST_MSSQL_ADO_PASS => 0,
+ ],
},
test_rdbms_mssql_sybase => {
- req => {
- $ENV{DBICTEST_MSSQL_DSN}
- ? (
- %$rdbms_mssql_sybase,
- ) : ()
- },
+ include => 'rdbms_mssql_sybase',
+ env => [
+ DBICTEST_MSSQL_DSN => 1,
+ DBICTEST_MSSQL_USER => 0,
+ DBICTEST_MSSQL_PASS => 0,
+ ],
},
test_rdbms_msaccess_odbc => {
+ include => 'rdbms_msaccess_odbc',
+ env => [
+ DBICTEST_MSACCESS_ODBC_DSN => 1,
+ DBICTEST_MSACCESS_ODBC_USER => 0,
+ DBICTEST_MSACCESS_ODBC_PASS => 0,
+ ],
req => {
- $ENV{DBICTEST_MSACCESS_ODBC_DSN}
- ? (
- %$rdbms_msaccess_odbc,
- %$datetime_basic,
- 'Data::GUID' => '0',
- ) : ()
+ 'Data::GUID' => '0',
},
},
test_rdbms_msaccess_ado => {
+ include => 'rdbms_msaccess_ado',
+ env => [
+ DBICTEST_MSACCESS_ADO_DSN => 1,
+ DBICTEST_MSACCESS_ADO_USER => 0,
+ DBICTEST_MSACCESS_ADO_PASS => 0,
+ ],
req => {
- $ENV{DBICTEST_MSACCESS_ADO_DSN}
- ? (
- %$rdbms_msaccess_ado,
- %$datetime_basic,
- 'Data::GUID' => 0,
- ) : ()
+ 'Data::GUID' => 0,
},
},
test_rdbms_mysql => {
- req => {
- $ENV{DBICTEST_MYSQL_DSN}
- ? (
- %$rdbms_mysql,
- ) : ()
- },
+ include => 'rdbms_mysql',
+ env => [
+ DBICTEST_MYSQL_DSN => 1,
+ DBICTEST_MYSQL_USER => 0,
+ DBICTEST_MYSQL_PASS => 0,
+ ],
},
test_rdbms_oracle => {
+ include => 'rdbms_oracle',
+ env => [
+ DBICTEST_ORA_DSN => 1,
+ DBICTEST_ORA_USER => 0,
+ DBICTEST_ORA_PASS => 0,
+ ],
req => {
- $ENV{DBICTEST_ORA_DSN}
- ? (
- %$rdbms_oracle,
- 'DateTime::Format::Oracle' => '0',
- 'DBD::Oracle' => '1.24',
- ) : ()
+ 'DBD::Oracle' => '1.24',
},
},
test_rdbms_ase => {
- req => {
- $ENV{DBICTEST_SYBASE_DSN}
- ? (
- %$rdbms_ase,
- ) : ()
- },
+ include => 'rdbms_ase',
+ env => [
+ DBICTEST_SYBASE_DSN => 1,
+ DBICTEST_SYBASE_USER => 0,
+ DBICTEST_SYBASE_PASS => 0,
+ ],
},
test_rdbms_db2 => {
- req => {
- $ENV{DBICTEST_DB2_DSN}
- ? (
- %$rdbms_db2,
- ) : ()
- },
+ include => 'rdbms_db2',
+ env => [
+ DBICTEST_DB2_DSN => 1,
+ DBICTEST_DB2_USER => 0,
+ DBICTEST_DB2_PASS => 0,
+ ],
},
test_rdbms_db2_400 => {
- req => {
- $ENV{DBICTEST_DB2_400_DSN}
- ? (
- %$rdbms_db2_400,
- ) : ()
- },
+ include => 'rdbms_db2_400',
+ env => [
+ DBICTEST_DB2_400_DSN => 1,
+ DBICTEST_DB2_400_USER => 0,
+ DBICTEST_DB2_400_PASS => 0,
+ ],
},
test_rdbms_informix => {
- req => {
- $ENV{DBICTEST_INFORMIX_DSN}
- ? (
- %$rdbms_informix,
- ) : ()
- },
+ include => 'rdbms_informix',
+ env => [
+ DBICTEST_INFORMIX_DSN => 1,
+ DBICTEST_INFORMIX_USER => 0,
+ DBICTEST_INFORMIX_PASS => 0,
+ ],
},
test_rdbms_sqlanywhere => {
- req => {
- $ENV{DBICTEST_SQLANYWHERE_DSN}
- ? (
- %$rdbms_sqlanywhere,
- ) : ()
- },
+ include => 'rdbms_sqlanywhere',
+ env => [
+ DBICTEST_SQLANYWHERE_DSN => 1,
+ DBICTEST_SQLANYWHERE_USER => 0,
+ DBICTEST_SQLANYWHERE_PASS => 0,
+ ],
},
test_rdbms_sqlanywhere_odbc => {
- req => {
- $ENV{DBICTEST_SQLANYWHERE_ODBC_DSN}
- ? (
- %$rdbms_sqlanywhere_odbc,
- ) : ()
- },
+ include => 'rdbms_sqlanywhere_odbc',
+ env => [
+ DBICTEST_SQLANYWHERE_ODBC_DSN => 1,
+ DBICTEST_SQLANYWHERE_ODBC_USER => 0,
+ DBICTEST_SQLANYWHERE_ODBC_PASS => 0,
+ ],
},
test_rdbms_firebird => {
- req => {
- $ENV{DBICTEST_FIREBIRD_DSN}
- ? (
- %$rdbms_firebird,
- ) : ()
- },
+ include => 'rdbms_firebird',
+ env => [
+ DBICTEST_FIREBIRD_DSN => 1,
+ DBICTEST_FIREBIRD_USER => 0,
+ DBICTEST_FIREBIRD_PASS => 0,
+ ],
},
test_rdbms_firebird_interbase => {
- req => {
- $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}
- ? (
- %$rdbms_firebird_interbase,
- ) : ()
- },
+ include => 'rdbms_firebird_interbase',
+ env => [
+ DBICTEST_FIREBIRD_INTERBASE_DSN => 1,
+ DBICTEST_FIREBIRD_INTERBASE_USER => 0,
+ DBICTEST_FIREBIRD_INTERBASE_PASS => 0,
+ ],
},
test_rdbms_firebird_odbc => {
- req => {
- $ENV{DBICTEST_FIREBIRD_ODBC_DSN}
- ? (
- %$rdbms_firebird_odbc,
- ) : ()
- },
+ include => 'rdbms_firebird_odbc',
+ env => [
+ DBICTEST_FIREBIRD_ODBC_DSN => 1,
+ DBICTEST_FIREBIRD_ODBC_USER => 0,
+ DBICTEST_FIREBIRD_ODBC_PASS => 0,
+ ],
},
test_memcached => {
+ env => [
+ DBICTEST_MEMCACHED => 1,
+ ],
req => {
- $ENV{DBICTEST_MEMCACHED}
- ? (
- 'Cache::Memcached' => 0,
- ) : ()
+ 'Cache::Memcached' => 0,
},
},
dist_dir => {
+ # we need to run the dbicadmin so we can self-generate its POD
+ # also we do not want surprises in case JSON::XS is in the path
+ # so make sure we get an always-working JSON::Any
+ include => [qw( admin_script _json_xs_compatible_json_any )],
req => {
- %$test_and_dist_json_any,
'ExtUtils::MakeMaker' => '6.64',
'Pod::Inherit' => '0.91',
},
'CPAN::Uploader' => '0.103001',
},
},
-
};
-our %req_availability_cache;
-sub req_list_for {
- my ($class, $group) = @_;
- Carp::croak "req_list_for() expects a requirement group name"
- unless $group;
+### Public API
+
+sub import {
+ my $class = shift;
+
+ if (@_) {
+
+ my $action = shift;
+
+ if ($action eq '-die_without') {
+ my $err;
+ {
+ local $@;
+ eval { $class->die_unless_req_ok_for(\@_); 1 }
+ or $err = $@;
+ }
+ die "\n$err\n" if $err;
+ }
+ elsif ($action eq '-list_missing') {
+ print $class->modreq_missing_for(\@_);
+ print "\n";
+ exit 0;
+ }
+ elsif ($action eq '-skip_all_without') {
+
+ # sanity check - make sure ->current_test is 0 and no plan has been declared
+ do {
+ local $@;
+ defined eval {
+ Test::Builder->new->current_test
+ or
+ Test::Builder->new->has_plan
+ };
+ } and croak("Unable to invoke -skip_all_without after testing has started");
+
+ if ( my $missing = $class->req_missing_for(\@_) ) {
- my $deps = $reqs->{$group}{req}
- or Carp::croak "Requirement group '$group' does not exist";
+ die ("\nMandatory requirements not satisfied during release-testing: $missing\n\n")
+ if $ENV{RELEASE_TESTING} and $class->_groups_to_reqs(\@_)->{release_testing_mandatory};
- return { %$deps };
+ print "1..0 # SKIP requirements not satisfied: $missing\n";
+ exit 0;
+ }
+ }
+ elsif ($action =~ /^-/) {
+ croak "Unknown import-time action '$action'";
+ }
+ else {
+ croak "$class is not an exporter, unable to import '$action'";
+ }
+ }
+
+ 1;
}
+sub unimport {
+ croak( __PACKAGE__ . " does not implement unimport" );
+}
-sub die_unless_req_ok_for {
- my ($class, $group) = @_;
+# OO for (mistakenly considered) ease of extensibility, not due to any need to
+# carry state of any sort. This API is currently used outside, so leave as-is.
+# FIXME - make sure to not propagate this further if module is extracted as a
+# standalone library - keep the stupidity to a DBIC-specific shim!
+#
+sub req_list_for {
+ shift->_groups_to_reqs(shift)->{effective_modreqs};
+}
- Carp::croak "die_unless_req_ok_for() expects a requirement group name"
- unless $group;
+sub modreq_list_for {
+ shift->_groups_to_reqs(shift)->{modreqs};
+}
- $class->_check_deps($group)->{status}
- or die sprintf( "Required modules missing, unable to continue: %s\n", $class->_check_deps($group)->{missing} );
+sub req_group_list {
+ +{ map
+ { $_ => $_[0]->_groups_to_reqs($_) }
+ grep { $_ !~ /^_/ } keys %$dbic_reqs
+ }
+}
+
+sub req_errorlist_for { shift->modreq_errorlist_for(shift) } # deprecated
+sub modreq_errorlist_for {
+ my ($self, $groups) = @_;
+ $self->_errorlist_for_modreqs( $self->_groups_to_reqs($groups)->{modreqs} );
}
sub req_ok_for {
- my ($class, $group) = @_;
+ shift->req_missing_for(shift) ? 0 : 1;
+}
+
+sub req_missing_for {
+ my ($self, $groups) = @_;
+
+ my $reqs = $self->_groups_to_reqs($groups);
+ my $mods_missing = $self->modreq_missing_for($groups);
+
+ return '' if
+ ! $mods_missing
+ and
+ ! $reqs->{missing_envvars}
+ ;
+
+ my @res = $mods_missing || ();
+
+ push @res, 'the following group(s) of environment variables: ' . join ' and ', sort map
+ { __envvar_group_desc($_) }
+ @{$reqs->{missing_envvars}}
+ if $reqs->{missing_envvars};
- Carp::croak "req_ok_for() expects a requirement group name"
- unless $group;
+ return (
+ ( join ' as well as ', @res )
+ .
+ ( $reqs->{modreqs_fully_documented} ? " (see @{[ ref $self || $self ]} documentation for details)" : '' ),
+ );
+}
+
+sub modreq_missing_for {
+ my ($self, $groups) = @_;
+
+ my $reqs = $self->_groups_to_reqs($groups);
+ my $modreq_errors = $self->_errorlist_for_modreqs($reqs->{modreqs})
+ or return '';
- return $class->_check_deps($group)->{status};
+ join ' ', map
+ { $reqs->{modreqs}{$_} ? qq("$_~>=$reqs->{modreqs}{$_}") : $_ }
+ sort { lc($a) cmp lc($b) } keys %$modreq_errors
+ ;
}
-sub req_missing_for {
- my ($class, $group) = @_;
+my $tb;
+sub skip_without {
+ my ($self, $groups) = @_;
- Carp::croak "req_missing_for() expects a requirement group name"
- unless $group;
+ $tb ||= do { local $@; eval { Test::Builder->new } }
+ or croak "Calling skip_without() before loading Test::Builder makes no sense";
- return $class->_check_deps($group)->{missing};
+ if ( my $err = $self->req_missing_for($groups) ) {
+ my ($fn, $ln) = (caller(0))[1,2];
+ $tb->skip("block in $fn around line $ln requires $err");
+ local $^W = 0;
+ last SKIP;
+ }
+
+ 1;
+}
+
+sub die_unless_req_ok_for {
+ if (my $err = shift->req_missing_for(shift) ) {
+ die "Unable to continue due to missing requirements: $err\n";
+ }
}
-sub req_errorlist_for {
- my ($class, $group) = @_;
- Carp::croak "req_errorlist_for() expects a requirement group name"
- unless $group;
- return $class->_check_deps($group)->{errorlist};
+### Private functions
+
+# potentially shorten group desc
+sub __envvar_group_desc {
+ my @envs = @{$_[0]};
+
+ my (@res, $last_prefix);
+ while (my $ev = shift @envs) {
+ my ($pref, $sep, $suff) = split / ([\_\-]) (?= [^\_\-]+ \z )/x, $ev;
+
+ if ( defined $sep and ($last_prefix||'') eq $pref ) {
+ push @res, "...${sep}${suff}"
+ }
+ else {
+ push @res, $ev;
+ }
+
+ $last_prefix = $pref if $sep;
+ }
+
+ join '/', @res;
}
-sub _check_deps {
- my ($class, $group) = @_;
+my $groupname_re = qr/ [A-Z_a-z][0-9A-Z_a-z]* /x;
+my $modname_re = qr/ [A-Z_a-z] [0-9A-Z_a-z]* (?:::[0-9A-Z_a-z]+)* /x;
+my $modver_re = qr/ [0-9]+ (?: \. [0-9]+ )? /x;
+
+# Expand includes from a random group in a specific order:
+# nonvariable groups first, then their includes, then the variable groups,
+# then their includes.
+# This allows reliably marking the rest of the mod reqs as variable (this is
+# also why variable includes are currently not allowed)
+sub __expand_includes {
+ my ($groups, $seen) = @_;
+
+ # !! DIFFERENT !! behavior and return depending on invocation mode
+ # (easier to recurse this way)
+ my $is_toplevel = $seen
+ ? 0
+ : !! ($seen = {})
+ ;
+
+ my ($res_per_type, $missing_envvars);
+
+ # breadth-first evaluation, with non-variable includes on top
+ for my $g (@$groups) {
+
+ croak "Invalid requirement group name '$g': only ascii alphanumerics and _ are allowed"
+ if $g !~ qr/ \A $groupname_re \z/x;
- return $req_availability_cache{$group} ||= do {
+ my $r = $dbic_reqs->{$g}
+ or croak "Requirement group '$g' is not defined";
- my $deps = $class->req_list_for ($group);
+ # always do this check *before* the $seen check
+ croak "Group '$g' with variable effective_modreqs can not be specified as an 'include'"
+ if ( $r->{env} and ! $is_toplevel );
- my %errors;
- for my $mod (keys %$deps) {
- my $req_line = "require $mod;";
- if (my $ver = $deps->{$mod}) {
- $req_line .= "$mod->VERSION($ver);";
+ next if $seen->{$g}++;
+
+ my $req_type = 'static';
+
+ if ( my @e = @{$r->{env}||[]} ) {
+
+ croak "Unexpected 'env' attribute under group '$g' (only allowed in test_* groups)"
+ unless $g =~ /^test_/;
+
+ croak "Unexpected *odd* list in 'env' under group '$g'"
+ if @e % 2;
+
+ # deconstruct the whole thing
+ my (@group_envnames_list, $some_envs_required, $some_required_missing);
+ while (@e) {
+ push @group_envnames_list, my $envname = shift @e;
+
+ # env required or not
+ next unless shift @e;
+
+ $some_envs_required ||= 1;
+
+ $some_required_missing ||= (
+ ! defined $ENV{$envname}
+ or
+ ! length $ENV{$envname}
+ );
+ }
+
+ croak "None of the envvars in group '$g' declared as required, making the requirement moot"
+ unless $some_envs_required;
+
+ if ($some_required_missing) {
+ push @{$missing_envvars->{$g}}, \@group_envnames_list;
+ $req_type = 'variable';
}
+ }
+
+ push @{$res_per_type->{"base_${req_type}"}}, $g;
- eval $req_line;
+ if (my $i = $dbic_reqs->{$g}{include}) {
+ $i = [ $i ] unless ref $i eq 'ARRAY';
- $errors{$mod} = $@ if $@;
+ croak "Malformed 'include' for group '$g': must be another existing group name or arrayref of existing group names"
+ unless @$i;
+
+ push @{$res_per_type->{"incs_${req_type}"}}, @$i;
}
+ }
+
+ my @ret = map {
+ @{ $res_per_type->{"base_${_}"} || [] },
+ ( $res_per_type->{"incs_${_}"} ? __expand_includes( $res_per_type->{"incs_${_}"}, $seen ) : () ),
+ } qw(static variable);
+
+ return ! $is_toplevel ? @ret : do {
+ my $rv = {};
+ $rv->{$_} = {
+ idx => 1 + keys %$rv,
+ missing_envvars => $missing_envvars->{$_},
+ } for @ret;
+ $rv->{$_}{user_requested} = 1 for @$groups;
+ $rv;
+ };
+}
+
+### Private OO API
+our %req_unavailability_cache;
+
+# this method is just a lister and envvar/metadata checker - it does not try to load anything
+sub _groups_to_reqs {
+ my ($self, $want) = @_;
- my $res;
+ $want = [ $want || () ]
+ unless ref $want eq 'ARRAY';
- if (keys %errors) {
- my $missing = join (', ', map { $deps->{$_} ? "$_ >= $deps->{$_}" : $_ } (sort keys %errors) );
- $missing .= " (see $class for details)" if $reqs->{$group}{pod};
- $res = {
- status => 0,
- errorlist => \%errors,
- missing => $missing,
- };
+ croak "@{[ (caller(1))[3] ]}() expects a requirement group name or arrayref of group names"
+ unless @$want;
+
+ my $ret = {
+ modreqs => {},
+ modreqs_fully_documented => 1,
+ };
+
+ my $groups;
+ for my $piece (@$want) {
+ if ($piece =~ qr/ \A $groupname_re \z /x) {
+ push @$groups, $piece;
+ }
+ elsif ( my ($mod, $ver) = $piece =~ qr/ \A ($modname_re) \>\= ($modver_re) \z /x ) {
+ croak "Ad hoc module specification lists '$mod' twice"
+ if exists $ret->{modreqs}{$mod};
+
+ croak "Ad hoc module specification '${mod} >= $ver' (or greater) not listed in the test_adhoc optdep group" if (
+ ! defined $dbic_reqs->{test_adhoc}{req}{$mod}
+ or
+ $dbic_reqs->{test_adhoc}{req}{$mod} < $ver
+ );
+
+ $ret->{modreqs}{$mod} = $ver;
+ $ret->{modreqs_fully_documented} = 0;
}
else {
- $res = {
- status => 1,
- errorlist => {},
- missing => '',
- };
+ croak "Unsupported argument '$piece' supplied to @{[ (caller(1))[3] ]}()"
}
+ }
- $res;
- };
+ my $all_groups = __expand_includes($groups);
+
+ # pre-assemble list of augmentations, perform basic sanity checks
+  # Note that below we *DO NOT* respect the source/target relationship, but
+ # instead always default to augment the "later" group
+ # This is done so that the "stable/variable" boundary keeps working as
+ # expected
+ my $augmentations;
+ for my $requesting_group (keys %$all_groups) {
+ if (my $ag = $dbic_reqs->{$requesting_group}{augment}) {
+ for my $target_group (keys %$ag) {
+
+ croak "Group '$requesting_group' claims to augment a non-existent group '$target_group'"
+ unless $dbic_reqs->{$target_group};
+
+ croak "Augmentation combined with variable effective_modreqs currently unsupported for group '$requesting_group'"
+ if $dbic_reqs->{$requesting_group}{env};
+
+ croak "Augmentation of group '$target_group' with variable effective_modreqs unsupported (requested by '$requesting_group')"
+ if $dbic_reqs->{$target_group}{env};
+
+ if (my @foreign = grep { $_ ne 'req' } keys %{$ag->{$target_group}} ) {
+          croak "Only 'req' augmentations are currently supported (group '$requesting_group' attempts to alter '$foreign[0]' of group '$target_group')";
+ }
+
+ $ret->{augments}{$target_group} = 1;
+
+ # no augmentation for stuff that hasn't been selected
+ if ( $all_groups->{$target_group} and my $ar = $ag->{$target_group}{req} ) {
+ push @{$augmentations->{
+ ( $all_groups->{$requesting_group}{idx} < $all_groups->{$target_group}{idx} )
+ ? $target_group
+ : $requesting_group
+ }}, $ar;
+ }
+ }
+ }
+ }
+
+ for my $group (sort { $all_groups->{$a}{idx} <=> $all_groups->{$b}{idx} } keys %$all_groups ) {
+
+ my $group_reqs = $dbic_reqs->{$group}{req};
+
+ # sanity-check
+ for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) {
+ for (keys %$req_bag) {
+
+ $_ =~ / \A $modname_re \z /x
+ or croak "Requirement '$_' in group '$group' is not a valid module name";
+
+ # !!!DO NOT CHANGE!!!
+ # remember - version.pm may not be available on the system
+ croak "Requirement '$_' in group '$group' specifies an invalid version '$req_bag->{$_}' (only plain non-underscored floating point decimals are supported)"
+ if ( ($req_bag->{$_}||0) !~ qr/ \A $modver_re \z /x );
+ }
+ }
+
+ if (my $e = $all_groups->{$group}{missing_envvars}) {
+ push @{$ret->{missing_envvars}}, @$e;
+ }
+
+ # assemble into the final ret
+ for my $type (
+ 'modreqs',
+ ( $ret->{missing_envvars} ? () : 'effective_modreqs' ),
+ ) {
+ for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) {
+ for my $mod (keys %$req_bag) {
+
+ $ret->{$type}{$mod} = $req_bag->{$mod}||0 if (
+
+ ! exists $ret->{$type}{$mod}
+ or
+ # we sanitized the version to be numeric above - we can just -gt it
+ ($req_bag->{$mod}||0) > $ret->{$type}{$mod}
+
+ );
+ }
+ }
+ }
+
+ $ret->{modreqs_fully_documented} &&= !!$dbic_reqs->{$group}{pod}
+ if $all_groups->{$group}{user_requested};
+
+ $ret->{release_testing_mandatory} ||= !!$dbic_reqs->{$group}{release_testing_mandatory};
+ }
+
+ return $ret;
}
-sub req_group_list {
- return { map { $_ => { %{ $reqs->{$_}{req} || {} } } } (keys %$reqs) };
+
+# this method tries to load specified modreqs and returns a hashref of
+# module/loaderror pairs for anything that failed
+sub _errorlist_for_modreqs {
+ # args supposedly already went through _groups_to_reqs and are therefore sanitized
+ # safe to eval at will
+ my ($self, $reqs) = @_;
+
+ my $ret;
+
+ for my $m ( keys %$reqs ) {
+ my $v = $reqs->{$m};
+
+ if (! exists $req_unavailability_cache{$m}{$v} ) {
+ local $@;
+ eval( "require $m;" . ( $v ? "$m->VERSION(q($v))" : '' ) );
+ $req_unavailability_cache{$m}{$v} = $@;
+ }
+
+ $ret->{$m} = $req_unavailability_cache{$m}{$v}
+ if $req_unavailability_cache{$m}{$v};
+ }
+
+ $ret;
}
+
# This is to be called by the author only (automatically in Makefile.PL)
sub _gen_pod {
my ($class, $distver, $pod_dir) = @_;
File::Path::mkpath([$dir]);
- my $sqltver = $class->req_list_for ('deploy')->{'SQL::Translator'}
+ my $sqltver = $class->req_list_for('deploy')->{'SQL::Translator'}
or die "Hrmm? No sqlt dep?";
- my @chunks = (
- <<"EOC",
+
+ my @chunks;
+
+#@@
+#@@ HEADER
+#@@
+ push @chunks, <<"EOC";
#########################################################################
##################### A U T O G E N E R A T E D ########################
#########################################################################
# will be lost. If you need to change the generated text edit _gen_pod()
# at the end of $modfn
#
+
+=head1 NAME
+
+$class - Optional module dependency specifications (for module authors)
EOC
- '=head1 NAME',
- "$class - Optional module dependency specifications (for module authors)",
- '=head1 SYNOPSIS',
- <<"EOS",
-Somewhere in your build-file (e.g. L<Module::Install>'s Makefile.PL):
+
+
+#@@
+#@@ SYNOPSIS HEADING
+#@@
+ push @chunks, <<"EOC";
+=head1 SYNOPSIS
+
+Somewhere in your build-file (e.g. L<ExtUtils::MakeMaker>'s F<Makefile.PL>):
...
- configure_requires 'DBIx::Class' => '$distver';
+ \$EUMM_ARGS{CONFIGURE_REQUIRES} = {
+ \%{ \$EUMM_ARGS{CONFIGURE_REQUIRES} || {} },
+ 'DBIx::Class' => '$distver',
+ };
- require $class;
+ ...
- my \$deploy_deps = $class->req_list_for('deploy');
+ my %DBIC_DEPLOY_AND_ORACLE_DEPS = %{ eval {
+ require $class;
+ $class->req_list_for([qw( deploy rdbms_oracle icdt )]);
+ } || {} };
- for (keys %\$deploy_deps) {
- requires \$_ => \$deploy_deps->{\$_};
- }
+ \$EUMM_ARGS{PREREQ_PM} = {
+ \%DBIC_DEPLOY_AND_ORACLE_DEPS,
+ \%{ \$EUMM_ARGS{PREREQ_PM} || {} },
+ };
...
-Note that there are some caveats regarding C<configure_requires()>, more info
-can be found at L<Module::Install/configure_requires>
-EOS
- '=head1 DESCRIPTION',
- <<'EOD',
+ ExtUtils::MakeMaker::WriteMakefile(\%EUMM_ARGS);
+
+B<Note>: The C<eval> protection within the example is due to support for
+requirements during L<the C<configure> build phase|CPAN::Meta::Spec/Phases>
+not being available on a sufficient portion of production installations of
+Perl. Robust support for such dependency requirements is available in the
+L<CPAN> installer only since version C<1.94_56> first made available for
+production with perl version C<5.12>. It is the belief of the current
+maintainer that support for requirements during the C<configure> build phase
+will not be sufficiently ubiquitous until the B<year 2020> at the earliest,
+hence the extra care demonstrated above. It should also be noted that some
+3rd party installers (e.g. L<cpanminus|App::cpanminus>) do the right thing
+with configure requirements independent from the versions of perl and CPAN
+available.
+EOC
+
+
+#@@
+#@@ DESCRIPTION HEADING
+#@@
+ push @chunks, <<'EOC';
+=head1 DESCRIPTION
+
Some of the less-frequently used features of L<DBIx::Class> have external
module dependencies on their own. In order not to burden the average user
-with modules he will never use, these optional dependencies are not included
+with modules they will never use, these optional dependencies are not included
in the base Makefile.PL. Instead an exception with a descriptive message is
-thrown when a specific feature is missing one or several modules required for
-its operation. This module is the central holding place for the current list
+thrown when a specific feature can't find one or several modules required for
+its operation. This module is the central holding place for the current list
of such dependencies, for DBIx::Class core authors, and DBIx::Class extension
authors alike.
-EOD
- '=head1 CURRENT REQUIREMENT GROUPS',
- <<'EOD',
-Dependencies are organized in C<groups> and each group can list one or more
-required modules, with an optional minimum version (or 0 for any version).
-The group name can be used in the
-EOD
- );
- for my $group (sort keys %$reqs) {
- my $p = $reqs->{$group}{pod}
- or next;
+Dependencies are organized in L<groups|/CURRENT REQUIREMENT GROUPS> where each
+group can list one or more required modules, with an optional minimum version
+(or 0 for any version). In addition groups prefixed with C<test_> can specify
+a set of environment variables, some (or all) of which are marked as required
+for the group to be considered by L</req_list_for>
+
+Each group name (or a combination thereof) can be used in the
+L<public methods|/METHODS> as described below.
+EOC
+
+
+#@@
+#@@ REQUIREMENT GROUPLIST HEADING
+#@@
+ push @chunks, '=head1 CURRENT REQUIREMENT GROUPS';
+
+ my $standalone_info;
- my $modlist = $reqs->{$group}{req}
- or next;
+ for my $group (sort keys %$dbic_reqs) {
+
+ my $info = $standalone_info->{$group} ||= $class->_groups_to_reqs($group);
+
+ next unless (
+ $info->{modreqs_fully_documented}
+ and
+ ( $info->{augments} or $info->{modreqs} )
+ );
- next unless keys %$modlist;
+ my $p = $dbic_reqs->{$group}{pod};
push @chunks, (
"=head2 $p->{title}",
- "$p->{desc}",
+ "=head3 $group",
+ $p->{desc},
'=over',
- ( map { "=item * $_" . ($modlist->{$_} ? " >= $modlist->{$_}" : '') } (sort keys %$modlist) ),
- '=back',
- "Requirement group: B<$group>",
);
+
+ if ( keys %{ $info->{modreqs}||{} } ) {
+ push @chunks, map
+ { "=item * $_" . ($info->{modreqs}{$_} ? " >= $info->{modreqs}{$_}" : '') }
+ ( sort keys %{ $info->{modreqs} } )
+ ;
+ }
+ else {
+ push @chunks, '=item * No standalone requirements',
+ }
+
+ push @chunks, '=back';
+
+ for my $ag ( sort keys %{ $info->{augments} || {} } ) {
+ my $ag_info = $standalone_info->{$ag} ||= $class->_groups_to_reqs($ag);
+
+ my $newreqs = $class->modreq_list_for([ $group, $ag ]);
+ for (keys %$newreqs) {
+ delete $newreqs->{$_} if (
+ ( defined $info->{modreqs}{$_} and $info->{modreqs}{$_} == $newreqs->{$_} )
+ or
+ ( defined $ag_info->{modreqs}{$_} and $ag_info->{modreqs}{$_} == $newreqs->{$_} )
+ );
+ }
+
+ if (keys %$newreqs) {
+ push @chunks, (
+ "Combined with L</$ag> additionally requires:",
+ '=over',
+ ( map
+ { "=item * $_" . ($newreqs->{$_} ? " >= $newreqs->{$_}" : '') }
+ ( sort keys %$newreqs )
+ ),
+ '=back',
+ );
+ }
+ }
}
- push @chunks, (
- '=head1 METHODS',
- '=head2 req_group_list',
- '=over',
- '=item Arguments: none',
- '=item Return Value: \%list_of_requirement_groups',
- '=back',
- <<'EOD',
+
+#@@
+#@@ API DOCUMENTATION HEADING
+#@@
+ push @chunks, <<'EOC';
+
+=head1 IMPORT-LIKE ACTIONS
+
+Even though this module is not an L<Exporter>, it recognizes several C<actions>
+supplied to its C<import> method.
+
+=head2 -skip_all_without
+
+=over
+
+=item Arguments: @group_names
+
+=back
+
+A convenience wrapper for use during testing:
+EOC
+
+ push @chunks, " use $class -skip_all_without => qw(admin test_rdbms_mysql);";
+
+ push @chunks, 'Roughly equivalent to the following code:';
+
+ push @chunks, sprintf <<'EOS', ($class) x 2;
+
+ BEGIN {
+ require %s;
+ if ( my $missing = %s->req_missing_for(\@group_names_) ) {
+ print "1..0 # SKIP requirements not satisfied: $missing\n";
+ exit 0;
+ }
+ }
+EOS
+
+ push @chunks, <<'EOC';
+
+It also takes into account the C<RELEASE_TESTING> environment variable and
+behaves like L</-die_without> for any requirement groups marked as
+C<release_testing_mandatory>.
+
+=head2 -die_without
+
+=over
+
+=item Arguments: @group_names
+
+=back
+
+A convenience wrapper around L</die_unless_req_ok_for>:
+EOC
+
+ push @chunks, " use $class -die_without => qw(deploy admin);";
+
+ push @chunks, <<'EOC';
+
+=head2 -list_missing
+
+=over
+
+=item Arguments: @group_names
+
+=back
+
+A convenience wrapper around L</modreq_missing_for>:
+
+ perl -Ilib -MDBIx::Class::Optional::Dependencies=-list_missing,deploy,admin | cpanm
+
+=head1 METHODS
+
+=head2 req_group_list
+
+=over
+
+=item Arguments: none
+
+=item Return Value: \%list_of_requirement_groups
+
+=back
+
This method should be used by DBIx::Class packagers, to get a hashref of all
-dependencies keyed by dependency group. Each key (group name) can be supplied
-to one of the group-specific methods below.
-EOD
-
- '=head2 req_list_for',
- '=over',
- '=item Arguments: $group_name',
- '=item Return Value: \%list_of_module_version_pairs',
- '=back',
- <<'EOD',
+dependencies B<keyed> by dependency group. Each key (group name), or a combination
+thereof (as an arrayref) can be supplied to the methods below.
+The B<values> of the returned hash are currently a set of options B<without a
+well defined structure>. If you have use for any of the contents - contact the
+maintainers, instead of treating this as public (let alone stable) API.
+
+=head2 req_list_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: \%set_of_module_version_pairs
+
+=back
+
This method should be used by DBIx::Class extension authors, to determine the
-version of modules a specific feature requires in the B<current> version of
-DBIx::Class. See the L</SYNOPSIS> for a real-world
-example.
-EOD
-
- '=head2 req_ok_for',
- '=over',
- '=item Arguments: $group_name',
- '=item Return Value: 1|0',
- '=back',
- <<'EOD',
-Returns true or false depending on whether all modules required by
-C<$group_name> are present on the system and loadable.
-EOD
-
- '=head2 req_missing_for',
- '=over',
- '=item Arguments: $group_name',
- '=item Return Value: $error_message_string',
- '=back',
- <<"EOD",
-Returns a single line string suitable for inclusion in larger error messages.
-This method would normally be used by DBIx::Class core-module author, to
-indicate to the user that he needs to install specific modules before he will
-be able to use a specific feature.
+version of modules a specific set of features requires for this version of
+DBIx::Class (regardless of their availability on the system).
+See the L</SYNOPSIS> for a real-world example.
+
+When handling C<test_*> groups this method behaves B<differently> from
+L</modreq_list_for> below (and is the only such inconsistency among the
+C<req_*> methods). If a particular group declares as requirements some
+C<environment variables> and these requirements are not satisfied (the envvars
+are unset) - then the C<module requirements> of this group are not included in
+the returned list.
+
+=head2 modreq_list_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: \%set_of_module_version_pairs
+
+=back
+
+Same as L</req_list_for> but does not take into consideration any
+C<environment variable requirements> - returns just the list of required
+modules.
+
+=head2 req_ok_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: 1|0
+
+=back
+
+Returns true or false depending on whether all modules/envvars required by
+the group(s) are loadable/set on the system.
+
+=head2 req_missing_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: $error_message_string
+
+=back
+
+Returns a single-line string suitable for inclusion in larger error messages.
+This method would normally be used by DBIx::Class core features, to indicate to
+the user that they need to install specific modules and/or set specific
+environment variables before being able to use a specific feature set.
For example if some of the requirements for C<deploy> are not available,
the returned string could look like:
+EOC
- SQL::Translator >= $sqltver (see $class for details)
+ push @chunks, qq{ "SQL::Translator~>=$sqltver" (see $class documentation for details)};
+ push @chunks, <<'EOC';
The author is expected to prepend the necessary text to this message before
-returning the actual error seen by the user.
-EOD
-
- '=head2 die_unless_req_ok_for',
- '=over',
- '=item Arguments: $group_name',
- '=back',
- <<'EOD',
-Checks if L</req_ok_for> passes for the supplied C<$group_name>, and
+returning the actual error seen by the user. See also L</modreq_missing_for>
+
+=head2 modreq_missing_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: $error_message_string
+
+=back
+
+Same as L</req_missing_for> except that the error string is guaranteed to be
+either empty, or contain a set of module requirement specifications suitable
+for piping to e.g. L<cpanminus|App::cpanminus>. The method explicitly does not
+attempt to validate the state of required environment variables (if any).
+
+For instance if some of the requirements for C<deploy> are not available,
+the returned string could look like:
+EOC
+
+ push @chunks, qq{ "SQL::Translator~>=$sqltver"};
+
+ push @chunks, <<'EOC';
+
+See also L</-list_missing>.
+
+=head2 skip_without
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=back
+
+A convenience wrapper around L<skip|Test::More/SKIP>. It takes neither
+a reason (it is generated by L</req_missing_for>) nor an amount of skipped tests
+(it is always C<1>, thus mandating unconditional use of
+L<done_testing|Test::More/done_testing>). Most useful in combination with ad hoc
+requirement specifications:
+EOC
+
+ push @chunks, <<EOC;
+ SKIP: {
+    $class->skip_without([ qw( deploy YAML>=0.90 ) ]);
+
+ ...
+ }
+EOC
+
+ push @chunks, <<'EOC';
+
+=head2 die_unless_req_ok_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=back
+
+Checks if L</req_ok_for> passes for the supplied group(s), and
in case of failure throws an exception including the information
-from L</req_missing_for>.
-EOD
-
- '=head2 req_errorlist_for',
- '=over',
- '=item Arguments: $group_name',
- '=item Return Value: \%list_of_loaderrors_per_module',
- '=back',
- <<'EOD',
+from L</req_missing_for>. See also L</-die_without>.
+
+=head2 modreq_errorlist_for
+
+=over
+
+=item Arguments: $group_name | \@group_names
+
+=item Return Value: \%set_of_loaderrors_per_module
+
+=back
+
Returns a hashref containing the actual errors that occurred while attempting
-to load each module in the requirement group.
-EOD
- '=head1 FURTHER QUESTIONS?',
- 'Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.',
- '=head1 COPYRIGHT AND LICENSE',
- <<'EOL',
+to load each module in the requirement group(s).
+
+=head2 req_errorlist_for
+
+Deprecated method name, equivalent (via proxy) to L</modreq_errorlist_for>.
+
+EOC
+
+#@@
+#@@ FOOTER
+#@@
+ push @chunks, <<'EOC';
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
-EOL
-
- );
+EOC
- open (my $fh, '>', $podfn) or Carp::croak "Unable to write to $podfn: $!";
- print $fh join ("\n\n", @chunks);
- print $fh "\n";
- close ($fh);
+ eval {
+ open (my $fh, '>', $podfn) or die;
+ print $fh join ("\n\n", @chunks) or die;
+ print $fh "\n" or die;
+ close ($fh) or die;
+ } or croak( "Unable to write $podfn: " . ( $! || $@ || 'unknown error') );
}
1;
use base qw/DBIx::Class/;
use DBIx::Class::Carp;
use DBIx::Class::ResultSetColumn;
+use DBIx::Class::ResultClass::HashRefInflator;
use Scalar::Util qw/blessed weaken reftype/;
use DBIx::Class::_Util qw(
fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION
$self->{_result_inflator}{is_hri} = ( (
! $self->{_result_inflator}{is_core_row}
and
- $inflator_cref == (
- require DBIx::Class::ResultClass::HashRefInflator
- &&
- DBIx::Class::ResultClass::HashRefInflator->can('inflate_result')
- )
+ $inflator_cref == \&DBIx::Class::ResultClass::HashRefInflator::inflate_result
) ? 1 : 0 ) unless defined $self->{_result_inflator}{is_hri};
if @violating_idx;
$unrolled_non_null_cols_to_check = join (',', @$check_non_null_cols);
+
+ utf8::upgrade($unrolled_non_null_cols_to_check)
+ if DBIx::Class::_ENV_::STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE;
}
my $next_cref =
=head2 where
-=over 4
-
-Adds to the WHERE clause.
+Adds extra conditions to the resultset, combined with the preexisting C<WHERE>
+conditions, same as the B<first> argument to the L<search operator|/search>
# only return rows WHERE deleted IS NULL for all searches
__PACKAGE__->resultset_attributes({ where => { deleted => undef } });
-Can be overridden by passing C<< { where => undef } >> as an attribute
-to a resultset.
-
-For more complicated where clauses see L<SQL::Abstract/WHERE CLAUSES>.
-
-=back
+Note that the above example is
+L<strongly discouraged|DBIx::Class::ResultSource/resultset_attributes>.
=head2 cache
});
};
+ utf8::upgrade($src)
+ if DBIx::Class::_ENV_::STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE;
+
return (
$args->{eval} ? ( eval "sub $src" || die $@ ) : $src,
$check_null_columns,
# working title - we are hoping to extract this eventually...
our $null_branch_class = 'DBIx::ResultParser::RelatedNullBranch';
+sub __wrap_in_strictured_scope {
+ " { use strict; use warnings; use warnings FATAL => 'uninitialized';\n$_[0]\n }"
+}
+
sub assemble_simple_parser {
#my ($args) = @_;
# change the quoted placeholders to unquoted alias-references
$parser_src =~ s/ \' \xFF__VALPOS__(\d+)__\xFF \' /"\$_->[$1]"/gex;
- $parser_src = " { use strict; use warnings FATAL => 'all';\n$parser_src\n }";
+ __wrap_in_strictured_scope($parser_src);
}
# the simple non-collapsing nested structure recursor
my @idcol_args = $no_rowid_container ? ('', '') : (
', %cur_row_ids', # only declare the variable if we'll use it
- join ("\n", map { qq(\$cur_row_ids{$_} = ) . (
- # in case we prune - we will never hit these undefs
- $args->{prune_null_branches} ? qq(\$cur_row_data->[$_];)
- : HAS_DOR ? qq(\$cur_row_data->[$_] // "\0NULL\xFF\$rows_pos\xFF$_\0";)
- : qq(defined(\$cur_row_data->[$_]) ? \$cur_row_data->[$_] : "\0NULL\xFF\$rows_pos\xFF$_\0";)
- ) } sort { $a <=> $b } keys %{ $stats->{idcols_seen} } ),
+ join ("\n", map {
+ my $quoted_null_val = qq( "\0NULL\xFF\${rows_pos}\xFF${_}\0" );
+ qq(\$cur_row_ids{$_} = ) . (
+ # in case we prune - we will never hit these undefs
+ $args->{prune_null_branches} ? qq( \$cur_row_data->[$_]; )
+ : HAS_DOR ? qq( \$cur_row_data->[$_] // $quoted_null_val; )
+ : qq( defined(\$cur_row_data->[$_]) ? \$cur_row_data->[$_] : $quoted_null_val; )
+ )
+ } sort { $a <=> $b } keys %{ $stats->{idcols_seen} } ),
);
my $parser_src = sprintf (<<'EOS', @idcol_args, $top_node_key_assembler||'', $top_node_key, join( "\n", @{$data_assemblers||[]} ) );
$no_rowid_container ? "\$cur_row_data->[$1]" : "\$cur_row_ids{$1}"
/gex;
- $parser_src = " { use strict; use warnings FATAL => 'all';\n$parser_src\n }";
+ __wrap_in_strictured_scope($parser_src);
}
# note this is a || not a ||=, the difference is important
: $_[0]->{_result_source} || do {
- my $class = ref $_[0];
$_[0]->can('result_source_instance')
? $_[0]->result_source_instance
: $_[0]->throw_exception(
- "No result source instance registered for $class, did you forget to call $class->table(...) ?"
+ "No result source instance registered for @{[ ref $_[0] ]}, did you forget to call @{[ ref $_[0] ]}->table(...) ?"
)
}
;
sub throw_exception {
my $self=shift;
- if (ref $self && ref $self->result_source ) {
- $self->result_source->throw_exception(@_)
+ if (ref $self && ref (my $rsrc = try { $self->result_source_instance } ) ) {
+ $rsrc->throw_exception(@_)
}
else {
DBIx::Class::Exception->throw(@_);
use warnings;
use strict;
-use base qw( DBIx::Class::SQLMaker );
-
BEGIN {
- use DBIx::Class::Optional::Dependencies;
- die('The following extra modules are required for Oracle-based Storages ' . DBIx::Class::Optional::Dependencies->req_missing_for ('id_shortener') . "\n" )
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('id_shortener');
+ require DBIx::Class::Optional::Dependencies;
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('id_shortener') ) {
+ die "The following extra modules are required for Oracle-based Storages: $missing\n";
+ }
+ require Digest::MD5;
}
+use base 'DBIx::Class::SQLMaker';
+
sub new {
my $self = shift;
my %opts = (ref $_[0] eq 'HASH') ? %{$_[0]} : @_;
@keywords = $to_shorten unless @keywords;
# get a base36 md5 of the identifier
- require Digest::MD5;
- require Math::BigInt;
- require Math::Base36;
my $b36sum = Math::Base36::encode_base36(
Math::BigInt->from_hex (
'0x' . Digest::MD5::md5_hex ($to_shorten)
sub thaw {
my ($self, $obj) = @_;
local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
- require Storable;
return Storable::thaw($obj);
}
=cut
sub freeze {
- require Storable;
return Storable::nfreeze($_[1]);
}
sub dclone {
my ($self, $obj) = @_;
local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
- require Storable;
return Storable::dclone($obj);
}
carp_once "compose_connection deprecated as of 0.08000"
unless $INC{"DBIx/Class/CDBICompat.pm"};
- my $base = 'DBIx::Class::ResultSetProxy';
try {
- eval "require ${base};"
+ require DBIx::Class::ResultSetProxy;
}
catch {
$self->throw_exception
- ("No arguments to load_classes and couldn't load ${base} ($_)")
+ ("No arguments to load_classes and couldn't load DBIx::Class::ResultSetProxy ($_)")
};
if ($self eq $target) {
foreach my $source_name ($self->sources) {
my $source = $self->source($source_name);
my $class = $source->result_class;
- $self->inject_base($class, $base);
+ $self->inject_base($class, 'DBIx::Class::ResultSetProxy');
$class->mk_classdata(resultset_instance => $source->resultset);
$class->mk_classdata(class_resolver => $self);
}
return $self;
}
- my $schema = $self->compose_namespace($target, $base);
+ my $schema = $self->compose_namespace($target, 'DBIx::Class::ResultSetProxy');
quote_sub "${target}::schema", '$s', { '$s' => \$schema };
$schema->connection(@info);
return;
}
- unless (DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')) {
- $self->throw_exception("Unable to proceed without " . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy') );
+ if ( my $missing = DBIx::Class::Optional::Dependencies->req_missing_for('deploy') ) {
+ $self->throw_exception("Unable to proceed without $missing");
}
my $db_tr = SQL::Translator->new({
}
sub _dbi_attrs_for_bind {
- my ($self, $ident, $bind) = @_;
+ #my ($self, $ident, $bind) = @_;
- my @attrs;
+ return [ map {
- for (map { $_->[0] } @$bind) {
- push @attrs, do {
- if (exists $_->{dbd_attrs}) {
- $_->{dbd_attrs}
- }
- elsif($_->{sqlt_datatype}) {
- # cache the result in the dbh_details hash, as it can not change unless
- # we connect to something else
- my $cache = $self->_dbh_details->{_datatype_map_cache} ||= {};
- if (not exists $cache->{$_->{sqlt_datatype}}) {
- $cache->{$_->{sqlt_datatype}} = $self->bind_attribute_by_data_type($_->{sqlt_datatype}) || undef;
- }
- $cache->{$_->{sqlt_datatype}};
- }
- else {
- undef; # always push something at this position
- }
- }
- }
+ exists $_->{dbd_attrs} ? $_->{dbd_attrs}
- return \@attrs;
+ : ! $_->{sqlt_datatype} ? undef
+
+ : do {
+
+ # cache the result in the dbh_details hash, as it (usually) can not change
+ # unless we connect to something else
+ # FIXME: for the time being Oracle is an exception, pending a rewrite of
+ # the LOB storage
+ my $cache = $_[0]->_dbh_details->{_datatype_map_cache} ||= {};
+
+ $cache->{$_->{sqlt_datatype}} = $_[0]->bind_attribute_by_data_type($_->{sqlt_datatype})
+ if ! exists $cache->{$_->{sqlt_datatype}};
+
+ $cache->{$_->{sqlt_datatype}};
+
+ } } map { $_->[0] } @{$_[2]} ];
}
sub _execute {
sub _dbh_columns_info_for {
my ($self, $dbh, $table) = @_;
- if ($dbh->can('column_info')) {
- my %result;
- my $caught;
+ my %result;
+
+ if (! DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE and $dbh->can('column_info')) {
try {
my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
$result{$col_name} = \%column_info;
}
} catch {
- $caught = 1;
+ %result = ();
};
- return \%result if !$caught && scalar keys %result;
+
+ return \%result if keys %result;
}
- my %result;
my $sth = $dbh->prepare($self->sql_maker->select($table, undef, \'1 = 0'));
$sth->execute;
- my @columns = @{$sth->{NAME_lc}};
- for my $i ( 0 .. $#columns ){
- my %column_info;
- $column_info{data_type} = $sth->{TYPE}->[$i];
- $column_info{size} = $sth->{PRECISION}->[$i];
- $column_info{is_nullable} = $sth->{NULLABLE}->[$i] ? 1 : 0;
-
- if ($column_info{data_type} =~ m/^(.*?)\((.*?)\)$/) {
- $column_info{data_type} = $1;
- $column_info{size} = $2;
+
+### The acrobatics with lc names is necessary to support both the legacy
+### API that used NAME_lc exclusively, *AND* at the same time work properly
+### with column names differing in case only (thanks pg!)
+
+ my ($columns, $seen_lcs);
+
+ ++$seen_lcs->{lc($_)} and $columns->{$_} = {
+ idx => scalar keys %$columns,
+ name => $_,
+ lc_name => lc($_),
+ } for @{$sth->{NAME}};
+
+ $seen_lcs->{$_->{lc_name}} == 1
+ and
+ $_->{name} = $_->{lc_name}
+ for values %$columns;
+
+ for ( values %$columns ) {
+ my $inf = {
+ data_type => $sth->{TYPE}->[$_->{idx}],
+ size => $sth->{PRECISION}->[$_->{idx}],
+ is_nullable => $sth->{NULLABLE}->[$_->{idx}] ? 1 : 0,
+ };
+
+ if ($inf->{data_type} =~ m/^(.*?)\((.*?)\)$/) {
+ @{$inf}{qw( data_type size)} = ($1, $2);
}
- $result{$columns[$i]} = \%column_info;
+ $result{$_->{name}} = $inf;
}
+
$sth->finish;
- foreach my $col (keys %result) {
- my $colinfo = $result{$col};
- my $type_num = $colinfo->{data_type};
- my $type_name;
- if(defined $type_num && $dbh->can('type_info')) {
- my $type_info = $dbh->type_info($type_num);
- $type_name = $type_info->{TYPE_NAME} if $type_info;
- $colinfo->{data_type} = $type_name if $type_name;
+ if ($dbh->can('type_info')) {
+ for my $inf (values %result) {
+ next if ! defined $inf->{data_type};
+
+ $inf->{data_type} = (
+ (
+ (
+ $dbh->type_info( $inf->{data_type} )
+ ||
+ next
+ )
+ ||
+ next
+ )->{TYPE_NAME}
+ ||
+ next
+ );
+
+      # FIXME - this may be an artifact of the DBD::Pg implementation alone
+ # needs more testing in the future...
+ $inf->{size} -= 4 if (
+ ( $inf->{size}||0 > 4 )
+ and
+ $inf->{data_type} =~ qr/^text$/i
+ );
}
+
}
return \%result;
%{$sqltargs || {}}
};
- unless (DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')) {
- $self->throw_exception("Can't create a ddl file without " . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy') );
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')) {
+ $self->throw_exception("Can't create a ddl file without $missing");
}
my $sqlt = SQL::Translator->new( $sqltargs );
return join('', @rows);
}
- unless (DBIx::Class::Optional::Dependencies->req_ok_for ('deploy') ) {
- $self->throw_exception("Can't deploy without a ddl_dir or " . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy') );
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('deploy') ) {
+ $self->throw_exception("Can't deploy without a pregenerated 'ddl_dir' directory or $missing");
}
# sources needs to be a parser arg, but for simplicity allow at top level
my $attrs = $self->next::method(@_);
- foreach my $attr (@$attrs) {
- $attr->{ado_size} ||= 8000 if $attr;
- }
+ # The next::method above caches the returned hashrefs in a _dbh related
+ # structure. It is safe for us to modify it in this manner, as the default
+ # does not really change (albeit the entire logic is insane and is pending
+ # a datatype-objects rewrite)
+ $_ and $_->{ado_size} ||= 8000 for @$attrs;
return $attrs;
}
my $attrs = $self->next::method($ident, $bind);
- for my $i (0 .. $#$attrs) {
- if (keys %{$attrs->[$i]||{}} and my $col = $bind->[$i][0]{dbic_colname}) {
- $attrs->[$i] = { %{$attrs->[$i]}, ora_field => $col };
- }
- }
+ # Push the column name into all bind attrs, make sure to *NOT* write into
+ # the existing $attrs->[$idx]{..} hashref, as it is cached by the call to
+ # next::method above.
+ # FIXME - this code will go away when the LobWriter refactor lands
+ $attrs->[$_]
+ and
+ keys %{ $attrs->[$_] }
+ and
+ $bind->[$_][0]{dbic_colname}
+ and
+ $attrs->[$_] = { %{$attrs->[$_]}, ora_field => $bind->[$_][0]{dbic_colname} }
+ for 0 .. $#$attrs;
$attrs;
}
package DBIx::Class::Storage::DBI::Replicated;
+use warnings;
+use strict;
+
BEGIN {
- use DBIx::Class;
- die('The following modules are required for Replication ' . DBIx::Class::Optional::Dependencies->req_missing_for ('replicated') . "\n" )
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('replicated');
+ require DBIx::Class::Optional::Dependencies;
+ if ( my $missing = DBIx::Class::Optional::Dependencies->req_missing_for('replicated') ) {
+ die "The following modules are required for Replicated storage support: $missing\n";
+ }
}
use Moose;
sub build_datetime_parser {
my $self = shift;
- my $type = "DateTime::Format::Strptime";
try {
- eval "require ${type}"
+ require DateTime::Format::Strptime;
}
catch {
- $self->throw_exception("Couldn't load ${type}: $_");
+ $self->throw_exception("Couldn't load DateTime::Format::Strptime: $_");
};
- return $type->new( pattern => '%Y-%m-%d %H:%M:%S.%6N' );
+ return DateTime::Format::Strptime->new( pattern => '%Y-%m-%d %H:%M:%S.%6N' );
}
=head2 connect_call_datetime_setup
use Scalar::Util 'blessed';
use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize);
use SQL::Abstract qw(is_plain_value is_literal_value);
+use DBIx::Class::Carp;
use namespace::clean;
#
my $chunk = shift @pieces;
if (ref $chunk eq 'HASH') {
- push @pairs, map { $_ => $chunk->{$_} } sort keys %$chunk;
+ for (sort keys %$chunk) {
+
+ # Match SQLA 1.79 behavior
+ if ($_ eq '') {
+ is_literal_value($chunk->{$_})
+ ? carp 'Hash-pairs consisting of an empty string with a literal are deprecated, use -and => [ $literal ] instead'
+ : $self->throw_exception("Supplying an empty left hand side argument is not supported in hash-pairs")
+ ;
+ }
+
+ push @pairs, $_ => $chunk->{$_};
+ }
}
elsif (ref $chunk eq 'ARRAY') {
push @pairs, -or => $chunk
if @$chunk;
}
elsif ( ! length ref $chunk) {
+
+ # Match SQLA 1.79 behavior
+ $self->throw_exception("Supplying an empty left hand side argument is not supported in array-pairs")
+ if $where_is_anded_array and (! defined $chunk or $chunk eq '');
+
push @pairs, $chunk, shift @pieces;
}
else {
for (my $i = 0; $i <= $#$where; $i++ ) {
+ # Match SQLA 1.79 behavior
+ $self->throw_exception(
+ "Supplying an empty left hand side argument is not supported in array-pairs"
+ ) if (! defined $where->[$i] or ! length $where->[$i]);
+
my $logic_mod = lc ( ($where->[$i] =~ /^(\-(?:and|or))$/i)[0] || '' );
if ($logic_mod) {
my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] })
or next;
- $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt;
+ my @keys = keys %$sub_elt;
+ if ( @keys == 1 and $keys[0] !~ /^\-/ ) {
+ $fin_idx->{ "COL_$keys[0]_" . serialize $sub_elt } = $sub_elt;
+ }
+ else {
+ $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt;
+ }
}
elsif (! length ref $where->[$i] ) {
my $sub_elt = $self->_collapse_cond({ @{$where}[$i, $i+1] })
ASSERT_NO_INTERNAL_INDIRECT_CALLS => $ENV{DBIC_ASSERT_NO_INTERNAL_INDIRECT_CALLS} ? 1 : 0,
+ STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE => $ENV{DBIC_STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE} ? 1 : 0,
+
+ STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE => $ENV{DBIC_STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE} ? 1 : 0,
+
IV_SIZE => $Config{ivsize},
OS_NAME => $^O,
# Carp::Skip to the rescue soon
use DBIx::Class::Carp '^DBIx::Class|^DBICTest';
+use B ();
use Carp 'croak';
+use Storable 'nfreeze';
use Scalar::Util qw(weaken blessed reftype);
use List::Util qw(first);
sub qsub ($) { goto "e_sub } # no point depping on new Moo just for this
# END pre-Moo2 import block
+# Already correctly prototyped: perlbrew exec perl -MStorable -e 'warn prototype \&Storable::dclone'
+BEGIN { *deep_clone = \&Storable::dclone }
+
use base 'Exporter';
our @EXPORT_OK = qw(
- sigwarn_silencer modver_gt_or_eq
+ sigwarn_silencer modver_gt_or_eq modver_gt_or_eq_and_lt
fail_on_internal_wantarray fail_on_internal_call
refdesc refcount hrefaddr is_exception
- quote_sub qsub perlstring serialize
+ quote_sub qsub perlstring serialize deep_clone
UNRESOLVABLE_CONDITION
);
sub refcount ($) {
croak "Expecting a reference" if ! length ref $_[0];
- require B;
# No tempvars - must operate on $_[0], otherwise the pad
# will count as an extra ref
B::svref_2object($_[0])->REFCNT;
}
sub serialize ($) {
- require Storable;
local $Storable::canonical = 1;
- Storable::nfreeze($_[0]);
+ nfreeze($_[0]);
}
sub is_exception ($) {
eval { $mod->VERSION($ver) } ? 1 : 0;
}
+sub modver_gt_or_eq_and_lt ($$$) {
+ my ($mod, $v_ge, $v_lt) = @_;
+
+ croak "Nonsensical maximum version supplied"
+ if ! defined $v_lt or $v_lt =~ /[^0-9\.\_]/;
+
+ return (
+ modver_gt_or_eq($mod, $v_ge)
+ and
+ ! modver_gt_or_eq($mod, $v_lt)
+ ) ? 1 : 0;
+}
+
{
my $list_ctx_ok_stack_marker;
use SQL::Translator::Utils qw(debug normalize_name);
use DBIx::Class::Carp qw/^SQL::Translator|^DBIx::Class|^Try::Tiny/;
use DBIx::Class::Exception;
+use Class::C3::Componentised;
use Scalar::Util 'blessed';
use Try::Tiny;
use namespace::clean;
DBIx::Class::Exception->throw('No DBIx::Class::Schema') unless ($dbicschema);
if (!ref $dbicschema) {
- eval "require $dbicschema"
- or DBIx::Class::Exception->throw("Can't load $dbicschema: $@");
+ try {
+ Class::C3::Componentised->ensure_class_loaded($dbicschema)
+ } catch {
+ DBIx::Class::Exception->throw("Can't load $dbicschema: $_");
+ }
}
if (
EOW
require DBIx::Class::Optional::Dependencies;
- my %reqs_for_group = %{DBIx::Class::Optional::Dependencies->req_group_list};
# exclude the rdbms_* groups which are for DBIC users
- $opt_testdeps = {
- map { %{$reqs_for_group{$_}} } grep { !/^rdbms_|^dist_/ } keys %reqs_for_group
- };
+ # and the moose-related stuff iff we are under 5.8.3
+ $opt_testdeps = DBIx::Class::Optional::Dependencies->req_list_for([
+ grep {
+ !/^rdbms_|^dist_/
+ and
+ ($] > 5.008002 or !/^ (?: test_ )? (?: admin | admin_script | replicated ) $/x )
+ } keys %{DBIx::Class::Optional::Dependencies->req_group_list}
+ ]);
+
+ # this one is "special" - we need it both in optdeps and as a hard dep
+ delete $opt_testdeps->{'DBD::SQLite'};
print "Including all optional deps\n";
$reqs->{test_requires} = {
package MY;
sub distdir {
(my $snippet = shift->SUPER::distdir(@_)) =~ s/^create_distdir :/create_distdir_copy_manifested :/;
+ no warnings 'qw';
return <<"EOM";
$snippet
check_create_distdir_prereqs :
\t\$(NOECHO) @{[
- $mm_proto->oneliner("DBIx::Class::Optional::Dependencies->die_unless_req_ok_for(q(dist_dir))", [qw/-Ilib -MDBIx::Class::Optional::Dependencies/])
+ $mm_proto->oneliner("1", [qw( -Ilib -MDBIx::Class::Optional::Dependencies=-die_without,dist_dir )])
]}
EOM
sub postamble {
my $snippet = shift->SUPER::postamble(@_);
+ no warnings 'qw';
return <<"EOM";
$snippet
check_upload_dist_prereqs :
\t\$(NOECHO) @{[
- $mm_proto->oneliner("DBIx::Class::Optional::Dependencies->die_unless_req_ok_for(q(dist_upload))", [qw/-Ilib -MDBIx::Class::Optional::Dependencies/])
+ $mm_proto->oneliner("1", [qw( -Ilib -MDBIx::Class::Optional::Dependencies=-die_without,dist_upload )])
]}
EOM
#!/bin/bash
-source maint/travis-ci_scripts/common.bash
+# Stop pre-started RDBMS and sync for some settle time
+run_or_err "Stopping MySQL" "sudo /etc/init.d/mysql stop"
+run_or_err "Stopping PostgreSQL" "sudo /etc/init.d/postgresql stop"
+/bin/sync
+
+# Sanity check VM before continuing
+echo "
+=============================================================================
+
+= Startup Meminfo
+$(free -m -t)
+
+============================================================================="
+
+CI_VM_MIN_FREE_MB=2000
+if [[ "$(free -m | grep 'buffers/cache:' | perl -p -e '$_ = (split /\s+/, $_)[3]')" -lt "$CI_VM_MIN_FREE_MB" ]]; then
+ SHORT_CIRCUIT_SMOKE=1
+ echo_err "
+=============================================================================
+
+CI virtual machine stuck in a state with a lot of memory locked for no reason.
+Under Travis this state usually results in a failed build.
+Short-circuiting buildjob to avoid false negatives, please restart it manually.
+
+============================================================================="
+fi
+
if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
# Different boxes we run on may have different amount of hw threads
# The oneliner is a tad convoluted - basically what we do is
# slurp the entire file and get the index off the last
# `processor : XX` line
-export NUMTHREADS="$(( ( $(perl -0777 -n -e 'print (/ (?: .+ ^ processor \s+ : \s+ (\d+) ) (?! ^ processor ) /smx)' < /proc/cpuinfo) + 1 ) / 2 ))"
+#
+# We also divide the result by a factor, otherwise the travis VM gets
+# overloaded (the amount of available swap is just TOOOO damn small)
+if [[ -z "$NUMTHREADS" ]] ; then
+ export NUMTHREADS="$(( ( $(perl -0777 -n -e 'print (/ (?: .+ ^ processor \s+ : \s+ (\d+) ) (?! ^ processor ) /smx)' < /proc/cpuinfo) + 1 ) / 3 ))"
+fi
export CACHE_DIR="/tmp/poormanscache"
-# install some common tools from APT, more below unless CLEANTEST
-apt_install libapp-nopaste-perl tree apt-transport-https
+# these will be installed no matter what, also some extras unless CLEANTEST
+common_packages="libapp-nopaste-perl tree"
-# FIXME - the debian package is oddly broken - uses a bin/env based shebang
-# so nothing works under a brew. Fix here until #debian-perl patches it up
-sudo /usr/bin/perl -p -i -e 's|#!/usr/bin/env perl|#!/usr/bin/perl|' $(which nopaste)
+if [[ "$CLEANTEST" = "true" ]]; then
+
+ apt_install $common_packages
+
+else
-if [[ "$CLEANTEST" != "true" ]]; then
-### apt-get invocation - faster to grab everything at once
#
# FIXME these debconf lines should automate the firebird config but do not :(((
sudo bash -c 'echo -e "firebird2.5-super\tshared/firebird/enabled\tboolean\ttrue" | debconf-set-selections'
sudo bash -c 'echo -e "firebird2.5-super\tshared/firebird/sysdba_password/new_password\tpassword\t123" | debconf-set-selections'
- # add extra APT repo for Oracle
- # (https is critical - apt-get update can't seem to follow the 302)
- sudo bash -c 'echo -e "\ndeb [arch=i386] https://oss.oracle.com/debian unstable main non-free" >> /etc/apt/sources.list'
-
- run_or_err "Cloning poor man's cache from github" "git clone --depth=1 --branch=poor_mans_travis_cache https://github.com/ribasushi/travis_futzing.git $CACHE_DIR && $CACHE_DIR/reassemble"
-
- run_or_err "Priming up the APT cache with $(echo $(ls -d $CACHE_DIR/apt_cache/*.deb))" "sudo cp $CACHE_DIR/apt_cache/*.deb /var/cache/apt/archives"
+ apt_install $common_packages libmysqlclient-dev memcached firebird2.5-super firebird2.5-dev unixodbc-dev expect
- apt_install libmysqlclient-dev memcached firebird2.5-super firebird2.5-dev unixodbc-dev expect oracle-xe
+ run_or_err "Cloning poor man's cache from github" "git clone --depth=1 --single-branch --branch=oracle/10.2.0 https://github.com/poortravis/poormanscache.git $CACHE_DIR && $CACHE_DIR/reassemble"
+ run_or_err "Installing OracleXE manually from deb" "sudo dpkg -i $CACHE_DIR/apt_cache/oracle-xe_10.2.0.1-1.1_i386.deb || sudo bash -c 'source maint/travis-ci_scripts/common.bash && apt_install -f'"
### config memcached
run_or_err "Starting memcached" "sudo /etc/init.d/memcached start"
export DBICTEST_MEMCACHED=127.0.0.1:11211
### config mysql
- run_or_err "Restarting MySQL" "sudo /etc/init.d/mysql restart"
+ run_or_err "Installing minimizing MySQL config" "sudo cp maint/travis-ci_scripts/configs/minimal_mysql_travis.cnf /etc/mysql/conf.d/ && sudo chmod 644 /etc/mysql/conf.d/*.cnf"
+ run_or_err "Starting MySQL" "sudo /etc/init.d/mysql start"
run_or_err "Creating MySQL TestDB" "mysql -e 'create database dbic_test;'"
export DBICTEST_MYSQL_DSN='dbi:mysql:database=dbic_test;host=127.0.0.1'
export DBICTEST_MYSQL_USER=root
### config pg
- run_or_err "Restarting PostgreSQL" "sudo /etc/init.d/postgresql restart"
+ run_or_err "Starting PostgreSQL" "sudo /etc/init.d/postgresql start"
run_or_err "Creating PostgreSQL TestDB" "psql -c 'create database dbic_test;' -U postgres"
export DBICTEST_PG_DSN='dbi:Pg:database=dbic_test;host=127.0.0.1'
export DBICTEST_PG_USER=postgres
"echo \"CREATE DATABASE '/var/lib/firebird/2.5/data/dbic_test.fdb';\" | sudo isql-fb -u sysdba -p 123"
then
- run_or_err "Fetching and building Firebird ODBC driver" '
- cd "$(mktemp -d)"
- wget -qO- http://sourceforge.net/projects/firebird/files/firebird-ODBC-driver/2.0.2-Release/OdbcFb-Source-2.0.2.153.gz/download | tar -zx
- cd Builds/Gcc.lin
- perl -p -i -e "s|/usr/lib64|/usr/lib/x86_64-linux-gnu|g" ../makefile.environ
- make -f makefile.linux
- sudo make -f makefile.linux install
- '
-
- sudo bash -c 'cat >> /etc/odbcinst.ini' <<< "
-[Firebird]
-Description = InterBase/Firebird ODBC Driver
-Driver = /usr/lib/x86_64-linux-gnu/libOdbcFb.so
-Setup = /usr/lib/x86_64-linux-gnu/libOdbcFb.so
-Threading = 1
-FileUsage = 1
-"
-
export DBICTEST_FIREBIRD_DSN=dbi:Firebird:dbname=/var/lib/firebird/2.5/data/dbic_test.fdb
export DBICTEST_FIREBIRD_USER=SYSDBA
export DBICTEST_FIREBIRD_PASS=123
export DBICTEST_FIREBIRD_INTERBASE_USER=SYSDBA
export DBICTEST_FIREBIRD_INTERBASE_PASS=123
- export DBICTEST_FIREBIRD_ODBC_DSN="dbi:ODBC:Driver=Firebird;Dbname=/var/lib/firebird/2.5/data/dbic_test.fdb"
- export DBICTEST_FIREBIRD_ODBC_USER=SYSDBA
- export DBICTEST_FIREBIRD_ODBC_PASS=123
-
break
fi
export ORACLE_HOME="$CACHE_DIR/ora_instaclient/x86-64/oracle_instaclient_10.2.0.5.0"
fi
+
+# The debian package is oddly broken - uses a /bin/env based shebang
+# so nothing works under a brew, fixed in libapp-nopaste-perl 0.92-3
+# http://changelogs.ubuntu.com/changelogs/pool/universe/liba/libapp-nopaste-perl/libapp-nopaste-perl_0.96-1/changelog
+#
+# Since the vm runs an old version of umbongo fix things ourselves
+sudo /usr/bin/perl -p -i -e 's|#!/usr/bin/env perl|#!/usr/bin/perl|' $(which nopaste)
#!/bin/bash
-source maint/travis-ci_scripts/common.bash
if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-CPAN_MIRROR=$(echo "$PERL_CPANM_OPT" | grep -oP -- '--mirror\s+\S+' | head -n 1 | cut -d ' ' -f 2)
-if ! [[ "$CPAN_MIRROR" =~ "http://" ]] ; then
- echo_err "Unable to extract primary cpan mirror from PERL_CPANM_OPT - something is wrong"
- echo_err "PERL_CPANM_OPT: $PERL_CPANM_OPT"
- CPAN_MIRROR="http://cpan.metacpan.org/"
- PERL_CPANM_OPT="$PERL_CPANM_OPT --mirror $CPAN_MIRROR"
- echo_err "Using $CPAN_MIRROR for the time being"
-fi
+# we need a mirror that both has the standard index and a backpan version rolled
+# into one, due to MDV testing
+CPAN_MIRROR="http://cpan.metacpan.org/"
+
+PERL_CPANM_OPT="$PERL_CPANM_OPT --mirror $CPAN_MIRROR"
# do not set PERLBREW_CPAN_MIRROR - not all backpan-like mirrors have the perl tarballs
export PERL_MM_USE_DEFAULT=1 PERL_MM_NONINTERACTIVE=1 PERL_AUTOINSTALL_PREFER_CPAN=1 HARNESS_TIMER=1 MAKEFLAGS="-j$NUMTHREADS"
PERL_CPANM_OPT="$PERL_CPANM_OPT --dev"
- # FIXME inline-upgrade cpanm, work around https://github.com/travis-ci/travis-ci/issues/1477
- cpanm_loc="$(which cpanm)"
- run_or_err "Upgrading cpanm ($cpanm_loc) to latest stable" \
- "wget -q -O $cpanm_loc cpanmin.us && chmod a+x $cpanm_loc"
fi
# Fixup CPANM_OPT to behave more like a traditional cpan client
# the presently installed libs
# Idea stolen from
# https://github.com/kentfredric/Dist-Zilla-Plugin-Prereqs-MatchInstalled-All/blob/master/maint-travis-ci/sterilize_env.pl
-elif [[ "$CLEANTEST" == "true" ]] && [[ "$POISON_ENV" != "true" ]] ; then
+# Only works on 5.12+ (where sitelib was finally properly fixed)
+elif [[ "$CLEANTEST" == "true" ]] && [[ "$POISON_ENV" != "true" ]] && perl -M5.012 -e 1 &>/dev/null ; then
echo_err "$(tstamp) Cleaning precompiled Travis-Perl"
- perl -MConfig -MFile::Find -e '
+ perl -M5.012 -MConfig -MFile::Find -e '
my $sitedirs = {
map { $Config{$_} => 1 }
grep { $_ =~ /site(lib|arch)exp$/ }
"
run_or_err "Configuring CPAN.pm" "perl -e '$CPAN_CFG_SCRIPT'"
-echo_err "
-===================== PERL CONFIGURATION COMPLETE =====================
-
-= CPUinfo
-$(perl -0777 -p -e 's/.+\n\n(?!\z)//s' < /proc/cpuinfo)
-
-= Meminfo
-$(free -m -t)
+# poison the environment
+if [[ "$POISON_ENV" = "true" ]] ; then
+
+  # in addition to making sure tests do not rely on implicit order of
+ # returned results, look through lib, find all mentioned ENVvars and
+ # set them to true and see if anything explodes
+ for var in \
+ DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER \
+ $( grep -P '\$ENV\{' -r lib/ --exclude-dir Optional | grep -oP '\bDBIC\w+' | sort -u | grep -vP '^(DBIC_TRACE(_PROFILE)?|DBIC_.+_DEBUG)$' )
+ do
+ if [[ -z "${!var}" ]] ; then
+ export $var=1
+ echo "POISON_ENV: setting $var to 1"
+ fi
+ done
+
+ # bogus nonexisting DBI_*
+ export DBI_DSN="dbi:ODBC:server=NonexistentServerAddress"
+ export DBI_DRIVER="ADO"
+
+ # some people do in fact set this - boggle!!!
+ # it of course won't work before 5.8.4
+ if perl -M5.008004 -e 1 &>/dev/null ; then
+ export PERL_STRICTURES_EXTRA=1
+ fi
-= Diskinfo
-$(sudo df -h)
+ # emulate a local::lib-like env
+ # trick cpanm into executing true as shell - we just need the find+unpack
+ run_or_err "Downloading latest stable DBIC from CPAN" \
+ "SHELL=/bin/true cpanm --look DBIx::Class"
-$(mount | grep '^/')
+ export PERL5LIB="$( ls -d ~/.cpanm/latest-build/DBIx-Class-*/lib | tail -n1 ):$PERL5LIB"
-= Kernel info
-$(uname -a)
+ # perldoc -l <mod> searches $(pwd)/lib in addition to PERL5LIB etc, hence the cd /
+ echo_err "Latest stable DBIC (without deps) locatable via \$PERL5LIB at $(cd / && perldoc -l DBIx::Class)"
-= Network Configuration
-$(ip addr)
+fi
-= Network Sockets Status
-$(sudo netstat -an46p | grep -Pv '\s(CLOSING|(FIN|TIME|CLOSE)_WAIT.?|LAST_ACK)\s')
+if [[ "$CLEANTEST" != "true" ]] ; then
+  # using SQLT if it will be available
+ # not doing later because we will be running in a subshell
+ export DBICTEST_SQLT_DEPLOY=1
-= Processlist
-$(sudo ps fuxa)
-============================================================================="
+fi
#!/bin/bash
+# this file is executed in a subshell - set up the common stuff
source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-# poison the environment
-if [[ "$POISON_ENV" = "true" ]] ; then
-
- # in addition to making sure tests do not rely on implicid order of
- # returned results, look through lib, find all mentioned ENVvars and
- # set them to true and see if anything explodes
- for var in \
- DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER \
- $(grep -P '\$ENV\{' -r lib/ --exclude-dir Optional | grep -oP '\bDBIC\w+' | sort -u | grep -v DBIC_TRACE)
- do
- if [[ -z "${!var}" ]] ; then
- export $var=1
- echo "POISON_ENV: setting $var to 1"
- fi
- done
-
- # bogus nonexisting DBI_*
- export DBI_DSN="dbi:ODBC:server=NonexistentServerAddress"
- export DBI_DRIVER="ADO"
-
- # some people do in fact set this - boggle!!!
- export PERL_STRICTURES_EXTRA=1
+if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then exit 0 ; fi
- # emulate a local::lib-like env
- # trick cpanm into executing true as shell - we just need the find+unpack
- run_or_err "Downloading latest stable DBIC from CPAN" \
- "SHELL=/bin/true cpanm --look DBIx::Class"
-
- export PERL5LIB="$( ls -d ~/.cpanm/latest-build/DBIx-Class-*/lib | tail -n1 ):$PERL5LIB"
+# The prereq-install stage will not work with both POISON and DEVREL
+# DEVREL wins
+if [[ "$DEVREL_DEPS" = "true" ]] ; then
+ export POISON_ENV=""
+fi
- # perldoc -l <mod> searches $(pwd)/lib in addition to PERL5LIB etc, hence the cd /
- echo_err "Latest stable DBIC (without deps) locatable via \$PERL5LIB at $(cd / && perldoc -l DBIx::Class)"
+# FIXME - this is a kludge in place of proper MDV testing. For the time
+# being simply use the minimum versions of our DBI/DBDstack, to avoid
+# fuckups like 0.08260 (went unnoticed for 5 months)
+if [[ "$POISON_ENV" = "true" ]] ; then
- # FIXME - this is a kludge in place of proper MDV testing. For the time
- # being simply use the minimum versions of our DBI/DBDstack, to avoid
- # fuckups like 0.08260 (went unnoticed for 5 months)
- #
# use url-spec for DBI due to https://github.com/miyagawa/cpanminus/issues/328
- if perl -M5.013003 -e1 &>/dev/null ; then
+ if [[ "$CLEANTEST" != "true" ]] || perl -M5.013003 -e1 &>/dev/null ; then
+ # the fulltest may re-upgrade DBI, be conservative only on cleantests
# earlier DBI will not compile without PERL_POLLUTE which was gone in 5.14
parallel_installdeps_notest T/TI/TIMB/DBI-1.614.tar.gz
else
fi
# Test both minimum DBD::SQLite and minimum BigInt SQLite
+ # reverse the logic from above for this (low on full, higher on clean)
if [[ "$CLEANTEST" = "true" ]]; then
parallel_installdeps_notest DBD::SQLite@1.37
else
# So instead we still use our stock (possibly old) CPAN, and add some
# handholding
- if [[ "$DEVREL_DEPS" == "true" ]] ; then
- # We are not "quite ready" for SQLA 1.99, do not consider it
- #
- installdeps 'SQL::Abstract~<1.99'
-
- elif ! CPAN_is_sane ; then
+ if [[ "$DEVREL_DEPS" != "true" ]] && ! CPAN_is_sane ; then
# no configure_requires - we will need the usual suspects anyway
# without pre-installing these in one pass things like extract_prereqs won't work
installdeps ExtUtils::MakeMaker ExtUtils::CBuilder Module::Build
-
fi
else
# we will be running all dbic tests - preinstall lots of stuff, run basic tests
- # using SQLT and set up whatever databases necessary
- export DBICTEST_SQLT_DEPLOY=1
# do the preinstall in several passes to minimize amount of cross-deps installing
# multiple times, and to avoid module re-architecture breaking another install
parallel_installdeps_notest Test::Exception Encode::Locale Test::Fatal
parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status
parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities
- parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant
+ parallel_installdeps_notest YAML LWP Class::Trigger DateTime::Format::Builder Class::Accessor::Grouped Package::Variant
parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which
+ # the official version is very much outdated and does not compile on 5.14+
+ # use this rather updated source tree (needs to go to PAUSE):
+ # https://github.com/pilcrow/perl-dbd-interbase
if [[ -n "$DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then
- # the official version is very much outdated and does not compile on 5.14+
- # use this rather updated source tree (needs to go to PAUSE):
- # https://github.com/pilcrow/perl-dbd-interbase
parallel_installdeps_notest git://github.com/dbsrgits/perl-dbd-interbase.git
fi
# install (remaining) dependencies, sometimes with a gentle push
if [[ "$CLEANTEST" = "true" ]]; then
- # we may need to prepend some stuff to that list
- HARD_DEPS="$(echo $(make listdeps))"
-##### TEMPORARY WORKAROUNDS needed in case we will be using CPAN.pm
- if [[ "$DEVREL_DEPS" != "true" ]] && ! CPAN_is_sane ; then
- # combat dzillirium on harness-wide level, otherwise breakage happens weekly
- echo_err "$(tstamp) Ancient CPAN.pm: engaging TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests during dep install"
- perl -MTAP::Harness\ 3.18 -e1 &>/dev/null || run_or_err "Upgrading TAP::Harness for HARNESS_SUBCLASS support" "cpan TAP::Harness"
- export PERL5LIB="$(pwd)/maint/travis-ci_scripts/lib:$PERL5LIB"
- export HARNESS_SUBCLASS="TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests"
- # sanity check, T::H does not report sensible errors when the subclass fails to load
- perl -MTAP::Harness::IgnoreNonessentialDzilAutogeneratedTests -e1
-
- # DBD::SQLite reasonably wants DBI at config time
- perl -MDBI -e1 &>/dev/null || HARD_DEPS="DBI $HARD_DEPS"
-
- # this is a fucked CPAN - won't understand configure_requires of
- # various pieces we may run into
- # FIXME - need to get these off metacpan or something instead
- HARD_DEPS="ExtUtils::Depends B::Hooks::OP::Check $HARD_DEPS"
-
- # FIXME
- # parent is temporary due to Carp https://rt.cpan.org/Ticket/Display.html?id=88494
- HARD_DEPS="parent $HARD_DEPS"
-
- if CPAN_supports_BUILDPL ; then
- # We will invoke a posibly MBT based BUILD-file, but we do not support
- # configure requires. So we not only need to install MBT but its prereqs
- # FIXME This is madness
- HARD_DEPS="$(extract_prereqs Module::Build::Tiny) Module::Build::Tiny $HARD_DEPS"
- else
- # FIXME
- # work around Params::Validate not having a Makefile.PL so really old
- # toolchains can not figure out what the prereqs are ;(
- # Need to do more research before filing a bug requesting Makefile inclusion
- HARD_DEPS="$(extract_prereqs Params::Validate) $HARD_DEPS"
- fi
- fi
-##### END TEMPORARY WORKAROUNDS
+ # we are doing a devrel pass - try to upgrade *everything* (we will be using cpanm so safe-ish)
+ if [[ "$DEVREL_DEPS" == "true" ]] ; then
- installdeps $HARD_DEPS
+ HARD_DEPS="$(echo $(make listalldeps))"
+
+ else
+
+ HARD_DEPS="$(echo $(make listdeps | sort -R))"
+
+##### TEMPORARY WORKAROUNDS needed in case we will be using a fucked CPAN.pm
+ if ! CPAN_is_sane ; then
+
+ # DBD::SQLite reasonably wants DBI at config time
+ perl -MDBI -e1 &>/dev/null || HARD_DEPS="DBI $HARD_DEPS"
+
+ # this is a fucked CPAN - won't understand configure_requires of
+ # various pieces we may run into
+ # FIXME - need to get these off metacpan or something instead
+ HARD_DEPS="ExtUtils::Depends B::Hooks::OP::Check $HARD_DEPS"
+
+ if CPAN_supports_BUILDPL ; then
+        # We will invoke a possibly MBT based BUILD-file, but we do not support
+ # configure requires. So we not only need to install MBT but its prereqs
+ # FIXME This is madness
+ HARD_DEPS="$(extract_prereqs Module::Build::Tiny) Module::Build::Tiny $HARD_DEPS"
+ else
+ # FIXME
+ # work around Params::Validate not having a Makefile.PL so really old
+ # toolchains can not figure out what the prereqs are ;(
+ # Need to do more research before filing a bug requesting Makefile inclusion
+ HARD_DEPS="$(extract_prereqs Params::Validate) $HARD_DEPS"
+ fi
-### FIXME in case we set it earlier in a workaround
- if [[ -n "$HARNESS_SUBCLASS" ]] ; then
-
- INSTALLDEPS_SKIPPED_TESTLIST=$(perl -0777 -e '
-my $curmod_re = qr{
-^
- (?:
- \QBuilding and testing\E
- |
- [\x20\t]* CPAN\.pm: [^\n]*? (?i:build)\S*
- )
-
- [\x20\t]+ (\S+)
-$}mx;
-
-my $curskip_re = qr{^ === \x20 \QSkipping nonessential autogenerated tests: \E([^\n]+) }mx;
-
-my (undef, @chunks) = (split qr/$curmod_re/, <>);
-while (@chunks) {
- my ($mod, $log) = splice @chunks, 0, 2;
- print "!!! Skipped nonessential tests while installing $mod:\n\t$1\n"
- if $log =~ $curskip_re;
-}
-' <<< "$LASTOUT")
-
- if [[ -n "$INSTALLDEPS_SKIPPED_TESTLIST" ]] ; then
- POSTMORTEM="$POSTMORTEM$(
- echo
- echo "The following non-essential tests were skipped during deps installation"
- echo "============================================================="
- echo "$INSTALLDEPS_SKIPPED_TESTLIST"
- echo "============================================================="
- echo
- )"
fi
- unset HARNESS_SUBCLASS
+##### END TEMPORARY WORKAROUNDS
fi
+ installdeps $HARD_DEPS
+
else
+
parallel_installdeps_notest "$(make listdeps)"
+
fi
echo_err "$(tstamp) Dependency installation finished"
===================== DEPENDENCY CONFIGURATION COMPLETE =====================
$(tstamp) Configuration phase seems to have taken $(date -ud "@$SECONDS" '+%H:%M:%S') (@$SECONDS)
-= Meminfo
-$(free -m -t)
-
-= Diskinfo
-$(sudo df -h)
-
-$(mount | grep '^/')
-
-= Environment
-$(env | grep -P 'TEST|HARNESS|MAKE|TRAVIS|PERL|DBIC' | LC_ALL=C sort | cat -v)
-
-= Perl in use
-$(perl -V)
-============================================================================="
+$(ci_vm_state_text)"
#!/bin/bash
+# this file is executed in a subshell - set up the common stuff
source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
+
+if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then exit 0 ; fi
run_harness_tests() {
local -x HARNESS_OPTIONS=c:j$NUMTHREADS
fi
TEST_T1=$SECONDS
-if [[ -z "$DBICTRACE" ]] && [[ -z "$POISON_ENV" ]] && [[ -s "$TEST_STDERR_LOG" ]] ; then
+if [[ -z "$DBIC_TRACE" ]] && [[ -z "$DBIC_MULTICREATE_DEBUG" ]] && [[ -s "$TEST_STDERR_LOG" ]] ; then
STDERR_LOG_SIZE=$(wc -l < "$TEST_STDERR_LOG")
# prepend STDERR log
#!/bin/bash
-# !!! Nothing here will be executed !!!
-# The source-line calling this script is commented out in .travis.yml
-
+# this file is executed in a subshell - set up the common stuff
source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-echo_err "Nothing to do"
+if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then exit 0 ; fi
+
+echo_err "
+$(ci_vm_state_text)
-return 0
+=== dmesg ringbuffer
+$(sudo dmesg)"
#!/bin/bash
+# this file is executed in a subshell - set up the common stuff
source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
+
+if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then exit 0 ; fi
if [[ "$CLEANTEST" != "true" ]] ; then
- parallel_installdeps_notest $(perl -Ilib -MDBIx::Class -e 'print join " ", keys %{DBIx::Class::Optional::Dependencies->req_list_for("dist_dir")}')
+ parallel_installdeps_notest $(perl -Ilib -MDBIx::Class::Optional::Dependencies=-list_missing,dist_dir)
run_or_err "Attempt to build a dist with all prereqs present" "make dist"
echo "Contents of the resulting dist tarball:"
echo "==========================================="
# !!! Nothing here will be executed !!!
# The source-line calling this script is commented out in .travis.yml
+# this file is executed in a subshell - set up the common stuff
source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-echo_err "Nothing to do"
+if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then exit 0 ; fi
-return 0
+echo_err "Nothing to do"
#!/bin/bash
+# "autodie"
set -e
TEST_STDERR_LOG=/tmp/dbictest.stderr
-TIMEOUT_CMD="/usr/bin/timeout --kill-after=9.5m --signal=TERM 9m"
+TIMEOUT_CMD="/usr/bin/timeout --kill-after=16m --signal=TERM 15m"
# echo_err: print all arguments to STDERR (1>&2) instead of STDOUT
echo_err() { echo "$@" 1>&2 ; }
# tstamp: emit a bracketed [HH:MM:SS] timestamp, no trailing newline (-n)
tstamp() { echo -n "[$(date '+%H:%M:%S')]" ; }
+# ci_vm_state_text: emit a labelled multi-section snapshot of the CI VM
+# state (CPU, memory, disk/mounts, kernel, network config, open sockets,
+# process tree, smoke-relevant environment variables, and perl -V).
+# The entire report is one quoted string so callers can interpolate it
+# directly into their log output via $(ci_vm_state_text).
+# NOTE(review): df/netstat/ps run under sudo — assumes passwordless sudo
+# on the CI VM; confirm if reusing outside Travis.
+ci_vm_state_text() {
+  echo "
+========================== CI System information ============================
+
+= CPUinfo
+$(perl -0777 -p -e 's/.+\n\n(?!\z)//s' < /proc/cpuinfo)
+
+= Meminfo
+$(free -m -t)
+
+= Diskinfo
+$(sudo df -h)
+
+$(mount | grep '^/')
+
+= Kernel info
+$(uname -a)
+
+= Network Configuration
+$(ip addr)
+
+= Network Sockets Status
+$(sudo netstat -an46p | grep -Pv '\s(CLOSING|(FIN|TIME|CLOSE)_WAIT.?|LAST_ACK)\s')
+
+= Processlist
+$(sudo ps fuxa)
+
+= Environment
+$(env | grep -P 'TEST|HARNESS|MAKE|TRAVIS|PERL|DBIC' | LC_ALL=C sort | cat -v)
+
+= Perl in use
+$(perl -V)
+============================================================================="
+}
+
run_or_err() {
echo_err -n "$(tstamp) $1 ... "
# flatten
pkgs="$@"
- # Need to do this at every step, the sources list may very well have changed
- run_or_err "Updating APT available package list" "sudo apt-get update"
-
run_or_err "Installing Debian APT packages: $pkgs" "sudo apt-get install --allow-unauthenticated --no-install-recommends -y $pkgs"
}
if [[ -z "$@" ]] ; then return; fi
# one module spec per line
- MODLIST="$(printf '%s\n' "$@")"
+ MODLIST="$(printf '%s\n' "$@" | sort -R)"
# We want to trap the output of each process and serially append them to
# each other as opposed to just dumping a jumbled up mass-log that would
--- /dev/null
+[mysqld]
+
+thread_cache_size = 0
+
+# mysql >= 5.5.16
+#thread_pool_size = 1
+
+bulk_insert_buffer_size = 0
+read_buffer_size = 32K
+join_buffer_size = 128K
+sort_buffer_size = 128K
+table_definition_cache = 400
+
+performance_schema = 0
+
+query_cache_type = 0
+query_cache_size = 0
+
+innodb_use_sys_malloc = 1
+innodb_buffer_pool_size = 1M
+
+key_buffer_size = 64K
+myisam_sort_buffer_size = 128K
+++ /dev/null
-package TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests;
-
-use warnings;
-use strict;
-
-use base 'TAP::Harness';
-use File::Spec ();
-use IPC::Open3 'open3';
-use File::Temp ();
-use List::Util 'first';
-
-my $frivolous_test_map = {
-# Test based on the extremely dep-heavy, *prone to failures* Test::CheckDeps
-#
- qr|^t/00-check-deps.t$| => [
- qr|^\Q# this test was generated with Dist::Zilla::Plugin::Test::CheckDeps|m,
-
- # older non-annotated versions
- qr|use \s+ Test::CheckDeps .*? ^\Qcheck_dependencies('suggests')\E .*? \QBAIL_OUT("Missing dependencies") if !Test::More->builder->is_passing|smx,
- ],
-
-# "does everything compile" tests are useless by definition - this is what the
-# rest of the test suite is for
-#
- qr|^t/00-compile.t$| => [
- qr|^\Q# this test was generated with Dist::Zilla::Plugin::Test::Compile|m,
- ],
-
-# The report prereq test managed to become fatal as well
-#
- qr|^t/00-report-prereqs.t$| => [
- qr|^\Q# This test was generated by Dist::Zilla::Plugin::Test::ReportPrereqs|m,
- ],
-
-# Just future-proof the thing, catch anything autogened by dzil for a bit
- qr|^t/00-| => [
- qr|^\Q# This test was generated by Dist::Zilla::|m,
- ]
-};
-
-sub aggregate_tests {
- my ($self, $aggregate, @all_tests) = @_;
-
- my ($run_tests, $skip_tests);
-
- TESTFILE:
- for (@all_tests) {
- my $fn = File::Spec::Unix->catpath( File::Spec->splitpath( $_ ) );
-
- if (my $REs = $frivolous_test_map->{
- (first { $fn =~ $_ } keys %$frivolous_test_map ) || ''
- }) {
- my $slurptest = do { local (@ARGV, $/) = $fn; <> };
- $slurptest =~ $_ and push @$skip_tests, $fn and next TESTFILE for @$REs;
- }
-
- push @$run_tests, $fn;
- }
-
- if ($skip_tests) {
-
- for my $tfn (@$skip_tests) {
-
- (my $tfn_flattened = $tfn) =~ s|/|_|g;
-
- my $log_file = File::Temp->new(
- DIR => '/tmp',
- TEMPLATE => "AutoGenTest_${tfn_flattened}_XXXXX",
- SUFFIX => '.txt',
- );
-
- # FIXME I have no idea why the fileno dance is necessary - will investigate later
- # All I know is that if I pass in just $log_file - open3 ignores it >:(
- my $pid = open3(undef, '>&'.fileno($log_file), undef, $^X, qw(-I blib -I arch/lib), $tfn );
- waitpid ($pid, 0);
- my $ex = $?;
-
- if ($ex) {
- # use qx as opposed to another open3 until I figure out the above
- close $log_file or die "Unable to close $log_file: $!";
- chomp( my $url = `/usr/bin/nopaste -q -s Shadowcat -d $log_file < $log_file` );
-
- $tfn .= "[would NOT have passed: $ex / $url]";
- }
- }
-
- print STDERR "=== Skipping nonessential autogenerated tests: @$skip_tests\n";
- }
-
- return $self->SUPER::aggregate_tests($aggregate, @$run_tests);
-}
-
-1;
use warnings;
BEGIN {
- use DBIx::Class;
- die ( 'The following modules are required for the dbicadmin utility: '
- . DBIx::Class::Optional::Dependencies->req_missing_for ('admin_script')
- . "\n"
- ) unless DBIx::Class::Optional::Dependencies->req_ok_for ('admin_script');
+ require DBIx::Class::Optional::Dependencies;
+ if (my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('admin_script') ) {
+ die "The following modules are required for the dbicadmin utility: $missing\n";
+ }
}
use DBIx::Class::Admin::Descriptive;
use Test::Warn;
use lib qw(t/lib);
use DBICTest;
-use DBIx::Class::_Util 'sigwarn_silencer';
+use DBIx::Class::_Util qw(sigwarn_silencer serialize);
use Path::Class::File ();
use Math::BigInt;
use List::Util qw/shuffle/;
-use Storable qw/nfreeze dclone/;
my $schema = DBICTest->init_schema();
}
local $Storable::canonical = 1;
- my $preimage = nfreeze($args);
+ my $preimage = serialize($args);
for my $tst (keys %$args) {
}
ok (
- ($preimage eq nfreeze($args)),
+ ($preimage eq serialize($args)),
'Arguments fed to populate()/create() unchanged'
);
-#!/usr/bin/perl
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'deploy';
use strict;
use warnings;
use ViewDeps;
use ViewDepsBad;
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' .
- DBIx::Class::Optional::Dependencies->req_missing_for('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('deploy');
-}
-
-use_ok('DBIx::Class::ResultSource::View');
-
#################### SANITY
my $view = DBIx::Class::ResultSource::View->new;
use DBICTest;
my $schema = DBICTest->init_schema();
-plan tests => 19;
-
# select from a class with resultset_attributes
my $resultset = $schema->resultset('BooksInLibrary');
is($resultset, 3, "select from a class with resultset_attributes okay");
+$resultset = $resultset->search({}, { where => undef });
+is($resultset, 3, "where condition not obliterated");
+
# now test out selects through a resultset
my $owner = $schema->resultset('Owners')->find({name => "Newton"});
my $programming_perl = $owner->books->find_or_create({ title => "Programming Perl" });
ok( !$@, 'many_to_many set_$rel(\@objects) did not throw');
is($pointy_objects->count, $pointy_count, 'many_to_many set_$rel($hash) count correct');
is($round_objects->count, $round_count, 'many_to_many set_$rel($hash) other rel count correct');
+
+done_testing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_pg';
+
use strict;
use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest;
-use DBIx::Class::Optional::Dependencies ();
my $main_pid = $$;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
- . ' (note: creates and drops a table named artist!)' unless ($dsn && $user);
-
# README: If you set the env var to a number greater than 10,
# we will use that many children
my $num_children = $ENV{DBICTEST_FORK_STRESS} || 1;
$num_children = 10;
}
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
my $schema = DBICTest::Schema->connect($dsn, $user, $pass, { AutoCommit => 1 });
my $parent_rs;
}
use threads;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_pg';
+
use strict;
use warnings;
plan skip_all => 'DBIC does not actively support threads before perl 5.8.5'
if $] < '5.008005';
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
- . ' (note: creates and drops a table named artist!)' unless ($dsn && $user);
-
# README: If you set the env var to a number greater than 10,
# we will use that many children
my $num_children = $ENV{DBICTEST_THREAD_STRESS} || 1;
$num_children = 10;
}
-use_ok('DBICTest::Schema');
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
my $schema = DBICTest::Schema->connect($dsn, $user, $pass, { AutoCommit => 1, RaiseError => 1, PrintError => 0 });
}
use threads;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_pg';
+
use strict;
use warnings;
plan skip_all => 'DBIC does not actively support threads before perl 5.8.5'
if $] < '5.008005';
-use DBIx::Class::Optional::Dependencies ();
use Scalar::Util 'weaken';
use lib qw(t/lib);
use DBICTest;
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
- . ' (note: creates and drops a table named artist!)' unless ($dsn && $user);
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
-
my $num_children = $ENV{DBICTEST_THREAD_STRESS} || 1;
if($num_children !~ /^[0-9]+$/ || $num_children < 10) {
$num_children = 10;
}
-use_ok('DBICTest::Schema');
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
my $schema = DBICTest::Schema->connect($dsn, $user, $pass, { AutoCommit => 1, RaiseError => 1, PrintError => 0 });
# Load them and empty the registry
# this loads the DT armada
- $has_dt = DBIx::Class::Optional::Dependencies->req_ok_for('test_dt_sqlite');
+ $has_dt = DBIx::Class::Optional::Dependencies->req_ok_for([qw( test_rdbms_sqlite icdt )]);
require Errno;
require DBI;
! DBICTest::RunMode->is_plain
and
! $ENV{DBICTEST_IN_PERSISTENT_ENV}
- and
- # FIXME - investigate wtf is going on with 5.18
- ! ( $] > 5.017 and $ENV{DBIC_TRACE_PROFILE} )
) {
# FIXME - ideally we should be able to just populate an alternative
($ENV{PATH}) = $ENV{PATH} =~ /(.+)/;
-my $persistence_tests = {
- PPerl => {
- cmd => [qw/pperl --prefork=1/, __FILE__],
- },
- 'CGI::SpeedyCGI' => {
- cmd => [qw/speedy -- -t5/, __FILE__],
- },
-};
-
-# scgi is smart and will auto-reap after -t amount of seconds
-# pperl needs an actual killer :(
-$persistence_tests->{PPerl}{termcmd} = [
- $persistence_tests->{PPerl}{cmd}[0],
- '--kill',
- @{$persistence_tests->{PPerl}{cmd}}[ 1 .. $#{$persistence_tests->{PPerl}{cmd}} ],
-];
-
+my $persistence_tests;
SKIP: {
skip 'Test already in a persistent loop', 1
if $ENV{DBICTEST_IN_PERSISTENT_ENV};
local $ENV{DBICTEST_IN_PERSISTENT_ENV} = 1;
+ $persistence_tests = {
+ PPerl => {
+ cmd => [qw/pperl --prefork=1/, __FILE__],
+ },
+ 'CGI::SpeedyCGI' => {
+ cmd => [qw/speedy -- -t5/, __FILE__],
+ },
+ };
+
+ # scgi is smart and will auto-reap after -t amount of seconds
+ # pperl needs an actual killer :(
+ $persistence_tests->{PPerl}{termcmd} = [
+ $persistence_tests->{PPerl}{cmd}[0],
+ '--kill',
+ @{$persistence_tests->{PPerl}{cmd}}[ 1 .. $#{$persistence_tests->{PPerl}{cmd}} ],
+ ];
+
require IPC::Open2;
for my $type (keys %$persistence_tests) { SKIP: {
# just an extra precaution in case we blew away from the SKIP - since there are no
# PID files to go by (man does pperl really suck :(
END {
- unless ($ENV{DBICTEST_IN_PERSISTENT_ENV}) {
- close $_ for (*STDIN, *STDOUT, *STDERR);
+ if ($persistence_tests->{PPerl}{termcmd}) {
local $?; # otherwise test will inherit $? of the system()
- system (@{$persistence_tests->{PPerl}{termcmd}})
- if $persistence_tests->{PPerl}{termcmd};
+ require IPC::Open3;
+ open my $null, ">", File::Spec->devnull;
+ waitpid(
+ IPC::Open3::open3(undef, $null, $null, @{$persistence_tests->{PPerl}{termcmd}}),
+ 0,
+ );
}
}
constant
overload
+ if
base
Devel::GlobalDestruction
mro
Scalar::Util
List::Util
+ Storable
Class::Accessor::Grouped
Class::C3::Componentised
'DBIx::Class::ResultSet::Pager',
# utility classes, not part of the inheritance chain
+ 'DBIx::Class::Optional::Dependencies',
'DBIx::Class::ResultSource::RowParser::Util',
'DBIx::Class::_Util',
) };
$new->update_or_insert;
ok($new->in_storage, 'update_or_insert insert ok');
-# test in update mode
-$new->title('Insert or Update - updated');
-$new->update_or_insert;
-is( $schema->resultset("Track")->find(100)->title, 'Insert or Update - updated', 'update_or_insert update ok');
-
-SKIP: {
- skip "Tests require " . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_sqlite'), 13
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_sqlite');
-
- # test get_inflated_columns with objects
- my $event = $schema->resultset('Event')->search->first;
- my %edata = $event->get_inflated_columns;
- is($edata{'id'}, $event->id, 'got id');
- isa_ok($edata{'starts_at'}, 'DateTime', 'start_at is DateTime object');
- isa_ok($edata{'created_on'}, 'DateTime', 'create_on DateTime object');
- is($edata{'starts_at'}, $event->starts_at, 'got start date');
- is($edata{'created_on'}, $event->created_on, 'got created date');
-
-
- # get_inflated_columns w/relation and accessor alias
- isa_ok($new->updated_date, 'DateTime', 'have inflated object via accessor');
- my %tdata = $new->get_inflated_columns;
- is($tdata{'trackid'}, 100, 'got id');
- isa_ok($tdata{'cd'}, 'DBICTest::CD', 'cd is CD object');
- is($tdata{'cd'}->id, 1, 'cd object is id 1');
- is(
- $tdata{'position'},
- $schema->resultset ('Track')->search ({cd => 1})->count,
- 'Ordered assigned proper position',
- );
- is($tdata{'title'}, 'Insert or Update - updated');
- is($tdata{'last_updated_on'}, '1973-07-19T12:01:02');
- isa_ok($tdata{'last_updated_on'}, 'DateTime', 'inflated accessored column');
-}
-
throws_ok (sub {
$schema->class("Track")->load_components('DoesNotExist');
}, qr!Can't locate DBIx/Class/DoesNotExist.pm!, 'exception on nonexisting component');
$schema->source("Artist")->column_info_from_storage(1);
$schema->source("Artist")->{_columns_info_loaded} = 0;
+ my @undef_default = DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE
+ ? ()
+ : ( default_value => undef )
+ ;
+
is_deeply (
$schema->source('Artist')->columns_info,
{
artistid => {
data_type => "INTEGER",
- default_value => undef,
+ @undef_default,
is_nullable => 0,
size => undef
},
charfield => {
data_type => "char",
- default_value => undef,
+ @undef_default,
is_nullable => 1,
size => 10
},
name => {
data_type => "varchar",
- default_value => undef,
+ @undef_default,
is_nullable => 1,
is_numeric => 0,
size => 100
{
artistid => {
data_type => "INTEGER",
- default_value => undef,
+ @undef_default,
is_nullable => 0,
size => undef
},
isa_ok( $new_artist, 'DBIx::Class::Row', '$rs->new gives a row object' );
}
+
# make sure we got rid of the compat shims
SKIP: {
my $remove_version = 0.083;
throws_ok { $schema->resultset} qr/resultset\(\) expects a source name/, 'resultset with no argument throws exception';
+throws_ok { $schema->source('Artist')->result_class->new( 'bugger' ) } qr/must be a hashref/;
+
done_testing;
'rank' => {
'data_type' => 'integer',
'is_nullable' => 0,
- 'default_value' => '13',
+ DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE ? () : ( 'default_value' => '13' ),
},
'charfield' => {
'data_type' => 'char',
use Test::More;
use lib qw(t/lib);
use DBICTest;
-use Storable qw/dclone/;
my $schema = DBICTest->init_schema();
$pager = $it->pager;
is ($qcnt, 0, 'No queries on rs/pager creation');
-$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
+# test *requires* it to be Storable
+$it = do {
+ local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema;
+ Storable::dclone ($it);
+};
is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');
is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2" );
$rs->create({ title => 'bah', artist => 1, year => 2011 });
$qcnt = 0;
-$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
+# test *requires* it to be Storable
+$it = do {
+ local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema;
+ Storable::dclone ($it);
+};
is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');
is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2, even though underlying count changed" );
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_mysql';
+
use strict;
use warnings;
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
-
my $schema = DBICTest::Schema->connect($dsn, $user, $pass, { quote_names => 1 });
my $dbh = $schema->storage->dbh;
});
} 'LOCK IN SHARE MODE select works';
+my ($int_type_name, @undef_default) = DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE
+ ? ('integer')
+ : ( 'INT', default_value => undef )
+;
+
my $test_type_info = {
'artistid' => {
- 'data_type' => 'INT',
+ 'data_type' => $int_type_name,
'is_nullable' => 0,
'size' => 11,
- 'default_value' => undef,
+ @undef_default,
},
'name' => {
'data_type' => 'VARCHAR',
'is_nullable' => 1,
'size' => 100,
- 'default_value' => undef,
+ @undef_default,
},
'rank' => {
- 'data_type' => 'INT',
+ 'data_type' => $int_type_name,
'is_nullable' => 0,
'size' => 11,
- 'default_value' => 13,
+ DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE ? () : ( 'default_value' => '13' ),
},
'charfield' => {
'data_type' => 'CHAR',
'is_nullable' => 1,
'size' => 10,
- 'default_value' => undef,
+ @undef_default,
},
};
$test_type_info->{charfield}->{data_type} = 'VARCHAR';
}
+ if (DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE) {
+ $_->{data_type} = lc $_->{data_type} for values %$test_type_info;
+ }
+
my $type_info = $schema->storage->columns_info_for('artist');
is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
}
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_pg';
+
use strict;
use warnings;
use SQL::Abstract 'is_literal_value';
use DBIx::Class::_Util 'is_exception';
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-plan skip_all => <<'EOM' unless $dsn && $user;
-Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test
-( NOTE: This test drops and creates tables called 'artist', 'cd',
-'timestamp_primary_key_test', 'track', 'casecheck', 'array_test' and
-'sequence_test' as well as following sequences: 'pkid1_seq', 'pkid2_seq' and
-'nonpkid_seq'. as well as following schemas: 'dbic_t_schema',
-'dbic_t_schema_2', 'dbic_t_schema_3', 'dbic_t_schema_4', and 'dbic_t_schema_5')
-EOM
-
### load any test classes that are defined further down in the file via BEGIN blocks
-
our @test_classes; #< array that will be pushed into by test classes defined in this file
DBICTest::Schema->load_classes( map {s/.+:://;$_} @test_classes ) if @test_classes;
### pre-connect tests (keep each test separate as to make sure rebless() runs)
{
my $s = DBICTest::Schema->connect($dsn, $user, $pass);
-
- ok (!$s->storage->_dbh, 'definitely not connected');
-
- # Check that datetime_parser returns correctly before we explicitly connect.
- SKIP: {
- skip (
- "Pg parser detection test needs " . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_pg'),
- 2
- ) unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_pg');
-
- my $store = ref $s->storage;
- is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
-
- my $parser = $s->storage->datetime_parser;
- is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
- }
-
- ok (!$s->storage->_dbh, 'still not connected');
- }
-
- {
- my $s = DBICTest::Schema->connect($dsn, $user, $pass);
# make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
ok (!$s->storage->_dbh, 'definitely not connected');
is ($s->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
run_apk_tests($schema); #< older set of auto-pk tests
run_extended_apk_tests($schema); #< new extended set of auto-pk tests
+
+######## test the pg-specific syntax from https://rt.cpan.org/Ticket/Display.html?id=99503
+ lives_ok {
+ is(
+ $schema->resultset('Artist')->search({ artistid => { -in => \ '(select 4) union (select 5)' } })->count,
+ 2,
+ 'Two expected artists found on subselect union within IN',
+ );
+ };
+
### type_info tests
my $test_type_info = {
my $type_info = $schema->storage->columns_info_for('dbic_t_schema.artist');
my $artistid_defval = delete $type_info->{artistid}->{default_value};
- like($artistid_defval,
- qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
- 'columns_info_for - sequence matches Pg get_autoinc_seq expectations');
- is_deeply($type_info, $test_type_info,
- 'columns_info_for - column data types');
-
+ # The curor info is too radically different from what is in the column_info
+ # call - just punt it (DBD::SQLite tests the codepath plenty enough)
+ unless (DBIx::Class::_ENV_::STRESSTEST_COLUMN_INFO_UNAWARE_STORAGE) {
+ like(
+ $artistid_defval,
+ qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
+ 'columns_info_for - sequence matches Pg get_autoinc_seq expectations'
+ );
+ is_deeply($type_info, $test_type_info,
+ 'columns_info_for - column data types');
+ }
####### Array tests
sub { die "DBICTestTimeout" },
));
- alarm(2);
$artist2 = $schema2->resultset('Artist')->find(1);
$artist2->name('fooey');
+
+ # FIXME - this needs to go away in lieu of a non-retrying runner
+ # ( i.e. after solving RT#47005 )
+ local *DBIx::Class::Storage::DBI::_ping = sub { 1 }, DBIx::Class::_ENV_::OLD_MRO && Class::C3->reinitialize()
+ if DBIx::Class::_Util::modver_gt_or_eq( 'DBD::Pg' => '3.5.0' );
+
+ alarm(1);
$artist2->update;
};
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_pg';
+
use strict;
use warnings;
use Test::More;
-use DBIx::Class::Optional::Dependencies ();
use Try::Tiny;
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
-
my ($dsn, $dbuser, $dbpass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $dbuser);
-
my $schema = DBICTest::Schema->connect($dsn, $dbuser, $dbpass, { AutoCommit => 1 });
if ($schema->storage->_server_info->{normalized_dbms_version} >= 9.0) {
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_oracle';
+
use strict;
use warnings;
use Test::More;
use Sub::Name;
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
+$ENV{NLS_SORT} = "BINARY";
+$ENV{NLS_COMP} = "BINARY";
+$ENV{NLS_LANG} = "AMERICAN";
+
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
# optional:
my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_ORA_EXTRAUSER_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test.'
- unless ($dsn && $user && $pass);
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_oracle')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_oracle');
-
-$ENV{NLS_SORT} = "BINARY";
-$ENV{NLS_COMP} = "BINARY";
-$ENV{NLS_LANG} = "AMERICAN";
-
{
package # hide from PAUSE
DBICTest::Schema::ArtistFQN;
like ($seq, qr/\.${q}artist_pk_seq${q}$/, 'Correct PK sequence selected for sqlt-like trigger');
}
+
# test LIMIT support
for (1..6) {
$schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_oracle';
+
use strict;
use warnings;
use Test::More;
use Sub::Name;
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
-
-plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test.'
- unless ($dsn && $user && $pass);
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_oracle')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_oracle');
-
$ENV{NLS_SORT} = "BINARY";
$ENV{NLS_COMP} = "BINARY";
$ENV{NLS_LANG} = "AMERICAN";
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
+
my $v = do {
my $si = DBICTest::Schema->connect($dsn, $user, $pass)->storage->_server_info;
$si->{normalized_dbms_version}
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_oracle';
+
use strict;
use warnings;
# dealing with HQs. So just punt on the entire shuffle thing.
BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 }
-
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
-$ENV{NLS_SORT} = "BINARY";
-$ENV{NLS_COMP} = "BINARY";
-$ENV{NLS_LANG} = "AMERICAN";
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
-
-plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test.'
- unless ($dsn && $user && $pass);
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_oracle')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_oracle');
-
use DBICTest::Schema::Artist;
BEGIN {
DBICTest::Schema::Artist->add_column('parentid');
use DBICTest;
use DBICTest::Schema;
+$ENV{NLS_SORT} = "BINARY";
+$ENV{NLS_COMP} = "BINARY";
+$ENV{NLS_LANG} = "AMERICAN";
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
note "Oracle Version: " . $schema->storage->_server_info->{dbms_version};
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_db2';
+
use strict;
use warnings;
use Test::More;
use Test::Exception;
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_db2')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_db2');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_${_}" } qw/DSN USER PASS/};
-
-#warn "$dsn $user $pass";
-
-plan skip_all => 'Set $ENV{DBICTEST_DB2_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
-
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
my $name_sep = $schema->storage->_dbh_get_info('SQL_QUALIFIER_NAME_SEPARATOR');
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_db2_400';
+
use strict;
use warnings;
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_db2_400')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_db2_400');
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_400_${_}" } qw/DSN USER PASS/};
-
-#warn "$dsn $user $pass";
-
# Probably best to pass the DBQ option in the DSN to specify a specific
# libray. Something like:
# DBICTEST_DB2_400_DSN='dbi:ODBC:dsn=MyAS400;DBQ=MYLIB'
-plan skip_all => 'Set $ENV{DBICTEST_DB2_400_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_DB2_400_${_}" } qw/DSN USER PASS/};
plan tests => 6;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_mssql_odbc';
+
use strict;
use warnings;
use Test::Exception;
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mssql_odbc')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mssql_odbc');
-
use lib qw(t/lib);
use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
-
{
my $srv_ver = DBICTest::Schema->connect($dsn, $user, $pass)->storage->_server_info->{dbms_version};
ok ($srv_ver, 'Got a test server version on fresh schema: ' . ($srv_ver||'???') );
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_ase';
+
use strict;
use warnings;
no warnings 'uninitialized';
use Test::More;
use Test::Exception;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
-if (not ($dsn && $user)) {
- plan skip_all => join ' ',
- 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test.',
- 'Warning: This test drops and creates the tables:',
- "'artist', 'money_test' and 'bindtype_test'",
- ;
-};
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_ase')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_ase');
-
my @storage_types = (
'DBI::Sybase::ASE',
'DBI::Sybase::ASE::NoBindVars',
);
-eval "require DBIx::Class::Storage::$_;" for @storage_types;
+eval "require DBIx::Class::Storage::$_;" or die $@
+ for @storage_types;
my $schema;
my $storage_idx = -1;
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
+
sub get_schema {
DBICTest::Schema->connect($dsn, $user, $pass, {
on_connect_call => [
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_mssql_ado';
+
use strict;
use warnings;
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mssql_ado')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mssql_ado');
-
# Example DSN (from frew):
# dbi:ADO:PROVIDER=sqlncli10;SERVER=tcp:172.24.2.10;MARS Connection=True;Initial Catalog=CIS;UID=cis_web;PWD=...;DataTypeCompatibility=80;
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ADO_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ADO_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
-
DBICTest::Schema->load_classes(qw/VaryingMAX ArtistGUID/);
my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_informix';
+
use strict;
use warnings;
use Test::More;
use Test::Exception;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_informix')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_informix');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_INFORMIX_${_}" } qw/DSN USER PASS/};
-#warn "$dsn $user $pass";
-
-plan skip_all => 'Set $ENV{DBICTEST_INFORMIX_DSN}, _USER and _PASS to run this test'
- unless $dsn;
-
my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
auto_savepoint => 1
});
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_rdbms_mssql_sybase';
+
use strict;
use warnings;
use Test::More;
use Test::Exception;
use Scalar::Util 'weaken';
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
-
-plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
- unless ($dsn);
-
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mssql_sybase')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mssql_sybase');
-
{
my $srv_ver = DBICTest::Schema->connect($dsn, $user, $pass)->storage->_server_info->{dbms_version};
ok ($srv_ver, 'Got a test server version on fresh schema: ' . ($srv_ver||'???') );
use lib qw(t/lib);
use DBICTest;
-use DBIx::Class::_Util qw(sigwarn_silencer modver_gt_or_eq);
+use DBIx::Class::_Util qw( sigwarn_silencer modver_gt_or_eq modver_gt_or_eq_and_lt );
# check that we work somewhat OK with braindead SQLite transaction handling
#
is ($row->rank, 'abc', 'proper rank inserted into database');
# and make sure we do not lose actual bigints
+SKIP: {
+
+skip "Not testing bigint handling on known broken DBD::SQLite trial versions", 1
+ if modver_gt_or_eq_and_lt( 'DBD::SQLite', '1.45', '1.45_03' );
+
{
package DBICTest::BigIntArtist;
use base 'DBICTest::Schema::Artist';
$_[1]->do('ALTER TABLE artist ADD COLUMN bigint BIGINT');
});
-my $sqlite_broken_bigint = (
- modver_gt_or_eq('DBD::SQLite', '1.34') and ! modver_gt_or_eq('DBD::SQLite', '1.37')
-);
+my $sqlite_broken_bigint = modver_gt_or_eq_and_lt( 'DBD::SQLite', '1.34', '1.37' );
# 63 bit integer
my $many_bits = (Math::BigInt->new(2) ** 62);
}
is_deeply (\@w, [], "No mismatch warnings on bigint operations ($v_desc)" );
-}
+
+}}
done_testing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'deploy';
+
use strict;
use warnings;
use Test::More;
use Test::Warn;
-use lib qw(t/lib);
-use DBICTest;
-
use Scalar::Util 'blessed';
-BEGIN {
- require DBIx::Class;
- plan skip_all =>
- 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
-}
+use lib qw(t/lib);
+use DBICTest;
my $custom_deployment_statements_called = 0;
return $self->next::method(@_);
}
-
# Check deployment statements ctx sensitivity
{
my $schema = DBICTest->init_schema (no_deploy => 1, quote_names => 1);
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw(deploy test_rdbms_mysql);
+
use strict;
use warnings;
use DBICTest;
use DBIx::Class::_Util 'sigwarn_silencer';
-my ($dsn, $user, $pass);
-
-BEGIN {
- ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
-
- plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
- unless ($dsn);
-
- require DBIx::Class;
- plan skip_all =>
- 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy');
-
- plan skip_all =>
- 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
-}
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
# this is just to grab a lock
{
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'deploy';
+
use strict;
use warnings;
use DBICTest;
use DBIx::Class::_Util 'sigwarn_silencer';
-BEGIN {
- require DBIx::Class;
- plan skip_all =>
- 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
-}
-
# Test for SQLT-related leaks
{
my $s = DBICTest::Schema->clone;
# make sure a connected instance passed via $args does not get the $dbh improperly serialized
SKIP: {
- # YAML is a build_requires dep of SQLT - it may or may not be here
- eval { require YAML } or skip "Test requires YAML.pm", 1;
+ DBIx::Class::Optional::Dependencies->skip_without( 'YAML>=0' );
lives_ok {
+++ /dev/null
-use strict;
-use warnings;
-
-use Test::More;
-
-use lib 't/lib';
-use DBICTest;
-
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for('admin')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('admin');
-}
-
-use_ok 'DBIx::Class::Admin';
-
-
-done_testing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( admin deploy );
+
use strict;
use warnings;
use DBICTest;
use DBIx::Class::_Util 'sigwarn_silencer';
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for('admin')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('admin');
-
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('deploy');
-}
-
-use_ok 'DBIx::Class::Admin';
+use DBIx::Class::Admin;
# lock early
DBICTest->init_schema(no_deploy => 1, no_populate => 1);
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'admin';
+
use strict;
use warnings;
use lib 't/lib';
use DBICTest;
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for('admin')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('admin');
-}
-
-use_ok 'DBIx::Class::Admin';
-
+use DBIx::Class::Admin;
{ # test data maniplulation functions
-# vim: filetype=perl
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_admin_script';
+
use strict;
use warnings;
+BEGIN {
+ # just in case the user env has stuff in it
+ delete $ENV{JSON_ANY_ORDER};
+}
+
use Test::More;
use Config;
use File::Spec;
use lib qw(t/lib);
use DBICTest;
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' .
- DBIx::Class::Optional::Dependencies->req_missing_for('test_admin_script')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('test_admin_script');
-
- # just in case the user env has stuff in it
- delete $ENV{JSON_ANY_ORDER};
-}
-
$ENV{PATH} = '';
$ENV{PERL5LIB} = join ($Config{path_sep}, @INC);
-require JSON::Any;
my @json_backends = qw(DWIW PP JSON CPANEL XS);
# test the script is setting @INC properly
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
}
{
- SKIP: {
- skip "No column objects", 1;
+ {
+ local $TODO = "No column objects";
eval { my @grps = State->__grouper->groups_for("Huh"); };
ok $@, "Huh not in groups";
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
use namespace::clean;
$| = 1;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
-}
+use lib 't/cdbi/testlib';
+use Film;
ok(Film->can('db_Main'), 'set_db()');
is(Film->__driver, "SQLite", "Driver set correctly");
ok !$film, "It destroys itself";
}
-SKIP: {
- skip "Caching has been removed", 5
- if Film->isa("DBIx::Class::CDBICompat::NoObjectIndex");
+{
# my bad taste is your bad taste
my $btaste = Film->retrieve('Bad Taste');
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use Test::Warn;
# Test lazy loading
#----------------------------------------------------------------------
-INIT {
- use lib 't/cdbi/testlib';
- use Lazy;
-}
+use lib 't/cdbi/testlib';
+use Lazy;
is_deeply [ Lazy->columns('Primary') ], [qw/this/], "Pri";
is_deeply [ sort Lazy->columns('Essential') ], [qw/opop this/], "Essential";
# Now again for inflated values
SKIP: {
- skip "Requires Date::Simple 3.03", 5 unless eval "use Date::Simple 3.03; 1; ";
+ DBIx::Class::Optional::Dependencies->skip_without( 'Date::Simple>=3.03' );
Lazy->has_a(
orp => 'Date::Simple',
inflate => sub { Date::Simple->new($_[0] . '-01-01') },
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use Test::Exception;
use DBIx::Class::_Util 'sigwarn_silencer';
@YA::Film::ISA = 'Film';
-#local $SIG{__WARN__} = sub { };
-
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
- use Director;
-}
+use lib 't/cdbi/testlib';
+use Film;
+use Director;
Film->create_test_film;
ok(my $btaste = Film->retrieve('Bad Taste'), "We have Bad Taste");
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
+use Test::Exception;
use lib 't/cdbi/testlib';
use Film;
like $@, qr/fails.*constraint/, "Fails listref constraint";
my $ok = eval { Film->create({ Rating => 'U' }) };
is $@, '', "Can create with rating U";
- SKIP: {
- skip "No column objects", 2;
- ok +Film->find_column('rating')->is_constrained, "Rating is constrained";
- ok +Film->find_column('director')->is_constrained, "Director is not";
+ {
+ local $TODO = "No column objects";
+ lives_ok { Film->find_column('rating')->is_constrained || die } "Rating is constrained";
+ lives_ok { Film->find_column('director')->is_constrained || die } "Director is not";
}
}
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
+use lib 't/cdbi/testlib';
INIT {
- #local $SIG{__WARN__} =
- #sub { like $_[0], qr/clashes with built-in method/, $_[0] };
- use lib 't/cdbi/testlib';
- require Film;
- require Actor;
- require Director;
+ require Film;
+ require Actor;
+ require Director;
- Actor->has_a(film => 'Film');
- Film->has_a(director => 'Director');
+ Actor->has_a(film => 'Film');
+ Film->has_a(director => 'Director');
- sub Class::DBI::sheep { ok 0; }
+ sub Class::DBI::sheep { ok 0; }
}
# Install the deprecation warning intercept here for the rest of the 08 dev cycle
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use Test::Exception;
use DBIx::Class::_Util 'sigwarn_silencer';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( cdbicompat test_rdbms_mysql Time::Piece::MySQL>=0 );
+
$| = 1;
use warnings;
use strict;
use Test::More;
use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
-eval { require Time::Piece::MySQL }
- or plan skip_all => 'Time::Piece::MySQL required for this test';
use_ok ('Log');
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use Data::Dumper;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
- use Director;
-}
+use lib 't/cdbi/testlib';
+use Film;
+use Director;
{ # Cascade on delete
Director->has_many(nasties => 'Film');
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( cdbicompat Time::Piece>=0 );
+
use strict;
use warnings;
+
use Test::More;
use Test::Warn;
-use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
-eval { require Time::Piece }
- or plan skip_all => 'Time::Piece required for this test';
-
package Temp::DBI;
use base qw(DBIx::Class::CDBICompat);
Temp::DBI->columns(All => qw(id date));
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt cdbicompat );
+
use strict;
use warnings;
-use Test::More;
-use lib qw(t/cdbi/testlib);
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
-BEGIN {
- eval { require DateTime; DateTime->VERSION(0.55) }
- or plan skip_all => 'DateTime 0.55 required for this test';
-}
+use Test::More;
+use lib 't/lib';
+use DBICTest;
my $schema = DBICTest->init_schema();
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( cdbicompat rdbms_sqlite icdt );
+
use strict;
use warnings;
# of the "" operator.
use Test::More;
-use DBIx::Class::Optional::Dependencies;
-
-BEGIN {
- plan skip_all => "Test needs ".DBIx::Class::Optional::Dependencies->req_missing_for('test_dt_sqlite')
- unless DBIx::Class::Optional::Dependencies->req_ok_for('test_dt_sqlite');
-}
use lib 't/cdbi/testlib';
use ImplicitInflate;
-use strict;
-use warnings;
-
# Columns in CDBI could be defined as Class::DBI::Column objects rather than
# or as well as with __PACKAGE__->columns();
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( cdbicompat Class::DBI>=3.000005 );
+
+use strict;
+use warnings;
use Test::More;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
#----------------------------------------------------------------------
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( cdbicompat Class::DBI::Plugin::DeepAbstractSearch>=0 );
+
use strict;
use warnings;
-use Test::More;
-use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
+use Test::More;
-BEGIN {
- eval { require Class::DBI::Plugin::DeepAbstractSearch }
- or plan skip_all => 'Class::DBI::Plugin::DeepAbstractSearch required for this test';
-}
+use lib 't/lib';
+use DBICTest;
my $DB = DBICTest->_sqlite_dbname(sqlite_use_file => 1);;
-use Test::More;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
use strict;
use warnings;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
-}
+use Test::More;
+use lib 't/cdbi/testlib';
+use Film;
Film->create({ Title => $_, Rating => "PG" }) for ("Superman", "Super Fuzz");
Film->create({ Title => "Batman", Rating => "PG13" });
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use Test::Warn;
# Emulate that Class::DBI inflates immediately
SKIP: {
- unless (eval { require MyFoo }) {
- my ($err) = $@ =~ /([^\n]+)/;
- skip $err, 3
- }
-
+ DBIx::Class::Optional::Dependencies->skip_without([qw( Date::Simple>=3.03 test_rdbms_mysql )]);
+ require MyFoo;
my $foo = MyFoo->insert({
name => 'Whatever',
tdate => '1949-02-01',
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
-}
+use lib 't/cdbi/testlib';
+use Film;
{
Film->insert({
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
-INIT {
- use lib 't/cdbi/testlib';
-}
+use lib 't/cdbi/testlib';
{
package # hide from PAUSE
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
use Test::More;
-use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
{
package Thing;
use base qw(DBIx::Class::CDBICompat);
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
-use Test::More;
-use Class::Inspector ();
+use Test::More;
use lib 't/cdbi/testlib';
use Director;
-# Test that has_many() will load the foreign class.
+# Test that has_many() will load the foreign class
+require Class::Inspector;
ok !Class::Inspector->loaded( 'Film' );
-ok eval { Director->has_many( films => 'Film' ); 1; } || diag $@;
+ok eval { Director->has_many( films => 'Film' ); 1; } or diag $@;
my $shan_hua = Director->create({
Name => "Shan Hua",
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
package Foo;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
#----------------------------------------------------------------------
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
-INIT {
- use lib 't/cdbi/testlib';
- require Film;
-}
+use lib 't/cdbi/testlib';
+INIT { require Film }
sub Film::get_test {
my $self = shift;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
use lib 't/cdbi/testlib';
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
$| = 1;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
-}
-
-plan skip_all => "Object cache is turned off"
- if Film->isa("DBIx::Class::CDBICompat::NoObjectIndex");
-
-plan tests => 5;
-
+use lib 't/cdbi/testlib';
+use Film;
ok +Film->create({
Title => 'This Is Spinal Tap',
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
+
use Test::More;
-INIT {
- use lib 't/cdbi/testlib';
- use Film;
-}
+use lib 't/cdbi/testlib';
+use Film;
for my $title ("Bad Taste", "Braindead", "Forgotten Silver") {
Film->insert({ Title => $title, Director => 'Peter Jackson' });
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt cdbicompat );
+
use strict;
use warnings;
-use Test::More;
+use Test::More;
use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
-BEGIN {
- eval { require DateTime; DateTime->VERSION(0.55) }
- or plan skip_all => 'DateTime 0.55 required for this test';
-}
-
# Don't use Test::NoWarnings because of an unrelated DBD::SQLite warning.
my @warnings;
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt cdbicompat );
+
use strict;
use warnings;
+
use Test::More;
use Test::Exception;
use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite (); # this will issue the necessary SKIPs on missing reqs
-
-BEGIN {
- eval { require DateTime; DateTime->VERSION(0.55) }
- or plan skip_all => 'DateTime 0.55 required for this test';
-}
{
package Thing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'cdbicompat';
+
use strict;
use warnings;
use Test::More;
-use lib 't/cdbi/testlib';
-use DBIC::Test::SQLite;
+use lib 't/lib';
+use DBICTest;
DBICTest::Schema::CD->load_components(qw/CDBICompat CDBICompat::Pager/);
package # hide from PAUSE
DBIC::Test::SQLite;
+use strict;
+use warnings;
+
=head1 NAME
DBIx::Class::Test::SQLite - Base class for running Class::DBI tests against DBIx::Class compat layer, shamelessly ripped from Class::DBI::Test::SQLite
=cut
-use strict;
-use warnings;
-
-use Test::More;
-
# adding implicit search criteria to the iterator will alter the test
# mechanics - leave everything as-is instead, and hope SQLite won't
# change too much
use lib 't/lib';
use DBICTest;
-BEGIN {
- eval { require DBIx::Class::CDBICompat }
- or plan skip_all => 'Class::DBI required for this test';
-}
-
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/CDBICompat Core DB/);
use base qw(DBIx::Class::CDBICompat);
-our $dbh;
-
-my $err;
-if (! $ENV{DBICTEST_MYSQL_DSN} ) {
- $err = 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test';
-}
-elsif ( ! DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql') ) {
- $err = 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
-}
-
-if ($err) {
- my $t = eval { Test::Builder->new };
- if ($t and ! $t->current_test) {
- $t->skip_all ($err);
- }
- else {
- die "$err\n";
- }
-}
-
my @connect = (@ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/}, { PrintError => 0});
# this is only so we grab a lock on mysql
{
my $x = DBICTest::Schema->connect(@connect);
}
-$dbh = DBI->connect(@connect) or die DBI->errstr;
+our $dbh = DBI->connect(@connect) or die DBI->errstr;
my @table;
-END { $dbh->do("DROP TABLE $_") foreach @table }
+END {
+ $dbh->do("DROP TABLE $_") for @table;
+ undef $dbh;
+}
__PACKAGE__->connection(@connect);
use base 'MyBase';
-use Date::Simple 3.03;
-
__PACKAGE__->set_table();
__PACKAGE__->columns(All => qw/myid name val tdate/);
__PACKAGE__->has_a(
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( test_rdbms_sqlite icdt );
+
use strict;
use warnings;
my $schema = DBICTest->init_schema();
-plan skip_all => 'Inflation tests need ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt');
-
$schema->class('CD') ->inflate_column( 'year',
{ inflate => sub { DateTime->new( year => shift ) },
deflate => sub { shift->year } }
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt _rdbms_firebird_common );
+
use strict;
use warnings;
use Test::More;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
use Scope::Guard ();
DBICTEST_FIREBIRD_ODBC => 'test_rdbms_firebird_odbc',
};
-plan skip_all => join (' ',
- 'Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}',
- 'and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},',
- '_USER and _PASS to run these tests.',
-
- "WARNING: This test drops and creates a table called 'event'",
-) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;
-
-plan skip_all => ( 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for('test_dt') )
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt');
+my @tdeps = values %$env2optdep;
+plan skip_all => 'Test needs ' . (join ' OR ', map
+ { "[ @{[ DBIx::Class::Optional::Dependencies->req_missing_for( $_ ) ]} ]" }
+ @tdeps
+) unless scalar grep
+ { DBIx::Class::Optional::Dependencies->req_ok_for( $_ ) }
+ @tdeps
+;
my $schema;
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt test_rdbms_informix );
+
use strict;
use warnings;
use Test::More;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
use Scope::Guard ();
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt')
-. ' and ' .
-DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_informix')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt')
- && DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_informix');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_INFORMIX_${_}" } qw/DSN USER PASS/};
-
-if (not $dsn) {
- plan skip_all => <<'EOF';
-Set $ENV{DBICTEST_INFORMIX_DSN} _USER and _PASS to run this test'.
-Warning: This test drops and creates a table called 'event'";
-EOF
-}
-
my $schema;
{
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt _rdbms_msaccess_common );
+
use strict;
use warnings;
use Test::More;
use Scope::Guard ();
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
+my @tdeps = qw( test_rdbms_msaccess_odbc test_rdbms_msaccess_ado );
+plan skip_all => 'Test needs ' . (join ' OR ', map
+ { "[ @{[ DBIx::Class::Optional::Dependencies->req_missing_for( $_ ) ]} ]" }
+ @tdeps
+) unless scalar grep
+ { DBIx::Class::Optional::Dependencies->req_ok_for( $_ ) }
+ @tdeps
+;
+
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSACCESS_ODBC_${_}" } qw/DSN USER PASS/};
my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSACCESS_ADO_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Test needs ' .
- (join ' and ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_dt'),
- (join ' or ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_msaccess_odbc'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_msaccess_ado')))
- unless
- DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt') && (
- $dsn && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_msaccess_odbc')
- or
- $dsn2 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_msaccess_ado'))
- or (not $dsn || $dsn2);
-
-plan skip_all => <<'EOF' unless $dsn || $dsn2;
-Set $ENV{DBICTEST_MSACCESS_ODBC_DSN} and/or $ENV{DBICTEST_MSACCESS_ADO_DSN} (and optionally _USER and _PASS) to run these tests.
-Warning: this test drops and creates the table 'track'.
-EOF
-
my @connect_info = (
[ $dsn, $user || '', $pass || '' ],
[ $dsn2, $user2 || '', $pass2 || '' ],
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt _rdbms_mssql_common );
+
use strict;
use warnings;
use Test::Exception;
use Scope::Guard ();
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
+my @tdeps = qw( test_rdbms_mssql_odbc test_rdbms_mssql_sybase test_rdbms_mssql_ado );
+plan skip_all => 'Test needs ' . (join ' OR ', map
+ { "[ @{[ DBIx::Class::Optional::Dependencies->req_missing_for( $_ ) ]} ]" }
+ @tdeps
+) unless scalar grep
+ { DBIx::Class::Optional::Dependencies->req_ok_for( $_ ) }
+ @tdeps
+;
+
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
my ($dsn3, $user3, $pass3) = @ENV{map { "DBICTEST_MSSQL_ADO_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Test needs ' .
- (join ' and ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_dt'),
- (join ' or ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_mssql_odbc'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_mssql_sybase'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_mssql_ado')))
- unless
- DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt') && (
- $dsn && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_mssql_odbc')
- or
- $dsn2 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_mssql_sybase')
- or
- $dsn3 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_mssql_ado'))
- or (not $dsn || $dsn2 || $dsn3);
-
-if (not ($dsn || $dsn2 || $dsn3)) {
- plan skip_all =>
- 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN} and/or $ENV{DBICTEST_MSSQL_DSN} and/or '
- .'$ENV{DBICTEST_MSSQL_ADO_DSN} _USER and _PASS to run this test' .
- "\nWarning: This test drops and creates tables called 'event_small_dt' and"
- ." 'track'.";
-}
-
DBICTest::Schema->load_classes('EventSmallDT');
my @connect_info = (
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt test_rdbms_oracle );
+
use strict;
use warnings;
use Test::More;
use Test::Exception;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_oracle')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_oracle');
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
-
-if (not ($dsn && $user && $pass)) {
- plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
- 'Warning: This test drops and creates a table called \'event\'';
-}
-
# DateTime::Format::Oracle needs this set
$ENV{NLS_DATE_FORMAT} = 'DD-MON-YY';
$ENV{NLS_TIMESTAMP_FORMAT} = 'YYYY-MM-DD HH24:MI:SSXFF';
$ENV{NLS_SORT} = "BINARY";
$ENV{NLS_COMP} = "BINARY";
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
# older oracles do not support a TIMESTAMP datatype
# clean up our mess
END {
- if($schema && (my $dbh = $schema->storage->dbh)) {
+ if($schema && (my $dbh = $schema->storage->_dbh)) {
$dbh->do("DROP TABLE event");
}
undef $schema;
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt _rdbms_sqlanywhere_common );
+
use strict;
use warnings;
use Test::More;
use Scope::Guard ();
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
+my @tdeps = qw( test_rdbms_sqlanywhere test_rdbms_sqlanywhere_odbc );
+plan skip_all => 'Test needs ' . (join ' OR ', map
+ { "[ @{[ DBIx::Class::Optional::Dependencies->req_missing_for( $_ ) ]} ]" }
+ @tdeps
+) unless scalar grep
+ { DBIx::Class::Optional::Dependencies->req_ok_for( $_ ) }
+ @tdeps
+;
+
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SQLANYWHERE_${_}" } qw/DSN USER PASS/};
my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_SQLANYWHERE_ODBC_${_}" } qw/DSN USER PASS/};
-plan skip_all => 'Test needs ' .
- (join ' and ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_dt'),
- (join ' or ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_sqlanywhere'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_sqlanywhere_odbc')))
- unless
- DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt') && (
- $dsn && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_sqlanywhere')
- or
- $dsn2 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_sqlanywhere_odbc'))
- or (not $dsn || $dsn2);
-
-if (not ($dsn || $dsn2)) {
- plan skip_all => <<'EOF';
-Set $ENV{DBICTEST_SQLANYWHERE_DSN} and/or $ENV{DBICTEST_SQLANYWHERE_ODBC_DSN}
-_USER and _PASS to run this test'.
-Warning: This test drops and creates a table called 'event'";
-EOF
-}
-
my @info = (
[ $dsn, $user, $pass ],
[ $dsn2, $user2, $pass2 ],
--- /dev/null
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt test_rdbms_sqlite );
+
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Warn;
+use Try::Tiny;
+use lib qw(t/lib);
+use DBICTest;
+
+# Test offline parser determination (formerly t/inflate/datetime_determine_parser.t)
+{
+ my $schema = DBICTest->init_schema(
+ no_deploy => 1, # Deploying would cause an early rebless
+ );
+
+ is(
+ ref $schema->storage, 'DBIx::Class::Storage::DBI',
+ 'Starting with generic storage'
+ );
+
+ # Calling date_time_parser should cause the storage to be reblessed,
+ # so that we can pick up datetime_parser_type from subclasses
+ my $parser = $schema->storage->datetime_parser();
+
+ is($parser, 'DateTime::Format::SQLite', 'Got expected storage-set datetime_parser');
+ isa_ok($schema->storage, 'DBIx::Class::Storage::DBI::SQLite', 'storage');
+
+ ok(! $schema->storage->connected, 'Not yet connected');
+}
+
+# so user's env doesn't screw us
+delete $ENV{DBIC_DT_SEARCH_OK};
+
+my $schema = DBICTest->init_schema();
+
+# inflation test
+my $event = $schema->resultset("Event")->find(1);
+
+isa_ok($event->starts_at, 'DateTime', 'DateTime returned');
+
+# klunky, but makes older Test::More installs happy
+my $starts = $event->starts_at;
+is("$starts", '2006-04-25T22:24:33', 'Correct date/time');
+
+my $dt_warn_re = qr/DateTime objects.+not supported properly/;
+
+my $row;
+
+{
+ local $ENV{DBIC_DT_SEARCH_OK} = 1;
+ local $SIG{__WARN__} = sub {
+ fail('Disabled warning still issued') if $_[0] =~ $dt_warn_re;
+ warn @_;
+ };
+ $row = $schema->resultset('Event')->search({ starts_at => $starts })->single
+}
+
+warnings_exist {
+ $row = $schema->resultset('Event')->search({ starts_at => $starts })->single
+} [$dt_warn_re],
+ 'using a DateTime object in ->search generates a warning';
+
+{
+ local $TODO = "This stuff won't work without a -dt operator of some sort"
+ unless eval { require DBIx::Class::SQLMaker::DateOps };
+
+ is(eval { $row->id }, 1, 'DT in search');
+
+ local $ENV{DBIC_DT_SEARCH_OK} = 1;
+
+ ok($row =
+ $schema->resultset('Event')->search({ starts_at => { '>=' => $starts } })
+ ->single);
+
+ is(eval { $row->id }, 1, 'DT in search with condition');
+}
+
+# create using DateTime
+my $created = $schema->resultset('Event')->create({
+ starts_at => DateTime->new(year=>2006, month=>6, day=>18),
+ created_on => DateTime->new(year=>2006, month=>6, day=>23)
+});
+my $created_start = $created->starts_at;
+
+isa_ok($created->starts_at, 'DateTime', 'DateTime returned');
+is("$created_start", '2006-06-18T00:00:00', 'Correct date/time');
+
+## timestamp field
+isa_ok($event->created_on, 'DateTime', 'DateTime returned');
+
+## varchar fields
+isa_ok($event->varchar_date, 'DateTime', 'DateTime returned');
+isa_ok($event->varchar_datetime, 'DateTime', 'DateTime returned');
+
+## skip inflation field
+isnt(ref($event->skip_inflation), 'DateTime', 'No DateTime returned for skip inflation column');
+
+# klunky, but makes older Test::More installs happy
+my $createo = $event->created_on;
+is("$createo", '2006-06-22T21:00:05', 'Correct date/time');
+
+my $created_cron = $created->created_on;
+
+isa_ok($created->created_on, 'DateTime', 'DateTime returned');
+is("$created_cron", '2006-06-23T00:00:00', 'Correct date/time');
+
+## varchar field using inflate_date => 1
+my $varchar_date = $event->varchar_date;
+is("$varchar_date", '2006-07-23T00:00:00', 'Correct date/time');
+
+## varchar field using inflate_datetime => 1
+my $varchar_datetime = $event->varchar_datetime;
+is("$varchar_datetime", '2006-05-22T19:05:07', 'Correct date/time');
+
+## skip inflation field
+my $skip_inflation = $event->skip_inflation;
+is ("$skip_inflation", '2006-04-21 18:04:06', 'Correct date/time');
+
+# extra accessor tests with update_or_insert
+{
+ my $new = $schema->resultset("Track")->new( {
+ trackid => 100,
+ cd => 1,
+ title => 'Insert or Update',
+ last_updated_on => '1973-07-19 12:01:02'
+ } );
+ $new->update_or_insert;
+ ok($new->in_storage, 'update_or_insert insert ok');
+
+ # test in update mode
+ $new->title('Insert or Update - updated');
+ $new->update_or_insert;
+ is( $schema->resultset("Track")->find(100)->title, 'Insert or Update - updated', 'update_or_insert update ok');
+
+ # test get_inflated_columns with objects
+ my $event = $schema->resultset('Event')->search->first;
+ my %edata = $event->get_inflated_columns;
+ is($edata{'id'}, $event->id, 'got id');
+ isa_ok($edata{'starts_at'}, 'DateTime', 'start_at is DateTime object');
+ isa_ok($edata{'created_on'}, 'DateTime', 'create_on DateTime object');
+ is($edata{'starts_at'}, $event->starts_at, 'got start date');
+ is($edata{'created_on'}, $event->created_on, 'got created date');
+
+ # get_inflated_columns w/relation and accessor alias
+ isa_ok($new->updated_date, 'DateTime', 'have inflated object via accessor');
+ my %tdata = $new->get_inflated_columns;
+ is($tdata{'trackid'}, 100, 'got id');
+ isa_ok($tdata{'cd'}, 'DBICTest::CD', 'cd is CD object');
+ is($tdata{'cd'}->id, 1, 'cd object is id 1');
+ is(
+ $tdata{'position'},
+ $schema->resultset ('Track')->search ({cd => 1})->count,
+ 'Ordered assigned proper position',
+ );
+ is($tdata{'title'}, 'Insert or Update - updated');
+ is($tdata{'last_updated_on'}, '1973-07-19T12:01:02');
+ isa_ok($tdata{'last_updated_on'}, 'DateTime', 'inflated accessored column');
+}
+
+# create and update with literals
+{
+ my $d = {
+ created_on => \ '2001-09-11',
+ starts_at => \[ '?' => '2001-10-26' ],
+ };
+
+ my $ev = $schema->resultset('Event')->create($d);
+
+ for my $col (qw(created_on starts_at)) {
+ ok (ref $ev->$col, "literal untouched in $col");
+ is_deeply( $ev->$col, $d->{$col});
+ is_deeply( $ev->get_inflated_column($col), $d->{$col});
+ is_deeply( $ev->get_column($col), $d->{$col});
+ }
+
+ $ev->discard_changes;
+
+ is_deeply(
+ { $ev->get_dirty_columns },
+ {}
+ );
+
+ for my $col (qw(created_on starts_at)) {
+ isa_ok ($ev->$col, "DateTime", "$col properly inflated on retrieve");
+ }
+
+ for my $meth (qw(set_inflated_columns set_columns)) {
+
+ $ev->$meth({%$d});
+
+ is_deeply(
+ { $ev->get_dirty_columns },
+ $d,
+ "Expected dirty cols after setting literals via $meth",
+ );
+
+ $ev->update;
+
+ for my $col (qw(created_on starts_at)) {
+ ok (ref $ev->$col, "literal untouched in $col updated via $meth");
+ is_deeply( $ev->$col, $d->{$col});
+ is_deeply( $ev->get_inflated_column($col), $d->{$col});
+ is_deeply( $ev->get_column($col), $d->{$col});
+ }
+ }
+}
+
+done_testing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt test_rdbms_ase );
+
use strict;
use warnings;
use Test::Exception;
use Scope::Guard ();
use Try::Tiny;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt')
-. ' and ' .
-DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_ase')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt')
- && DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_ase');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
-if (not ($dsn && $user)) {
- plan skip_all =>
- 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' .
- "\nWarning: This test drops and creates a table called 'track' and " .
- "'event_small_dt'";
-}
-
DBICTest::Schema->load_classes('EventSmallDT');
my @storage_types = (
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt icdt_mysql );
+
use strict;
use warnings;
use Test::More;
use Test::Exception;
use Test::Warn;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
use DBICTest::Schema;
use DBIx::Class::_Util 'sigwarn_silencer';
-plan skip_all => 'Inflation tests need ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_mysql')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_mysql');
-
{
DBICTest::Schema->load_classes('EventTZ');
local $SIG{__WARN__} = sigwarn_silencer( qr/extra \=\> .+? has been deprecated/ );
+use DBIx::Class::Optional::Dependencies -skip_all_without => qw( icdt icdt_pg );
+
use strict;
use warnings;
use Test::More;
use Test::Warn;
-use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_pg');
-
DBICTest::Schema->load_classes('EventTZPg');
+{
+ my $s = DBICTest::Schema->connect('dbi:Pg:whatever');
+
+ ok (!$s->storage->_dbh, 'definitely not connected');
+
+ # Check that datetime_parser returns correctly before we explicitly connect.
+ my $store = ref $s->storage;
+ is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
+
+ my $parser = $s->storage->datetime_parser;
+ is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+
+ ok (!$s->storage->_dbh, 'still not connected');
+}
+
my $schema = DBICTest->init_schema();
warnings_are {
+++ /dev/null
-use strict;
-use warnings;
-
-use Test::More;
-use Test::Warn;
-use Try::Tiny;
-use lib qw(t/lib);
-use DBICTest;
-
-# so user's env doesn't screw us
-delete $ENV{DBIC_DT_SEARCH_OK};
-
-my $schema = DBICTest->init_schema();
-
-plan skip_all => 'DT inflation tests need ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_sqlite')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_sqlite');
-
-# inflation test
-my $event = $schema->resultset("Event")->find(1);
-
-isa_ok($event->starts_at, 'DateTime', 'DateTime returned');
-
-# klunky, but makes older Test::More installs happy
-my $starts = $event->starts_at;
-is("$starts", '2006-04-25T22:24:33', 'Correct date/time');
-
-my $dt_warn_re = qr/DateTime objects.+not supported properly/;
-
-my $row;
-
-{
- local $ENV{DBIC_DT_SEARCH_OK} = 1;
- local $SIG{__WARN__} = sub {
- fail('Disabled warning still issued') if $_[0] =~ $dt_warn_re;
- warn @_;
- };
- $row = $schema->resultset('Event')->search({ starts_at => $starts })->single
-}
-
-warnings_exist {
- $row = $schema->resultset('Event')->search({ starts_at => $starts })->single
-} [$dt_warn_re],
- 'using a DateTime object in ->search generates a warning';
-
-{
- local $TODO = "This stuff won't work without a -dt operator of some sort"
- unless eval { require DBIx::Class::SQLMaker::DateOps };
-
- is(eval { $row->id }, 1, 'DT in search');
-
- local $ENV{DBIC_DT_SEARCH_OK} = 1;
-
- ok($row =
- $schema->resultset('Event')->search({ starts_at => { '>=' => $starts } })
- ->single);
-
- is(eval { $row->id }, 1, 'DT in search with condition');
-}
-
-# create using DateTime
-my $created = $schema->resultset('Event')->create({
- starts_at => DateTime->new(year=>2006, month=>6, day=>18),
- created_on => DateTime->new(year=>2006, month=>6, day=>23)
-});
-my $created_start = $created->starts_at;
-
-isa_ok($created->starts_at, 'DateTime', 'DateTime returned');
-is("$created_start", '2006-06-18T00:00:00', 'Correct date/time');
-
-## timestamp field
-isa_ok($event->created_on, 'DateTime', 'DateTime returned');
-
-## varchar fields
-isa_ok($event->varchar_date, 'DateTime', 'DateTime returned');
-isa_ok($event->varchar_datetime, 'DateTime', 'DateTime returned');
-
-## skip inflation field
-isnt(ref($event->skip_inflation), 'DateTime', 'No DateTime returned for skip inflation column');
-
-# klunky, but makes older Test::More installs happy
-my $createo = $event->created_on;
-is("$createo", '2006-06-22T21:00:05', 'Correct date/time');
-
-my $created_cron = $created->created_on;
-
-isa_ok($created->created_on, 'DateTime', 'DateTime returned');
-is("$created_cron", '2006-06-23T00:00:00', 'Correct date/time');
-
-## varchar field using inflate_date => 1
-my $varchar_date = $event->varchar_date;
-is("$varchar_date", '2006-07-23T00:00:00', 'Correct date/time');
-
-## varchar field using inflate_datetime => 1
-my $varchar_datetime = $event->varchar_datetime;
-is("$varchar_datetime", '2006-05-22T19:05:07', 'Correct date/time');
-
-## skip inflation field
-my $skip_inflation = $event->skip_inflation;
-is ("$skip_inflation", '2006-04-21 18:04:06', 'Correct date/time');
-
-# create and update with literals
-{
- my $d = {
- created_on => \ '2001-09-11',
- starts_at => \[ '?' => '2001-10-26' ],
- };
-
- my $ev = $schema->resultset('Event')->create($d);
-
- for my $col (qw(created_on starts_at)) {
- ok (ref $ev->$col, "literal untouched in $col");
- is_deeply( $ev->$col, $d->{$col});
- is_deeply( $ev->get_inflated_column($col), $d->{$col});
- is_deeply( $ev->get_column($col), $d->{$col});
- }
-
- $ev->discard_changes;
-
- is_deeply(
- { $ev->get_dirty_columns },
- {}
- );
-
- for my $col (qw(created_on starts_at)) {
- isa_ok ($ev->$col, "DateTime", "$col properly inflated on retrieve");
- }
-
- for my $meth (qw(set_inflated_columns set_columns)) {
-
- $ev->$meth({%$d});
-
- is_deeply(
- { $ev->get_dirty_columns },
- $d,
- "Expected dirty cols after setting literals via $meth",
- );
-
- $ev->update;
-
- for my $col (qw(created_on starts_at)) {
- ok (ref $ev->$col, "literal untouched in $col updated via $meth");
- is_deeply( $ev->$col, $d->{$col});
- is_deeply( $ev->get_inflated_column($col), $d->{$col});
- is_deeply( $ev->get_column($col), $d->{$col});
- }
- }
-}
-
-done_testing;
+++ /dev/null
-use strict;
-use warnings;
-
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt_sqlite')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt_sqlite');
-
-my $schema = DBICTest->init_schema(
- no_deploy => 1, # Deploying would cause an early rebless
-);
-
-is(
- ref $schema->storage, 'DBIx::Class::Storage::DBI',
- 'Starting with generic storage'
-);
-
-# Calling date_time_parser should cause the storage to be reblessed,
-# so that we can pick up datetime_parser_type from subclasses
-
-my $parser = $schema->storage->datetime_parser();
-
-is($parser, 'DateTime::Format::SQLite', 'Got expected storage-set datetime_parser');
-isa_ok($schema->storage, 'DBIx::Class::Storage::DBI::SQLite', 'storage');
-
-done_testing;
use Test::More;
-use DBIx::Class::_Util 'modver_gt_or_eq';
+use DBIx::Class::_Util 'modver_gt_or_eq_and_lt';
use base();
BEGIN {
plan skip_all => 'base.pm 2.20 (only present in perl 5.19.7) is known to break this test'
- if modver_gt_or_eq(base => '2.19_01') and ! modver_gt_or_eq(base => '2.21');
+ if modver_gt_or_eq_and_lt( 'base', '2.19_01', '2.21' );
}
use Test::Exception;
}
}
-plan (skip_all => "No suitable serializer found") unless $selected;
-
DBICTest::Schema::Serialized->inflate_column( 'serialized',
{ inflate => $selected->{inflater},
deflate => $selected->{deflater},
}
END {
+ # Make sure we run after any cleanup in other END blocks
+ push @{ B::end_av()->object_2svref }, sub {
assert_empty_weakregistry($weak_registry, 'quiet');
+ };
}
=head2 deploy_schema
}
END {
- assert_empty_weakregistry($weak_registry, 'quiet');
+ # Make sure we run after any cleanup in other END blocks
+ push @{ B::end_av()->object_2svref }, sub {
+ assert_empty_weakregistry($weak_registry, 'quiet');
+ };
}
1;
'CD_to_Producer',
'Dummy', # this is a real result class we remove in the hook below
),
- qw/SelfRefAlias TreeLike TwoKeyTreeLike Event EventTZ NoPrimaryKey/,
+ qw/SelfRefAlias TreeLike TwoKeyTreeLike Event NoPrimaryKey/,
qw/Collection CollectionObject TypedObject Owners BooksInLibrary/,
qw/ForceForeign Encoded/,
);
# need to operate on the instance for things to work
__PACKAGE__->result_source_instance->view_definition( sprintf (
- 'SELECT %s FROM cd WHERE year = \'2000\'',
+ "SELECT %s FROM cd WHERE year = '2000'",
join (', ', __PACKAGE__->columns),
));
}
}
-use Module::Runtime 'module_notional_filename';
-BEGIN {
- for my $mod (qw( SQL::Abstract::Test SQL::Abstract )) {
- if ( $INC{ module_notional_filename($mod) } ) {
- # FIXME this does not seem to work in BEGIN - why?!
- #require Carp;
- #$Carp::Internal{ (__PACKAGE__) }++;
- #Carp::croak( __PACKAGE__ . " must be loaded before $mod" );
-
- my ($fr, @frame) = 1;
- while (@frame = caller($fr++)) {
- last if $frame[1] !~ m|^t/lib/DBICTest|;
- }
-
- die __PACKAGE__ . " must be loaded before $mod (or modules using $mod) at $frame[1] line $frame[2]\n";
- }
- }
-}
-
use Config;
use Carp 'confess';
use Scalar::Util qw(blessed refaddr);
use DBICTest::Util 'stacktrace';
use constant {
CV_TRACING => DBIx::Class::Optional::Dependencies->req_ok_for ('test_leaks_heavy'),
- SKIP_SCALAR_REFS => ( $] > 5.017 ) ? 1 : 0,
};
use base 'Exporter';
for keys %$reg;
}
- # FIXME/INVESTIGATE - something fishy is going on with refs to plain
- # strings, perhaps something to do with the CoW work etc...
- return $target if SKIP_SCALAR_REFS and reftype($target) eq 'SCALAR';
-
if (! defined $weak_registry->{$refaddr}{weakref}) {
$weak_registry->{$refaddr} = {
stacktrace => stacktrace(1),
weakref => $target,
};
- weaken( $weak_registry->{$refaddr}{weakref} );
- $refs_traced++;
+
+ # on perl < 5.8.3 sometimes a weaken can throw (can't find RT)
+ # so guard against that unlikely event
+ local $@;
+ eval { weaken( $weak_registry->{$refaddr}{weakref} ); $refs_traced++ }
+ or delete $weak_registry->{$refaddr};
}
my $desc = refdesc $target;
elsif (CV_TRACING and $type eq 'CODE') {
$visited_cnt += visit_refs({ %$args, refs => [ map {
( !isweak($_) ) ? $_ : ()
- } scalar PadWalker::closed_over($r) ] }); # scalar due to RT#92269
+ } values %{ scalar PadWalker::closed_over($r) } ] }); # scalar due to RT#92269
}
1;
} or warn "Could not descend into @{[ refdesc $r ]}: $@\n";
my $refs_per_pkg;
- my $dummy_addresslist;
-
my $seen_refs = {};
visit_namespaces(
action => sub {
$refs_per_pkg->{$pkg} += visit_refs (
seen_refs => $seen_refs,
- # FIXME FIXME FIXME
- # This is so damn odd - if we feed a constsub {1} (or in fact almost
- # anything other than the actionsub below, any scalarref will show
- # up as a leak, trapped by... something...
- # Ideally we should be able to const this to sub{1} and just return
- # $seen_refs (in fact it is identical to the dummy list at the end of
- # a run here). Alas this doesn't seem to work, so punt for now...
- action => sub { ++$dummy_addresslist->{ hrefaddr $_[0] } },
+ action => sub { 1 },
refs => [ map { my $sym = $_;
# *{"$pkg$sym"}{CODE} won't simply work - MRO-cached CVs are invisible there
use Test::More;
use Test::Exception;
+use Math::BigInt;
use lib qw(t/lib);
use DBICTest;
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt');
-}
-
my $schema = DBICTest->init_schema();
my $artist_rs = $schema->resultset('Artist');
my $cd_rs = $schema->resultset('CD');
}
{
- my $formatter = DateTime::Format::Strptime->new(pattern => '%Y');
- my $dt = DateTime->new(year => 2006, month => 06, day => 06,
- formatter => $formatter );
+ my $dt = Math::BigInt->new(2006);
+
my $cd;
lives_ok {
$cd = $cd_rs->search({ year => $dt})->create
use warnings;
use Test::More;
+use Math::BigInt;
use lib qw(t/lib);
use DBICTest;
'extra columns returned by get_inflated_columns without inflatable columns',
);
-SKIP: {
- skip (
- "+select/get_inflated_columns tests need " . DBIx::Class::Optional::Dependencies->req_missing_for ('test_dt'),
- 1
- ) unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_dt');
-
- $schema->class('CD')->inflate_column( 'year',
- { inflate => sub { DateTime->new( year => shift ) },
- deflate => sub { shift->year } }
- );
+# Test object inflation
+$schema->class('CD')->inflate_column( 'year',
+ { inflate => sub { Math::BigInt->new( shift ) },
+ deflate => sub { shift() . '' } }
+);
- $basecols{year} = DateTime->new ( year => $basecols{year} );
+$basecols{year} = Math::BigInt->new( $basecols{year} );
- is_deeply (
- { $plus_rs->first->get_inflated_columns, %todo_rel_inflation_override },
- { %basecols, tr_cnt => $track_cnt },
- 'extra columns returned by get_inflated_columns',
- );
-}
+is_deeply (
+ { $plus_rs->first->get_inflated_columns, %todo_rel_inflation_override },
+ { %basecols, tr_cnt => $track_cnt },
+ 'extra columns returned by get_inflated_columns',
+);
done_testing;
my $deparser;
sub is_same_src { SKIP: {
+
+ skip "Skipping comparison of unicode-posioned source", 1
+ if DBIx::Class::_ENV_::STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE;
+
$deparser ||= B::Deparse->new;
local $Test::Builder::Level = $Test::Builder::Level + 1;
$expect =~ s/__NBC__/perlstring($DBIx::Class::ResultSource::RowParser::Util::null_branch_class)/ge;
- $expect = " { use strict; use warnings FATAL => 'all';\n$expect\n }";
+ $expect = " { use strict; use warnings FATAL => 'uninitialized';\n$expect\n }";
my @normalized = map {
my $cref = eval "sub { $_ }" or do {
--- /dev/null
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib 't/lib';
+use DBICTest;
+
+{
+ package DBICTest::Foo;
+ use base "DBIx::Class::Core";
+}
+
+throws_ok { DBICTest::Foo->new("urgh") } qr/must be a hashref/;
+
+done_testing;
use lib qw(t/lib);
use DBICTest ':DiffSQL';
-
-use Storable 'dclone';
+use DBIx::Class::_Util 'serialize';
my $schema = DBICTest->init_schema();
# A search() with prefetch seems to pollute an already joined resultset
# in a way that offsets future joins (adapted from a test case by Debolaz)
{
- my ($cd_rs, $attrs);
+ my ($cd_rs, $preimage);
# test a real-life case - rs is obtained by an implicit m2m join
$cd_rs = $schema->resultset ('Producer')->first->cds;
- $attrs = dclone( $cd_rs->{attrs} );
+ $preimage = serialize $cd_rs->{attrs};
$cd_rs->search ({})->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after a simple search');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after search with prefetch');
}, 'first prefetching search ok');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after another search with prefetch')
}, 'second prefetching search ok');
# test a regular rs with an empty seen_join injected - it should still work!
$cd_rs = $schema->resultset ('CD');
$cd_rs->{attrs}{seen_join} = {};
- $attrs = dclone( $cd_rs->{attrs} );
+ $preimage = serialize $cd_rs->{attrs};
$cd_rs->search ({})->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after a simple search');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after search with prefetch');
}, 'first prefetching search ok');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
+ is ( serialize $cd_rs->{attrs}, $preimage, 'Resultset attributes preserved after another search with prefetch')
}, 'second prefetching search ok');
}
use warnings;
use Test::More;
use Test::Warn;
+use Test::Exception;
use lib qw(t/lib);
use DBICTest ':DiffSQL';
is($num, 69, 'test overloaded object is "sane"');
is("$num", 69, 'test overloaded object is "sane"');
-for my $t (
+my @tests = (
{
where => { artistid => 1, charfield => undef },
cc_result => { artistid => 1, charfield => undef },
},
efcc_result => { x => { -ident => 'y' } },
},
+);
+
+# these die as of SQLA 1.80 - make sure we do not transform them
+# into something usable instead
+for my $lhs (undef, '', { -ident => 'foo' }, { -value => 'foo' } ) {
+ no warnings 'uninitialized';
+
+ for my $w (
+ ( map { { -or => $_ }, (ref $lhs ? () : { @$_ } ) }
+ [ $lhs => "foo" ],
+ [ $lhs => { "=" => "bozz" } ],
+ [ $lhs => { "=" => \"bozz" } ],
+ [ $lhs => { -max => \"bizz" } ],
+ ),
+
+ (ref $lhs) ? () : (
+ { -or => [ -and => { $lhs => "baz" }, bizz => "buzz" ] },
+ { -or => [ foo => "bar", { $lhs => "baz" }, bizz => "buzz" ] },
+ { foo => "bar", -or => { $lhs => "baz" } },
+ { foo => "bar", -or => { $lhs => \"baz" }, bizz => "buzz" },
+ ),
+
+ { foo => "bar", -and => [ $lhs => \"baz" ], bizz => "buzz" },
+ { foo => "bar", -or => [ $lhs => \"baz" ], bizz => "buzz" },
+
+ { -or => [ foo => "bar", [ $lhs => \"baz" ], bizz => "buzz" ] },
+ { -or => [ foo => "bar", $lhs => \"baz", bizz => "buzz" ] },
+ { -or => [ foo => "bar", $lhs => \["baz"], bizz => "buzz" ] },
+ { -or => [ $lhs => \"baz" ] },
+ { -or => [ $lhs => \["baz"] ] },
+
+ ) {
+ push @tests, {
+ where => $w,
+ throw => qr/
+ \QSupplying an empty left hand side argument is not supported in \E(?:array|hash)-pairs
+ |
+ \QIllegal use of top-level '-\E(?:value|ident)'
+ /x,
+ }
+ }
+}
+
+# these are deprecated as of SQLA 1.79 - make sure we do not transform
+# them without losing the warning
+for my $lhs (undef, '') {
+ for my $rhs ( \"baz", \[ "baz" ] ) {
+ no warnings 'uninitialized';
+
+ my $expected_warning = qr/\QHash-pairs consisting of an empty string with a literal are deprecated/;
+
+ push @tests, {
+ where => { $lhs => $rhs },
+ cc_result => { -and => [ $rhs ] },
+ efcc_result => {},
+ sql => 'WHERE baz',
+ warn => $expected_warning,
+ };
+
+ for my $w (
+ { foo => "bar", -and => { $lhs => $rhs }, bizz => "buzz" },
+ { foo => "bar", $lhs => $rhs, bizz => "buzz" },
+ ) {
+ push @tests, {
+ where => $w,
+ cc_result => {
+ -and => [ $rhs ],
+ bizz => "buzz",
+ foo => "bar",
+ },
+ efcc_result => {
+ foo => "bar",
+ bizz => "buzz",
+ },
+ sql => 'WHERE baz AND bizz = ? AND foo = ?',
+ warn => $expected_warning,
+ };
+ }
+ }
+}
+
+# lots of extra silly tests with a false column
+for my $eq (
+ \"= baz",
+ \[ "= baz" ],
+ { '=' => { -ident => 'baz' } },
+ { '=' => \'baz' },
) {
+ for my $where (
+ { foo => "bar", -and => [ 0 => $eq ], bizz => "buzz" },
+ { foo => "bar", -or => [ 0 => $eq ], bizz => "buzz" },
+ { foo => "bar", -and => { 0 => $eq }, bizz => "buzz" },
+ { foo => "bar", -or => { 0 => $eq }, bizz => "buzz" },
+ { foo => "bar", 0 => $eq, bizz => "buzz" },
+ ) {
+ push @tests, {
+ where => $where,
+ cc_result => {
+ 0 => $eq,
+ foo => 'bar',
+ bizz => 'buzz',
+ },
+ efcc_result => {
+ foo => 'bar',
+ bizz => 'buzz',
+ ( ref $eq eq 'HASH' ? ( 0 => $eq->{'='} ) : () ),
+ },
+ sql => 'WHERE 0 = baz AND bizz = ? AND foo = ?',
+ };
+ push @tests, {
+ where => { -or => $where },
+ cc_result => { -or => [
+ "0" => $eq,
+ bizz => 'buzz',
+ foo => 'bar',
+ ]},
+ efcc_result => {},
+ sql => 'WHERE 0 = baz OR bizz = ? OR foo = ?',
+ }
+
+ }
+
+ for my $where (
+ [ foo => "bar", -and => [ 0 => $eq ], bizz => "buzz" ],
+ [ foo => "bar", -or => [ 0 => $eq ], bizz => "buzz" ],
+ [ foo => "bar", -and => { 0 => $eq }, bizz => "buzz" ],
+ [ foo => "bar", -or => { 0 => $eq }, bizz => "buzz" ],
+ [ foo => "bar", 0 => $eq, bizz => "buzz" ],
+ ) {
+ push @tests, {
+ where => { -or => $where },
+ cc_result => { -or => [
+ "0" => $eq,
+ bizz => 'buzz',
+ foo => 'bar',
+ ]},
+ efcc_result => {},
+ sql => 'WHERE foo = ? OR 0 = baz OR bizz = ?',
+ collapsed_sql => 'WHERE 0 = baz OR bizz = ? OR foo = ?',
+ }
+ }
+
+ for my $where (
+ [ {foo => "bar"}, -and => { 0 => "baz" }, bizz => "buzz" ],
+ [ -or => [ foo => "bar", -or => { 0 => "baz" }, bizz => "buzz" ] ],
+ ) {
+ push @tests, {
+ where => { -or => $where },
+ cc_result => { -or => [
+ "0" => 'baz',
+ bizz => 'buzz',
+ foo => 'bar',
+ ]},
+ efcc_result => {},
+ sql => 'WHERE foo = ? OR 0 = ? OR bizz = ?',
+ collapsed_sql => 'WHERE 0 = ? OR bizz = ? OR foo = ?',
+ };
+ }
+
+};
+
+for my $t (@tests) {
for my $w (
$t->{where},
$t->{where}, # do it twice, make sure we didn't destroy the condition
[ -and => $t->{where} ],
[ -AND => $t->{where} ],
{ -OR => [ -AND => $t->{where} ] },
- ( keys %{$t->{where}} <= 1 ? [ %{$t->{where}} ] : () ),
+ ( ( keys %{$t->{where}} == 1 and length( (keys %{$t->{where}})[0] ) )
+ ? [ %{$t->{where}} ]
+ : ()
+ ),
( (keys %{$t->{where}} == 1 and $t->{where}{-or})
? ( ref $t->{where}{-or} eq 'HASH'
? [ map { $_ => $t->{where}{-or}{$_} } sort keys %{$t->{where}{-or}} ]
: ()
),
) {
+ die unless Test::Builder->new->is_passing;
+
my $name = do { local ($Data::Dumper::Indent, $Data::Dumper::Terse, $Data::Dumper::Sortkeys) = (0, 1, 1); Dumper $w };
- my ($generated_sql) = $sm->where($w);
+ my ($collapsed_cond, $collapsed_cond_as_sql);
- is_same_sql ( $generated_sql, $t->{sql}, "Expected SQL from $name" )
- if exists $t->{sql};
+ if ($t->{throw}) {
+ throws_ok {
+ $collapsed_cond = $schema->storage->_collapse_cond($w);
+ ($collapsed_cond_as_sql) = $sm->where($collapsed_cond);
+ } $t->{throw}, "Exception on attempted collapse/render of $name"
+ and
+ next;
+ }
- is_same_sql(
- ($sm->where($t->{cc_result}))[0],
- ( $t->{collapsed_sql} || $t->{sql} || $generated_sql ),
- "Collapse did not alter *the semantics* of the final SQL based on $name",
- );
-
- my $collapsed_cond = $schema->storage->_collapse_cond($w);
+ warnings_exist {
+ $collapsed_cond = $schema->storage->_collapse_cond($w);
+ ($collapsed_cond_as_sql) = $sm->where($collapsed_cond);
+ } $t->{warn} || [], "Expected warning when collapsing/rendering $name";
is_deeply(
$collapsed_cond,
"Expected collapsed condition produced on $name",
);
+ my ($original_sql) = do {
+ local $SIG{__WARN__} = sub {};
+ $sm->where($w);
+ };
+
+ is_same_sql ( $original_sql, $t->{sql}, "Expected original SQL from $name" )
+ if exists $t->{sql};
+
+ is_same_sql(
+ $collapsed_cond_as_sql,
+ ( $t->{collapsed_sql} || $t->{sql} || $original_sql ),
+ "Collapse did not alter *the semantics* of the final SQL based on $name",
+ );
+
is_deeply(
- $schema->storage->_extract_fixed_condition_columns($w),
+ $schema->storage->_extract_fixed_condition_columns($collapsed_cond),
$t->{efcc_result},
"Expected fixed_condition produced on $name",
);
is_deeply(
- $schema->storage->_extract_fixed_condition_columns($w, 'consider_nulls'),
+ $schema->storage->_extract_fixed_condition_columns($collapsed_cond, 'consider_nulls'),
$t->{efcc_n_result},
"Expected fixed_condition including NULLs produced on $name",
) if $t->{efcc_n_result};
- die unless Test::Builder->new->is_passing;
+ is_deeply(
+ $collapsed_cond,
+ $t->{cc_result},
+ "Collapsed condition result unaltered by fixed condition extractor",
+ );
}
}
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'id_shortener';
+
use strict;
use warnings;
use Test::More;
-use lib qw(t/lib);
-
-use DBIx::Class::Optional::Dependencies;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('id_shortener')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('id_shortener');
+use lib qw(t/lib);
use DBICTest::Schema::Artist;
BEGIN {
DBICTest::Schema::Artist->add_column('parentid');
use Test::More;
use Test::Exception;
-use Storable 'dclone';
use lib qw(t/lib);
use DBICTest ':DiffSQL';
+use DBIx::Class::_Util 'deep_clone';
my $schema = DBICTest->init_schema;
my $native_limit_dialect = $schema->storage->sql_maker->{limit_dialect};
@where_bind,
@group_bind,
@having_bind,
- @{ dclone \@order_bind }, # without this is_deeply throws a fit
+ @{ deep_clone \@order_bind }, # without this is_deeply throws a fit
],
],
limit_offset_prefetch => [
@where_bind,
@group_bind,
@having_bind,
- @{ dclone \@order_bind }, # without this is_deeply throws a fit
+ @{ deep_clone \@order_bind }, # without this is_deeply throws a fit
],
],
limit_offset_prefetch => [
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'id_shortener';
+
use strict;
use warnings;
-use Test::More;
-
-BEGIN {
- require DBIx::Class::Optional::Dependencies;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('id_shortener')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('id_shortener');
-}
+use Test::More;
use Test::Exception;
use Data::Dumper::Concise;
use lib qw(t/lib);
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'id_shortener';
+
use strict;
use warnings;
use Test::More;
-BEGIN {
- require DBIx::Class::Optional::Dependencies;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('id_shortener')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('id_shortener');
-}
-
use lib qw(t/lib);
use DBICTest ':DiffSQL';
use DBIx::Class::SQLMaker::OracleJoins;
is($schema->resultset("Artist")->search(), 3, "Three artists returned");
} 'Custom cursor autoloaded';
+# test component_class reentrancy
SKIP: {
- eval { require Class::Unload }
- or skip 'component_class reentrancy test requires Class::Unload', 1;
+ DBIx::Class::Optional::Dependencies->skip_without( 'Class::Unload>=0.07' );
Class::Unload->unload('DBICTest::Cursor');
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_prettydebug';
+
use strict;
use warnings;
+
use lib qw(t/lib);
use DBICTest;
use Test::More;
-BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_prettydebug')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_prettydebug');
-}
-
BEGIN { delete @ENV{qw(DBIC_TRACE_PROFILE)} }
{
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'deploy';
+
use strict;
use warnings;
use lib qw(t/lib);
use DBICTest;
-BEGIN {
- require DBIx::Class;
- plan skip_all =>
- 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
-}
-
local $ENV{DBI_DSN};
# this is how maint/gen_schema did it (connect() to force a storage
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_replicated';
+
use strict;
use warnings;
use Test::More;
-
+use DBIx::Class::_Util 'modver_gt_or_eq_and_lt';
use lib qw(t/lib);
use DBICTest;
BEGIN {
- require DBIx::Class;
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_replicated')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_replicated');
-
- if (DBICTest::RunMode->is_smoker) {
- my $mver = Moose->VERSION;
- plan skip_all => "A trial version $mver of Moose detected known to break replication - skipping test known to fail"
- if ($mver >= 1.99 and $mver <= 1.9902);
- }
-
+ plan skip_all => "A trial version of Moose detected known to break replication - skipping test known to fail" if (
+ DBICTest::RunMode->is_smoker
+ and
+ modver_gt_or_eq_and_lt( 'Moose', '1.99', '1.9903' )
+ )
}
use Test::Moose;
+my ($inc_before, $inc_after);
+BEGIN {
+ $inc_before = [ keys %INC ];
+ require DBIx::Class::Optional::Dependencies;
+ $inc_after = [ keys %INC ];
+}
+
use strict;
use warnings;
no warnings qw/once/;
use Test::More;
use Test::Exception;
-use lib qw(t/lib);
-use Scalar::Util; # load before we break require()
-use Carp (); # Carp is not used in the test, but we want to have it loaded for proper %INC comparison
-
-# a dummy test which lazy-loads more modules (so we can compare INC below)
-is_deeply([], []);
-
-# record contents of %INC - makes sure there are no extra deps slipping into
-# Opt::Dep.
-my $inc_before = [ keys %INC ];
-ok ( (! grep { $_ =~ m|DBIx/Class| } @$inc_before ), 'Nothing DBIC related is yet loaded');
-
-# DBIx::Class::Optional::Dependencies queries $ENV at compile time
-# to build the optional requirements
-BEGIN {
- $ENV{DBICTEST_PG_DSN} = '1';
- delete $ENV{DBICTEST_ORA_DSN};
-}
-use_ok 'DBIx::Class::Optional::Dependencies';
+# load before we break require()
+use Scalar::Util();
+use MRO::Compat();
+use Carp 'confess';
+use List::Util 'shuffle';
-my $inc_after = [ keys %INC ];
+ok ( (! grep { $_ =~ m|DBIx/Class| } @$inc_before ), 'Nothing DBIC related was loaded before inc-test')
+ unless $ENV{PERL5OPT}; # a defined PERL5OPT may inject extra deps crashing this test
is_deeply (
[ sort @$inc_after],
- [ sort (@$inc_before, 'DBIx/Class/Optional/Dependencies.pm') ],
+ [ sort (@$inc_before, qw( DBIx/Class/Optional/Dependencies.pm if.pm )) ],
'Nothing loaded other than DBIx::Class::OptDeps',
-);
+) unless $ENV{RELEASE_TESTING};
+
+# check the project-local groups for sanity
+lives_ok {
+ DBIx::Class::Optional::Dependencies->req_group_list
+} "The entire optdep list is well formed";
-my $sqlt_dep = DBIx::Class::Optional::Dependencies->req_list_for ('deploy');
is_deeply (
- [ keys %$sqlt_dep ],
+ [ keys %{ DBIx::Class::Optional::Dependencies->req_list_for ('deploy') } ],
[ 'SQL::Translator' ],
'Correct deploy() dependency list',
);
-# make module loading impossible, regardless of actual libpath contents
+# scope to break require()
{
- local @INC = (sub { die('Optional Dep Test') } );
- ok (
- ! DBIx::Class::Optional::Dependencies->req_ok_for ('deploy'),
- 'deploy() deps missing',
+# make module loading impossible, regardless of actual libpath contents
+ local @INC = (sub { confess('Optional Dep Test') } );
+
+# basic test using the deploy target
+ for ('deploy', ['deploy']) {
+
+ # explicitly blow up cache
+ %DBIx::Class::Optional::Dependencies::req_unavailability_cache = ();
+
+ ok (
+ ! DBIx::Class::Optional::Dependencies->req_ok_for ($_),
+ 'deploy() deps missing',
+ );
+
+ like (
+ DBIx::Class::Optional::Dependencies->modreq_missing_for ($_),
+ qr/
+ \A
+ " SQL::Translator \~ \>\= [\d\.]+ "
+ \z
+ /x,
+ 'expected modreq missing string contents',
+ );
+
+ like (
+ DBIx::Class::Optional::Dependencies->req_missing_for ($_),
+ qr/
+ \A
+ " SQL::Translator \~ \>\= [\d\.]+ "
+ \Q (see DBIx::Class::Optional::Dependencies documentation for details)\E
+ \z
+ /x,
+ 'expected missing string contents',
+ );
+
+ like (
+ DBIx::Class::Optional::Dependencies->modreq_errorlist_for ($_)->{'SQL::Translator'},
+ qr/Optional Dep Test/,
+ 'custom exception found in errorlist',
+ );
+
+ #make it so module appears loaded
+ local $INC{'SQL/Translator.pm'} = 1;
+ local $SQL::Translator::VERSION = 999;
+
+ ok (
+ ! DBIx::Class::Optional::Dependencies->req_ok_for ($_),
+ 'deploy() deps missing cached properly from previous run',
+ );
+
+ # blow cache again
+ %DBIx::Class::Optional::Dependencies::req_unavailability_cache = ();
+
+ ok (
+ DBIx::Class::Optional::Dependencies->req_ok_for ($_),
+ 'deploy() deps present',
+ );
+
+ is (
+ DBIx::Class::Optional::Dependencies->req_missing_for ($_),
+ '',
+ 'expected null missing string',
+ );
+
+ is_deeply (
+ # use the deprecated method name
+ DBIx::Class::Optional::Dependencies->req_errorlist_for ($_),
+ undef,
+ 'expected empty errorlist',
+ );
+ }
+
+# test single-db text
+ local $ENV{DBICTEST_MYSQL_DSN};
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_mysql'),
+ undef,
+ 'unknown optional dependencies list for testing MySQL without ENV var',
);
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->modreq_list_for('test_rdbms_mysql'),
+ { 'DBD::mysql' => 0 },
+ 'correct optional module dependencies list for testing MySQL without ENV var',
+ );
+
+ local $ENV{DBICTEST_MYSQL_DSN};
+ local $ENV{DBICTEST_PG_DSN};
- like (
- DBIx::Class::Optional::Dependencies->req_missing_for ('deploy'),
- qr/^SQL::Translator \>\= \d/,
- 'expected missing string contents',
+# regular
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->modreq_list_for('test_rdbms_pg'),
+ { 'DBD::Pg' => '2.009002' },
+ 'optional dependencies list for testing Postgres without envvar',
);
- like (
- DBIx::Class::Optional::Dependencies->req_errorlist_for ('deploy')->{'SQL::Translator'},
- qr/Optional Dep Test/,
- 'custom exception found in errorlist',
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_pg'),
+ undef,
+ 'optional dependencies list for testing Postgres without envvar',
);
-}
-#make it so module appears loaded
-$INC{'SQL/Translator.pm'} = 1;
-$SQL::Translator::VERSION = 999;
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_list_for('rdbms_pg'),
+ { 'DBD::Pg' => '0', },
+ 'optional dependencies list for using Postgres matches',
+ );
-ok (
- ! DBIx::Class::Optional::Dependencies->req_ok_for ('deploy'),
- 'deploy() deps missing cached properly',
-);
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_missing_for('rdbms_pg'),
+ 'DBD::Pg (see DBIx::Class::Optional::Dependencies documentation for details)',
+ 'optional dependencies missing list for using Postgres matches',
+ );
-#reset cache
-%DBIx::Class::Optional::Dependencies::req_availability_cache = ();
+# test combination of different requirements on same module (pg's are relatively stable)
+ is_deeply (
+ DBIx::Class::Optional::Dependencies->req_list_for([shuffle qw( rdbms_pg test_rdbms_pg )]),
+ { 'DBD::Pg' => '0' },
+ 'optional module dependencies list for testing Postgres matches without envvar',
+ );
+ is(
+ DBIx::Class::Optional::Dependencies->req_missing_for([qw( rdbms_pg test_rdbms_pg )]),
+ '"DBD::Pg~>=2.009002" as well as the following group(s) of environment variables: DBICTEST_PG_DSN/..._USER/..._PASS',
+ 'optional dependencies for testing Postgres without envvar'
+ );
-ok (
- DBIx::Class::Optional::Dependencies->req_ok_for ('deploy'),
- 'deploy() deps present',
-);
+ is(
+ DBIx::Class::Optional::Dependencies->req_missing_for([shuffle qw( test_rdbms_mysql test_rdbms_pg )]),
+ 'DBD::mysql "DBD::Pg~>=2.009002" as well as the following group(s) of environment variables: DBICTEST_MYSQL_DSN/..._USER/..._PASS and DBICTEST_PG_DSN/..._USER/..._PASS',
+ 'optional dependencies for testing Postgres+MySQL without envvars'
+ );
-is (
- DBIx::Class::Optional::Dependencies->req_missing_for ('deploy'),
- '',
- 'expected null missing string',
-);
+ $ENV{DBICTEST_PG_DSN} = 'boo';
+ is_deeply (
+ DBIx::Class::Optional::Dependencies->modreq_list_for([shuffle qw( rdbms_pg test_rdbms_pg )]),
+ { 'DBD::Pg' => '2.009002' },
+ 'optional module dependencies list for testing Postgres matches with envvar',
+ );
-is_deeply (
- DBIx::Class::Optional::Dependencies->req_errorlist_for ('deploy'),
- {},
- 'expected empty errorlist',
-);
+ is(
+ DBIx::Class::Optional::Dependencies->req_missing_for([shuffle qw( rdbms_pg test_rdbms_pg )]),
+ '"DBD::Pg~>=2.009002"',
+ 'optional dependencies error text for testing Postgres matches with evvar',
+ );
+
+# ICDT augmentation
+ my $mysql_icdt = [shuffle qw( test_rdbms_mysql icdt )];
+
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->modreq_list_for($mysql_icdt),
+ {
+ 'DateTime' => '0.55',
+ 'DBD::mysql' => 0,
+ 'DateTime::Format::MySQL' => 0,
+ },
+ 'optional module dependencies list for testing ICDT MySQL without envvar',
+ );
+
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_list_for($mysql_icdt),
+ {
+ 'DateTime' => '0.55',
+ },
+ 'optional dependencies list for testing ICDT MySQL without envvar',
+ );
+
+ is(
+ DBIx::Class::Optional::Dependencies->req_missing_for($mysql_icdt),
+ '"DateTime~>=0.55" DateTime::Format::MySQL DBD::mysql as well as the following group(s) of environment variables: DBICTEST_MYSQL_DSN/..._USER/..._PASS',
+ 'missing optional dependencies for testing ICDT MySQL without envvars'
+ );
+
+# test multi-level include with a variable and mandatory part converging on same included dep
+ local $ENV{DBICTEST_MSACCESS_ODBC_DSN};
+ local $ENV{DBICTEST_MSSQL_ODBC_DSN} = 'foo';
+ my $msaccess_mssql_icdt = [ shuffle qw( test_rdbms_msaccess_odbc test_rdbms_mssql_odbc icdt ) ];
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_missing_for($msaccess_mssql_icdt),
+ 'Data::GUID "DateTime~>=0.55" "DateTime::Format::Strptime~>=1.2" DBD::ODBC as well as the following group(s) of environment variables: DBICTEST_MSACCESS_ODBC_DSN/..._USER/..._PASS',
+ 'Correct req_missing_for on multi-level converging include',
+ );
+
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->modreq_missing_for($msaccess_mssql_icdt),
+ 'Data::GUID "DateTime~>=0.55" "DateTime::Format::Strptime~>=1.2" DBD::ODBC',
+ 'Correct modreq_missing_for on multi-level converging include',
+ );
+
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->req_list_for($msaccess_mssql_icdt),
+ {
+ 'DBD::ODBC' => 0,
+ 'DateTime' => '0.55',
+ 'DateTime::Format::Strptime' => '1.2',
+ },
+ 'Correct req_list_for on multi-level converging include',
+ );
+
+ is_deeply(
+ DBIx::Class::Optional::Dependencies->modreq_list_for($msaccess_mssql_icdt),
+ {
+ 'DBD::ODBC' => 0,
+ 'Data::GUID' => 0,
+ 'DateTime' => '0.55',
+ 'DateTime::Format::Strptime' => '1.2',
+ },
+ 'Correct modreq_list_for on multi-level converging include',
+ );
+
+}
# test multiple times to find autovivification bugs
-for (1..2) {
+for my $meth (qw(req_list_for modreq_list_for)) {
throws_ok {
- DBIx::Class::Optional::Dependencies->req_list_for();
+ DBIx::Class::Optional::Dependencies->$meth();
} qr/\Qreq_list_for() expects a requirement group name/,
- "req_list_for without groupname throws exception on run $_";
+ "$meth without groupname throws exception";
throws_ok {
- DBIx::Class::Optional::Dependencies->req_list_for('');
- } qr/\Qreq_list_for() expects a requirement group name/,
- "req_list_for with empty groupname throws exception on run $_";
+ DBIx::Class::Optional::Dependencies->$meth('');
+ } qr/\Q$meth() expects a requirement group name/,
+ "$meth with empty groupname throws exception";
throws_ok {
- DBIx::Class::Optional::Dependencies->req_list_for('invalid_groupname');
- } qr/Requirement group 'invalid_groupname' does not exist/,
- "req_list_for with invalid groupname throws exception on run $_";
+ DBIx::Class::Optional::Dependencies->$meth('invalid_groupname');
+ } qr/Requirement group 'invalid_groupname' is not defined/,
+ "$meth with invalid groupname throws exception";
}
-is_deeply(
- DBIx::Class::Optional::Dependencies->req_list_for('rdbms_pg'),
- {
- 'DBD::Pg' => '0',
- }, 'optional dependencies for deploying to Postgres ok');
-
-is_deeply(
- DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_pg'),
- {
- 'DBD::Pg' => '2.009002',
- }, 'optional dependencies for testing Postgres with ENV var ok');
-
-is_deeply(
- DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_oracle'),
- {}, 'optional dependencies for testing Oracle without ENV var ok');
-
done_testing;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_pod';
+
use warnings;
use strict;
use lib qw(t/lib);
use DBICTest;
-require DBIx::Class;
-unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_pod') ) {
- my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_pod');
- $ENV{RELEASE_TESTING}
- ? die ("Failed to load release-testing module requirements: $missing")
- : plan skip_all => "Test needs: $missing"
-}
-
# this has already been required but leave it here for CPANTS static analysis
require Test::Pod;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_podcoverage';
+
use warnings;
use strict;
use DBICTest;
use namespace::clean;
-require DBIx::Class;
-unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_podcoverage') ) {
- my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_podcoverage');
- $ENV{RELEASE_TESTING}
- ? die ("Failed to load release-testing module requirements: $missing")
- : plan skip_all => "Test needs: $missing"
-}
-
# this has already been required but leave it here for CPANTS static analysis
require Test::Pod::Coverage;
mk_classaccessor
/]
},
+ 'DBIx::Class::Optional::Dependencies' => {
+ ignore => [qw/
+ croak
+ /]
+ },
'DBIx::Class::Carp' => {
ignore => [qw/
unimport
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_strictures';
+
use warnings;
use strict;
use Test::More;
+use File::Find;
use lib 't/lib';
use DBICTest;
-unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_strictures') ) {
- my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_strictures');
- $ENV{RELEASE_TESTING}
- ? die ("Failed to load release-testing module requirements: $missing")
- : plan skip_all => "Test needs: $missing"
-}
-
-use File::Find;
-
# The rationale is - if we can load all our optdeps
# that are related to lib/ - then we should be able to run
# perl -c checks (via syntax_ok), and all should just work
maint/Makefile.PL.inc/.+ # all the maint inc snippets are auto-strictured
|
t/lib/DBICTest/Util/OverrideRequire.pm # no strictures by design (load order sensitive)
+ |
+ lib/DBIx/Class/Optional/Dependencies.pm # no strictures by design (load speed sensitive)
)$}x;
my $f = $_;
+use DBIx::Class::Optional::Dependencies -skip_all_without => 'test_whitespace';
+
use warnings;
use strict;
use lib 't/lib';
use DBICTest ':GlobalLock';
-require DBIx::Class;
-unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_whitespace') ) {
- my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_whitespace');
- $ENV{RELEASE_TESTING}
- ? die ("Failed to load release-testing module requirements: $missing")
- : plan skip_all => "Test needs: $missing"
-}
-
# FIXME - temporary workaround for RT#82032, RT#82033
# also add all scripts (no extension) and some extra extensions
# we want to check