on_success: change
on_failure: always
+ # FIXME - This stuff is not yet available for free OSS accounts, sadpanda
+ # First paragraph on http://about.travis-ci.org/docs/user/caching/
+ #cache:
+ # apt: true
+ # directories:
+ # - /var/cache/apt/archives
+
language: perl
perl:
- CLEANTEST=true
matrix:
+ fast_finish: true
include:
# this particular perl is quite widespread
- perl: 5.8.8_thr_mb
###
# some permutations of tracing and envvar poisoning
- - perl: 5.18.1_thr_mb
+ - perl: 5.16.2_thr_mb
env:
- CLEANTEST=false
- POISON_ENV=true
- DBIC_TRACE=1
- DBIC_MULTICREATE_DEBUG=0
- BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.18.1
+ - BREWVER=5.16.2
- perl: 5.18
env:
- POISON_ENV=true
- DBIC_TRACE_PROFILE=console
- - perl: 5.18
+ - perl: 5.8
env:
- CLEANTEST=true
- POISON_ENV=true
- POISON_ENV=true
- DBIC_TRACE=1
- DBIC_TRACE_PROFILE=console_monochrome
- - DBIC_MULTICREATE_DEBUG=0
###
# Start of the allow_failures block
- # recentish stable with blead CPAN
- - perl: devcpan_5.18.1_thr_mb
+ # old threaded with blead CPAN
+ - perl: devcpan_5.8.7_thr
env:
- - CLEANTEST=false
+ - CLEANTEST=true
+ - BREWOPTS="-Duseithreads"
+ - BREWVER=5.8.7
+ - DEVREL_DEPS=true
+
+ # 5.10.0 threaded with blead CPAN
+ - perl: devcpan_5.10.0_thr_mb
+ env:
+ - CLEANTEST=true
- BREWOPTS="-Duseithreads -Dusemorebits"
- - BREWVER=5.18.1
+ - BREWVER=5.10.0
- DEVREL_DEPS=true
- # bleadperl with stock CPAN
- - perl: bleadperl_thr_mb
+ # 5.12.2 with blead CPAN
+ - perl: devcpan_5.12.2_thr
+ env:
+ - CLEANTEST=true
+ - BREWOPTS="-Duseithreads"
+ - BREWVER=5.12.2
+ - DEVREL_DEPS=true
+
+ # recentish threaded stable with blead CPAN
+ - perl: devcpan_5.18.2_thr_mb
env:
- CLEANTEST=false
- BREWOPTS="-Duseithreads -Dusemorebits"
+ - BREWVER=5.18.2
+ - DEVREL_DEPS=true
+
+ # bleadperl with stock CPAN, full depchain test
+ - perl: bleadperl
+ env:
+ - CLEANTEST=true
- BREWVER=blead
# bleadperl with blead CPAN
# which ones of the above can fail
allow_failures:
- # Fails tests because of https://github.com/Perl-Toolchain-Gang/CPAN-Meta/issues/25
- # Can't be simply masked due to https://rt.cpan.org/Ticket/Display.html?id=88903
- - perl: 5.10.0_thr_dbg
-
# these run with various dev snapshots - allowed to fail
- - perl: devcpan_5.18.1_thr_mb
- - perl: bleadperl_thr_mb
+ - perl: devcpan_5.8.7_thr
+ - perl: devcpan_5.10.0_thr_mb
+ - perl: devcpan_5.12.2_thr
+ - perl: devcpan_5.18.2_thr_mb
+ - perl: bleadperl
- perl: devcpan_bleadperl_thr_mb
Revision history for DBIx::Class
- 0.08901-TRIAL (EXPERIMENTAL BETA RELEASE)
++<unreleased DQ stuff, last was 0.08901-TRIAL>
+ * Start of experimental Data::Query-based release cycle
+ - Any and all newly introduced syntax features may very well change
+ or disappear altogether before the 0.09000 release
+
- * New Features / Changes
++<unreleased mainline>
+ * Fixes
+ - Fix on_connect_* not always firing in some cases - a race condition
+ existed between storage accessor setters and the determine_driver
+ routines, triggering a connection before the set-cycle is finished
+
+ 0.08270 2014-01-30 21:54 (PST)
+ * Fixes
+ - Fix 0.08260 regression in DBD::SQLite bound int handling. Inserted
+ data was not affected, but any function <=> integer comparison would
+ have failed (originally fixed way back in 0e773352)
+ - Fix failure to load DateTime formatter when connecting to Firebird
+ over ODBC
+
+ * Misc
+ - All drivers based on ::Storage::DBI::Firebird::Common now return the
+ same sqlt_type value (affects ::DBI::Interbase, ::DBI::Firebird and
+ ::DBI::ODBC::Firebird)
+
+ 0.08260 2014-01-28 18:52 (UTC)
+ * New Features
- A new zero-to-DBIC style manual: DBIx::Class::Manual::QuickStart
+ * Notable Changes and Deprecations
+ - Explicitly deprecate combination of distinct and selecting a
+ non-column via $rs->get_column()
+
* Fixes
- More robust handling of circular relationship declarations by loading
foreign classes less frequently (should resolve issues like
http://lists.scsys.co.uk/pipermail/dbix-class/2013-June/011374.html)
- - Fix multiple edge cases steming from interaction of a non-selecting
+        Note that none of this is a manifestation of a DBIC bug, but rather
+ unexpected (but correct) behavior of load-order-dependent (hence
+ logically broken) Resultclass hierarchies. In order to deal with this
+ DBIC is scaling back a large number of sanity checks, which are to be
+        reintroduced pending a better framework for source registration
+ - Fix multiple edge cases of complex prefetch combining incorrectly
+ with correlated subquery selections
+ - Fix multiple edge cases stemming from interaction of a non-selecting
order_by specification and distinct and/or complex prefetch
+ - Fix unbound growth of a resultset during repeated execute/exhaust
+ cycles (GH#29)
+ - Work around (and be very vocal about the fact) when DBIC encounters
+ an exception object with broken string overloading
- Clarify ambiguous behavior of distinct when used with ResultSetColumn
i.e. $rs->search({}, { distinct => 1 })->get_column (...)
- Setting quote_names propagates to SQL::Translator when producing
- Back out self-cleaning from DBIx::Class::Carp for the time being
(as a side effect fixes RT#86267)
- Fix incorrect internal use of implicit list context in copy()
+ - Fix 0.08250 regression in driver determination when DBI_DSN is used
- Tests no longer fail if $ENV{DBI_DSN} is set
- Throw clearer exception on ->new_related() with a non-existent
- relationship.
+ relationship
+        - Fix incorrect parenthesis unroll with multicolumn in, (never noticed
+ before fixing false positive in SQLA::Test 1.77)
- Fix t/storage/replicated.t class loading problem
- Stop using the deprecated Class::MOP::load_class()
+ - Fix warning in t/54taint.t with explicitly unset PERL5LIB (RT#91972)
+ - Fix t/54taint.t failing under a local::lib with installed earlier
+ DBIC version (RT#92486)
* Misc
+ - Massive incompatible change of ::BlockRunner internals (was never
+ documented as usable externally, this last set of changes settles
+ the design for proper documentation and opening up)
+ - Adjust exceptions in tests to accommodate changes in the upcoming
+ DBD::SQLite based on libsqlite 3.8.2
+ - More robust lock file naming scheme - allow tests to work on exotic
+ MSWin32 filesystems (habitual offender being http://is.gd/iy5XVP)
+ - Better diagnostics when File::Spec->tmpdir gives us crap in testing
- Replace $row with $result in all docs to be consistent and to
clarify various return values
use 5.008001;
use inc::Module::Install 1.06;
+ BEGIN { makemaker_args( NORECURS => 1 ) } # needs to happen early for old EUMM
##
+## TEMPORARY (and non-portable)
+## Get the dq stuff
+##
+my $target_libdir;
+BEGIN {
+ $target_libdir = 'lib/DBIx/Class/_TempExtlib';
+
+ if ($Module::Install::AUTHOR) {
+
+ `rm -rf $target_libdir`;
+ `mkdir $target_libdir`;
+ for (
+ [ 'Data-Query' => 'master' ],
+ [ 'SQL-Abstract' => 'dq' ],
+ ) {
+ my $tdir = "/tmp/dqlib/$_->[0]/";
+
+ `rm -rf $tdir`;
+
+ `GIT_SSH=maint/careless_ssh.bash git clone --bare --quiet --branch=$_->[1] --depth=1 git://git.shadowcat.co.uk/dbsrgits/$_->[0] $tdir`;
+ printf "\nIncluding %s git rev %s\n",
+ $_->[0],
+ scalar `GIT_DIR=$tdir git rev-parse $_->[1]`,
+ ;
+ `git archive --format=tar --remote=file://$tdir $_->[1] lib/ | tar --strip-components=1 -xC $target_libdir`;
+
+ #`rm -rf $tdir`;
+ }
+ }
+}
+
+use lib $target_libdir;
+
+##
## DO NOT USE THIS HACK IN YOUR DISTS!!! (it makes #toolchain sad)
##
# get cpanX --installdeps . to behave in a checkout (most users do not expect
# for that)
BEGIN {
$Module::Install::AUTHOR = 0 if (grep { $ENV{"PERL5_${_}_IS_RUNNING"} } (qw/CPANM CPANPLUS CPAN/) );
- makemaker_args( NORECURS => 1 );
}
homepage 'http://www.dbix-class.org/';
all_from 'lib/DBIx/Class.pm';
Meta->{values}{x_authority} = 'cpan:RIBASUSHI';
+ # nothing determined at runtime, except for possibly SQLT dep, see
+ # comment further down
+ dynamic_config 0;
+
tests_recursive (qw|
t
|);
'Data::Page' => '2.00',
'Devel::GlobalDestruction' => '0.09',
'Hash::Merge' => '0.12',
- 'Moo' => '1.002',
+ 'Moo' => '1.003000',
'MRO::Compat' => '0.12',
'Module::Find' => '0.07',
'namespace::clean' => '0.24',
'Path::Class' => '0.18',
'Scope::Guard' => '0.03',
- 'SQL::Abstract' => '1.73',
+ 'SQL::Abstract' => '1.77',
'Try::Tiny' => '0.07',
# Technically this is not a core dependency - it is only required
# by the MySQL codepath. However this particular version is bundled
# since 5.10.0 and is a pure-perl module anyway - let it slide
'Text::Balanced' => '2.00',
+
+ # deps for Data::Query
+ 'SQL::ReservedWords' => '0.8',
+ 'Safe::Isa' => '1.000003',
};
my $build_requires = {
+ };
+
+ my $test_requires = {
+ 'File::Temp' => '0.22',
+ 'Test::Deep' => '0.101',
+ 'Test::Exception' => '0.31',
+ 'Test::Warn' => '0.21',
+ 'Test::More' => '0.94',
+
# needed for testing only, not for operation
# we will move away from this dep eventually, perhaps to DBD::CSV or something
###
### often *not* easy or even possible)
###
'DBD::SQLite' => '1.29',
- };
-
- my $test_requires = {
- 'File::Temp' => '0.22',
- 'Test::Deep' => '0.101',
- 'Test::Exception' => '0.31',
- 'Test::Warn' => '0.21',
- 'Test::More' => '0.94',
- # not sure if this is necessary at all, ask schwern some day
- 'Test::Builder' => '0.94',
# this is already a dep of n::c, but just in case - used by t/55namespaces_cleaned.t
# remove and do a manual glob-collection if n::c is no longer a dep
};
# if the user has this env var set and no SQLT installed, tests will fail
- # same rationale for direct test_requires as the strictures stuff above
- # (even though no dist will be created from this)
+ # Note - this is added as test_requires *directly*, so it gets properly
+ # excluded on META.yml cleansing (even though no dist can be created from this)
  # we force this req regardless of author_deps, worst case scenario it will
# be specified twice
+ #
+ # also note that we *do* set dynamic_config => 0, as this is the only thing
+ # that we determine dynamically, and in all fairness if someone sets the
+ # envvar *and* is not running a full Makefile/make/maketest cycle - they get
+ # to keep the pieces
if ($ENV{DBICTEST_SQLT_DEPLOY}) {
local @INC = ('lib', @INC);
require DBIx::Class::Optional::Dependencies;
# only do author-includes if not part of a `make` run
if ($Module::Install::AUTHOR and ! $ENV{MAKELEVEL}) {
+ invoke_author_mode()
+ }
+ else {
+ # make sure this Makefile can not be used to make a dist
+ # (without the author includes there are no meta cleanup, no sanity checks, etc)
+ postamble <<EOP;
+ create_distdir: nonauthor_stop_distdir_creation
+ nonauthor_stop_distdir_creation:
+ \t\$(NOECHO) \$(ECHO) Creation of dists in non-author mode is not allowed
+ \t\$(NOECHO) \$(FALSE)
+ EOP
+ }
+
+ # compose final req list, for alphabetical ordering
+ my %final_req;
+ for my $rtype (keys %$reqs) {
+ for my $mod (keys %{$reqs->{$rtype}} ) {
+
+ # sanity check req duplications
+ die "$mod specified as both a '$rtype' and a '$final_req{$mod}[0]'\n"
+ if $final_req{$mod};
+
+ $final_req{$mod} = [ $rtype, $reqs->{$rtype}{$mod}||0 ],
+ }
+ }
+
+ # actual require
+ for my $mod (sort keys %final_req) {
+ my ($rtype, $ver) = @{$final_req{$mod}};
+ no strict 'refs';
+ $rtype->($mod, $ver);
+ }
+
+ # author-mode or not - this is where we show a list of missing deps
+ # IFF we are running interactively
+ auto_install();
+
+ WriteAll();
+
+ exit 0;
+
+ # needs to be here to keep 5.8 string eval happy
+ # (the include of Makefile.PL.inc loop)
+ my $mm_proto;
+
+ sub invoke_author_mode {
# get options here, make $args available to all snippets
require Getopt::Long;
my $getopt = Getopt::Long::Parser->new(
# We need the MM facilities to generate the pieces for the final MM run.
  # Just instantiate a throwaway object here
- my $mm_proto = ExtUtils::MakeMaker->new({
+ #
+ # Also EUMM and MI disagree on what is the format of Meta->name, just
+ # punt here until a new M::I is shipped (if at all)
+ my $name = Meta->name || die 'The Module::Install metadata must be available at this point but is not - did you rearrange the Makefile.PL...?';
+ $name =~ s/\-/::/g;
+ $mm_proto = ExtUtils::MakeMaker->new({
NORECURS => 1,
- NAME => Meta->name || die 'The Module::Install metadata must be available at this point but is not - did you rearrange the Makefile.PL...?',
+ NAME => $name,
});
# Crutch for DISTBUILDING_IN_HELL
;
}
}
- else {
- # make sure this Makefile can not be used to make a dist
- # (without the author includes there are no meta cleanup, no sanity checks, etc)
- postamble <<EOP;
- create_distdir: nonauthor_stop_distdir_creation
- nonauthor_stop_distdir_creation:
- \t\$(NOECHO) \$(ECHO) Creation of dists in non-author mode is not allowed
- \t\$(NOECHO) \$(FALSE)
- EOP
- }
-
- # compose final req list, for alphabetical ordering
- my %final_req;
- for my $rtype (keys %$reqs) {
- for my $mod (keys %{$reqs->{$rtype}} ) {
-
- # sanity check req duplications
- if ($final_req{$mod}) {
- die "$mod specified as both a '$rtype' and a '$final_req{$mod}[0]'\n";
- }
-
- $final_req{$mod} = [ $rtype, $reqs->{$rtype}{$mod}||0 ],
- }
- }
-
- # actual require
- for my $mod (sort keys %final_req) {
- my ($rtype, $ver) = @{$final_req{$mod}};
- no strict 'refs';
- $rtype->($mod, $ver);
- }
-
- # author-mode or not - this is where we show a list of missing deps
- # IFF we are running interactively
- auto_install();
-
- WriteAll();
use strict;
use warnings;
+use DBIx::Class::_TempExtlib;
+
our $VERSION;
# Always remember to do all digits for the version even if they're 0
# i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
# $VERSION declaration must stay up here, ahead of any other package
# declarations, as to not confuse various modules attempting to determine
# this ones version, whether that be s.c.o. or Module::Metadata, etc
-$VERSION = '0.08270';
+$VERSION = '0.08901';
$VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases
- BEGIN {
- package # hide from pause
- DBIx::Class::_ENV_;
-
- use Config;
-
- use constant {
-
- # but of course
- BROKEN_FORK => ($^O eq 'MSWin32') ? 1 : 0,
-
- HAS_ITHREADS => $Config{useithreads} ? 1 : 0,
-
- # ::Runmode would only be loaded by DBICTest, which in turn implies t/
- DBICTEST => eval { DBICTest::RunMode->is_author } ? 1 : 0,
-
- # During 5.13 dev cycle HELEMs started to leak on copy
- PEEPEENESS =>
- # request for all tests would force "non-leaky" illusion and vice-versa
- defined $ENV{DBICTEST_ALL_LEAKS} ? !$ENV{DBICTEST_ALL_LEAKS}
- # otherwise confess that this perl is busted ONLY on smokers
- : eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006) ? 1
- # otherwise we are good
- : 0
- ,
-
- ASSERT_NO_INTERNAL_WANTARRAY => $ENV{DBIC_ASSERT_NO_INTERNAL_WANTARRAY} ? 1 : 0,
-
- IV_SIZE => $Config{ivsize},
- };
-
- if ($] < 5.009_005) {
- require MRO::Compat;
- constant->import( OLD_MRO => 1 );
- }
- else {
- require mro;
- constant->import( OLD_MRO => 0 );
- }
- }
-
+ use DBIx::Class::_Util;
use mro 'c3';
use DBIx::Class::Optional::Dependencies;
zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
+ Zefram: Andrew Main <zefram@fysh.org>
+
=head1 COPYRIGHT
Copyright (c) 2005 - 2011 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
use Scalar::Util qw/blessed weaken reftype/;
use DBIx::Class::_Util 'fail_on_internal_wantarray';
use Try::Tiny;
-use Data::Compare (); # no imports!!! guard against insane architecture
-
+use Data::Dumper::Concise ();
+use Data::Query::Constants;
+use Data::Query::ExprHelpers;
# not importing first() as it will clash with our own method
use List::Util ();
if $source->isa('DBIx::Class::ResultSourceHandle');
$attrs = { %{$attrs||{}} };
- delete @{$attrs}{qw(_sqlmaker_select_args _related_results_construction)};
+ delete @{$attrs}{qw(_last_sqlmaker_alias_map _related_results_construction)};
if ($attrs->{page}) {
$attrs->{rows} ||= 10;
$call_cond = { @_ };
}
+ if (blessed($call_cond) and $call_cond->isa('Data::Query::ExprBuilder')) {
+ $call_cond = \$call_cond->{expr};
+ }
+
# see if we can keep the cache (no $rs changes)
my $cache;
my %safe = (alias => 1, cache => 1);
ref $call_cond eq 'ARRAY' && ! @$call_cond
)) {
$cache = $self->get_cache;
+ } elsif (
+ $self->{attrs}{cache} and
+ ($self->{attrs}{grep_cache} or $call_attrs->{grep_cache})
+ ) {
+ if (
+ keys %$call_attrs
+ and not (exists $call_attrs->{grep_cache} and !$call_attrs->{grep_cache})
+ ) {
+ die "Can't do complex search on resultset with grep_cache set";
+ }
+ my $grep_one = $self->_construct_perl_predicate($call_cond);
+ $cache = [ grep $grep_one->($_), $self->all ];
}
my $old_attrs = { %{$self->{attrs}} };
sub _stack_cond {
my ($self, $left, $right) = @_;
- # collapse single element top-level conditions
- # (single pass only, unlikely to need recursion)
- for ($left, $right) {
- if (ref $_ eq 'ARRAY') {
- if (@$_ == 0) {
- $_ = undef;
- }
- elsif (@$_ == 1) {
- $_ = $_->[0];
- }
- }
- elsif (ref $_ eq 'HASH') {
- my ($first, $more) = keys %$_;
+ my $source = $self->result_source;
- # empty hash
- if (! defined $first) {
- $_ = undef;
- }
- # one element hash
- elsif (! defined $more) {
- if ($first eq '-and' and ref $_->{'-and'} eq 'HASH') {
- $_ = $_->{'-and'};
- }
- elsif ($first eq '-or' and ref $_->{'-or'} eq 'ARRAY') {
- $_ = $_->{'-or'};
- }
- }
- }
- }
+ my $converter = $source->schema->storage->sql_maker->converter;
- # merge hashes with weeding out of duplicates (simple cases only)
- if (ref $left eq 'HASH' and ref $right eq 'HASH') {
+ my @top = map $source->_extract_top_level_conditions(
+ $converter->_expr_to_dq($_)
+ ), grep defined, $left, $right;
- # shallow copy to destroy
- $right = { %$right };
- for (grep { exists $right->{$_} } keys %$left) {
- # the use of eq_deeply here is justified - the rhs of an
- # expression can contain a lot of twisted weird stuff
- delete $right->{$_} if Data::Compare::Compare( $left->{$_}, $right->{$_} );
- }
+ return undef unless @top;
- $right = undef unless keys %$right;
- }
+ my %seen;
+ my @uniq = grep { !$seen{Data::Dumper::Concise::Dumper($_)}++ } @top;
- if (defined $left xor defined $right) {
- return defined $left ? $left : $right;
- }
- elsif (! defined $left) {
- return undef;
- }
- else {
- return { -and => [ $left, $right ] };
+ return \$uniq[0] if @uniq == 1;
+
+ return \Operator({ 'SQL.Naive' => 'AND' }, \@uniq);
+}
+
+my %perl_op_map = (
+ '=' => { numeric => '==', string => 'eq' },
+);
+
+sub _construct_perl_predicate {
+ my ($self, $cond) = @_;
+
+ # This shouldn't really live here but it'll do for the moment.
+
+ my %alias_map = (
+ $self->current_source_alias => {
+ join_path => [],
+ source => $self->result_source,
+ columns_info => $self->result_source->columns_info,
+ },
+ );
+
+ my $attrs = $self->_resolved_attrs;
+ foreach my $j ( @{$attrs->{from}}[1 .. $#{$attrs->{from}} ] ) {
+ next unless $j->[0]{-alias};
+ next unless $j->[0]{-join_path};
+ $alias_map{$j->[0]{-alias}} = {
+ join_path => [ map { keys %$_ } @{$j->[0]{-join_path}} ],
+ source => $j->[0]{-rsrc},
+ columns_info => $j->[0]{-rsrc}->columns_info,
+ };
}
+
+ my %as_map = map +($attrs->{select}[$_] => $attrs->{as}[$_]),
+ grep !ref($attrs->{select}[$_]), 0..$#{$attrs->{select}};
+
+ my $storage = $self->result_source->schema->storage;
+ my $sql_maker = $storage->sql_maker;
+ my $tree = map_dq_tree {
+ if (is_Operator) {
+ my $op = $_->{operator}{'SQL.Naive'} or die "No operator";
+ if (lc($op) =~ /^(?:and|or|not)$/i) {
+ return Operator({ 'Perl' => lc($op) }, $op->{args});
+ }
+ if (my $op_map = $perl_op_map{$op}) {
+ die "Binop doesn't have two args - wtf?"
+ unless @{$_->{args}} == 2;
+ my $data_type;
+ my @mapped_args = map {
+ if (is_Identifier) {
+ die "Identifier not alias.colname"
+ unless @{$_->{elements}} == 2;
+ my ($alias, $col) = @{$_->{elements}};
+ die "${alias}.${col} not selected"
+ unless $as_map{"${alias}.${col}"};
+ unless ($data_type) {
+ my $colinfo = $alias_map{$alias}{columns_info}{$col};
+ unless (defined $colinfo->{is_numeric}) {
+ $colinfo->{is_numeric} = (
+ $storage->is_datatype_numeric($colinfo->{data_type})
+ ? 1
+ : 0
+ );
+ }
+ $data_type = $colinfo->{is_numeric} ? 'numeric' : 'string';
+ }
+ Identifier(@{$alias_map{$alias}{join_path}}, $col);
+ } elsif (is_Value) {
+ $_;
+ } else {
+ die "Argument to operator neither identifier nor value";
+ }
+ } @{$_->{args}};
+ die "Couldn't determine numeric versus string" unless $data_type;
+ return \Operator({ Perl => $op_map->{$data_type} }, \@mapped_args);
+ }
+ }
+ die "Unable to map node to perl";
+ } $sql_maker->converter->_where_to_dq($cond);
+ my ($code, @values) = @{$storage->perl_renderer->render($tree)};
+ my $sub = eval q!sub { !.$code.q! }!
+ or die "Failed to build sub: $@";
+ my @args = map $_->{value}, @values;
+ return sub { local $_ = $_[0]; $sub->(@args) };
}
=head2 search_literal
return undef unless @{$rows||[]};
# sanity check - people are too clever for their own good
- if ($attrs->{collapse} and my $aliastypes = $attrs->{_sqlmaker_select_args}[3]{_aliastypes} ) {
+ if ($attrs->{collapse} and my $aliastypes = $attrs->{_last_sqlmaker_alias_map} ) {
my $multiplied_selectors;
for my $sel_alias ( grep { $_ ne $attrs->{alias} } keys %{ $aliastypes->{selecting} } ) {
$sql_maker->{name_sep} = '';
}
+ # delete local is 5.12+
+ local @{$sql_maker}{qw(renderer converter)};
+ delete @{$sql_maker}{qw(renderer converter)};
+
my ($lquote, $rquote, $sep) = map { quotemeta $_ } ($sql_maker->_quote_chars, $sql_maker->name_sep);
- my $having_sql = $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} });
+ my $having_sql = $sql_maker->_render_sqla(where => $attrs->{having});
+
my %seen_having;
# search for both a proper quoted qualified string, for a naive unquoted scalarref
# and if all fails for an utterly naive quoted scalar-with-function
while ($having_sql =~ /
- $rquote $sep $lquote (.+?) $rquote
+ (?: $rquote $sep)? $lquote (.+?) $rquote
|
[\s,] \w+ \. (\w+) [\s,]
|
if (! $needs_subq) {
# Most databases do not allow aliasing of tables in UPDATE/DELETE. Thus
# a condition containing 'me' or other table prefixes will not work
- # at all. Tell SQLMaker to dequalify idents via a gross hack.
- $cond = do {
- my $sqla = $rsrc->storage->sql_maker;
- local $sqla->{_dequalify_idents} = 1;
- \[ $sqla->_recurse_where($self->{cond}) ];
- };
+ # at all - so we convert the WHERE to a dq tree now, dequalify all
+ # identifiers found therein via a scan across the tree, and then use
+ # \{} style to pass the result onwards for use in the final query
+ if ($self->{cond}) {
+ $cond = do {
+ my $converter = $rsrc->storage->sql_maker->converter;
+ scan_dq_nodes({
+ DQ_IDENTIFIER ,=> sub { $_ = [ $_->[-1] ] for $_[0]->{elements} }
+ }, my $where_dq = $converter->_where_to_dq($self->{cond}));
+ \$where_dq;
+ };
+ }
}
else {
# we got this far - means it is time to wrap a subquery
my $subrs = (ref $self)->new($rsrc, $attrs);
if (@$idcols == 1) {
- $cond = { $idcols->[0] => { -in => $subrs->as_query } };
+ $cond = { $idcols->[0] => { -in => \$subrs->_as_select_dq } };
}
elsif ($storage->_use_multicolumn_in) {
# no syntax for calling this properly yet
# !!! EXPERIMENTAL API !!! WILL CHANGE !!!
- $cond = $storage->sql_maker->_where_op_multicolumn_in (
- $idcols, # how do I convey a list of idents...? can binds reside on lhs?
- $subrs->as_query
+ my $left = $storage->sql_maker->_render_sqla(select_select => $idcols);
+ $left =~ s/^SELECT //i;
+ my $right = $storage->sql_maker
+ ->converter
+ ->_literal_to_dq(${$subrs->as_query});
+ $cond = \Operator(
+ { 'SQL.Naive' => 'in' },
+ [ Literal(SQL => "( $left )"), $right ],
),
}
else {
$rel,
);
+ if (ref($related) eq 'REF' and ref($$related) eq 'HASH') {
+ $related = $self->result_source
+ ->_extract_fixed_values_for($$related, $rel);
+ }
+
my @rows_to_add = ref $item->{$rel} eq 'ARRAY' ? @{$item->{$rel}} : ($item->{$rel});
my @populate = map { {%$_, %$related} } @rows_to_add;
}
}
-
# populate() arguments went over several incarnations
# What we ultimately support is AoH
sub _normalize_populate_args {
if (! defined $self->{cond}) {
# just massage $data below
}
- elsif ($self->{cond} eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
- %new_data = %{ $self->{attrs}{related_objects} || {} }; # nothing might have been inserted yet
- @cols_from_relations = keys %new_data;
- }
- elsif (ref $self->{cond} ne 'HASH') {
- $self->throw_exception(
- "Can't abstract implicit construct, resultset condition not a hash"
- );
- }
- else {
+ elsif (ref $self->{cond} eq 'HASH') {
# precedence must be given to passed values over values inherited from
# the cond, so the order here is important.
my $collapsed_cond = $self->_collapse_cond($self->{cond});
}
}
}
+ elsif (ref $self->{cond} eq 'REF' and ref ${$self->{cond}} eq 'HASH') {
+ if ((${$self->{cond}})->{'DBIx::Class::ResultSource.UNRESOLVABLE'}) {
+ %new_data = %{ $self->{attrs}{related_objects} || {} }; # nothing might have been inserted yet
+ @cols_from_relations = keys %new_data;
+ } else {
+ %new_data = %{$self->_remove_alias(
+ $self->result_source
+ ->_extract_fixed_values_for(${$self->{cond}}),
+ $alias
+ )};
+ }
+ }
+ else {
+ $self->throw_exception(
+ "Can't abstract implicit construct, resultset condition not a hash"
+ );
+ }
%new_data = (
%new_data,
$attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs
);
$aq;
}
+sub _as_select_dq {
+ my $self = shift;
+ my $attrs = { %{ $self->_resolved_attrs } };
+ my $storage = $self->result_source->storage;
+ my (undef, $ident, @args) = $storage->_select_args(
+ $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs
+ );
+ $ident = $ident->from if blessed($ident);
+ $storage->sql_maker->converter->_select_to_dq(
+ $ident, @args
+ );
+}
+
=head2 find_or_new
=over 4
$source->_resolve_join(
$join,
$alias,
- { %{ $attrs->{seen_join} || {} } },
+ ($attrs->{seen_join} = { %{ $attrs->{seen_join} || {} } }),
( $attrs->{seen_join} && keys %{$attrs->{seen_join}})
? $attrs->{from}[-1][0]{-join_path}
: []
}
}
- # collapse the selector to a literal so that it survives a possible distinct parse
- # if it turns out to be an aggregate - at least the user will get a proper exception
- # instead of silent drop of the group_by altogether
- my $new = bless {
- _select => \ ($rsrc->storage->sql_maker->_render_sqla(select_select => $select) =~ /^\s*SELECT\s*(.+)/i)[0],
+ return bless {
+ _select => $select,
_as => $column,
- _parent_resultset => $new_parent_rs }, $class;
- return $new;
+ _parent_resultset => $new_parent_rs
+ }, $class;
}
=head2 as_query
sub _resultset {
my $self = shift;
- return $self->{_resultset} ||= $self->{_parent_resultset}->search(undef,
- {
- select => [$self->{_select}],
- as => [$self->{_as}]
+ return $self->{_resultset} ||= do {
+
+ my $select = $self->{_select};
+
+ if ($self->{_parent_resultset}{attrs}{distinct}) {
+ my $alias = $self->{_parent_resultset}->current_source_alias;
+ my $rsrc = $self->{_parent_resultset}->result_source;
+ my %cols = map { $_ => 1, "$alias.$_" => 1 } $rsrc->columns;
+
+ unless( $cols{$select} ) {
+ carp_unique(
+ 'Use of distinct => 1 while selecting anything other than a column '
+ . 'declared on the primary ResultSource is deprecated - please supply '
+ . 'an explicit group_by instead'
+ );
+
+ # collapse the selector to a literal so that it survives the distinct parse
+ # if it turns out to be an aggregate - at least the user will get a proper exception
+ # instead of silent drop of the group_by altogether
- $select = \ $rsrc->storage->sql_maker->_recurse_fields($select);
++ $select = \ ($rsrc->storage->sql_maker->_render_sqla(select_select => $select) =~ /^\s*SELECT\s*(.+)/i)[0],
+ }
}
- );
+
+ $self->{_parent_resultset}->search(undef, {
+ columns => { $self->{_as} => $select }
+ });
+ };
}
1;
use Try::Tiny;
use List::Util qw(first max);
- use B 'perlstring';
+use Scalar::Util qw(blessed);
use DBIx::Class::ResultSource::RowParser::Util qw(
assemble_simple_parser
$_ =~ s/^ (?: foreign | self ) \.//x for ($f, $s);
$relinfo->{$rel}{fk_map}{$s} = $f;
}
+ } elsif (blessed($cond) and $cond->isa('Data::Query::ExprBuilder')) {
+ my $cols = $self->_join_condition_to_hashref($cond->{expr});
+ @{$relinfo->{$rel}{fk_map}}{values %$cols} = keys %$cols;
}
}
my $new = { _column_data => $col_data };
bless $new, ref $self;
- $new->result_source($self->result_source);
+ $new->result_source(my $source = $self->result_source);
$new->set_inflated_columns($changes);
$new->insert;
# constraints
my $relnames_copied = {};
- foreach my $relname ($self->result_source->relationships) {
- my $rel_info = $self->result_source->relationship_info($relname);
+ foreach my $relname ($source->relationships) {
+ my $rel_info = $source->relationship_info($relname);
next unless $rel_info->{attrs}{cascade_copy};
- my $resolved = $self->result_source->_resolve_condition(
+ my $resolved = $source->_resolve_condition(
$rel_info->{cond}, $relname, $new, $relname
);
+ if (ref($resolved) eq 'REF') {
+ $resolved = $source->_extract_fixed_values_for($$resolved, 'me');
+ }
+
my $copied = $relnames_copied->{ $rel_info->{source} } ||= {};
foreach my $related ($self->search_related($relname)->all) {
my $id_str = join("\0", $related->id);
$class->throw_exception("No accessor type declared for prefetched relationship '$relname'")
unless $relinfo->{attrs}{accessor};
+ my $rel_rs = $new->related_resultset($relname);
+
my @rel_objects;
if (
- $prefetch->{$relname}
- and
- @{$prefetch->{$relname}}
+ @{ $prefetch->{$relname} || [] }
and
ref($prefetch->{$relname}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class
) {
- my $rel_rs = $new->related_resultset($relname);
-
if (ref $prefetch->{$relname}[0] eq 'ARRAY') {
my $rel_rsrc = $rel_rs->result_source;
my $rel_class = $rel_rs->result_class;
$new->{_inflated_column}{$relname} = $rel_objects[0];
}
- $new->related_resultset($relname)->set_cache(\@rel_objects);
+ $rel_rs->set_cache(\@rel_objects);
}
}
Depending on the resultset attributes one of:
SELECT * FROM (
- SELECT *, ROWNUM rownum__index FROM (
+ SELECT *, ROWNUM AS rownum__index FROM (
SELECT ...
) WHERE ROWNUM <= ($limit+$offset)
) WHERE rownum__index >= ($offset+1)
or
SELECT * FROM (
- SELECT *, ROWNUM rownum__index FROM (
+ SELECT *, ROWNUM AS rownum__index FROM (
SELECT ...
)
) WHERE rownum__index BETWEEN ($offset+1) AND ($limit+$offset)
return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
- SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM (
+ SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
) $qalias WHERE ROWNUM <= ?
) $qalias WHERE $idx_name >= ?
return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
- SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM (
+ SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
) $qalias
) $qalias WHERE $idx_name BETWEEN ? AND ?
for my $ch ($self->_order_by_chunks ($inner_order)) {
$ch = $ch->[0] if ref $ch eq 'ARRAY';
- ($ch, my $is_desc) = $self->_split_order_chunk($ch);
-
- # !NOTE! outside chunks come in reverse order ( !$is_desc )
- push @out_chunks, { ($is_desc ? '-asc' : '-desc') => \$ch };
+ $ch =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+ my $dir = uc ($1||'ASC');
+ push @out_chunks, \join (' ', $ch, $dir eq 'ASC' ? 'DESC' : 'ASC' );
}
$sq_attrs->{order_by_middle} = $self->_order_by (\@out_chunks);
# Whatever order bindvals there are, they will be realiased and
# reselected, and need to show up at end of the initial inner select
push @{$self->{select_bind}}, @{$self->{order_bind}};
+
+ # if this is a part of something bigger, we need to add back all
+ # the extra order_by's, as they may be relied upon by the outside
+ # of a prefetch or something
+ if ($rs_attrs->{_is_internal_subuery}) {
+ $sq_attrs->{selection_outer} .= sprintf ", $extra_order_sel->{$_} AS $_"
+ for sort
+ { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+ grep { $_ !~ /[^\w\-]/ } # ignore functions
+ keys %$extra_order_sel
+ ;
+ }
}
# and this is order re-alias magic
return $sql;
}
+=head2 RowCountOrGenericSubQ
+
+This is not exactly a limit dialect, but more of a proxy for B<Sybase ASE>.
+If no $offset is supplied the limit is simply performed as:
+
+ SET ROWCOUNT $limit
+ SELECT ...
+ SET ROWCOUNT 0
+
+Otherwise we fall back to L</GenericSubQ>
+
+=cut
+
+sub _RowCountOrGenericSubQ {
+ my $self = shift;
+ my ($sql, $rs_attrs, $rows, $offset) = @_;
+
+ return $self->_GenericSubQ(@_) if $offset;
+
+ return sprintf <<"EOF", $rows, $sql, $self->_parse_rs_attrs( $rs_attrs );
+SET ROWCOUNT %d
+%s %s
+SET ROWCOUNT 0
+EOF
+}
+
=head2 GenericSubQ
SELECT * FROM (
my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
my $root_rsrc = $rs_attrs->{_rsroot_rsrc};
+ my $root_tbl_name = $root_rsrc->name;
- # Explicitly require an order_by
- # GenSubQ is slow enough as it is, just emulating things
- # like in other cases is not wise - make the user work
- # to shoot their DBA in the foot
- my $supplied_order = delete $rs_attrs->{order_by} or $self->throw_exception (
- 'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, '
- . 'root-table-based order criteria.'
- );
-
- my $usable_order_ci = $root_rsrc->storage->_main_source_order_by_portion_is_stable(
- $root_rsrc,
- $supplied_order,
- $rs_attrs->{where},
- ) or $self->throw_exception(
- 'Generic Subquery Limit can not work with order criteria based on sources other than the current one'
- );
-
-###
-###
-### we need to know the directions after we figured out the above - reextract *again*
-### this is eyebleed - trying to get it to work at first
- my @order_bits = do {
+ my ($first_order_by) = do {
local $self->{quote_char};
local $self->{order_bind};
- map { ref $_ ? $_->[0] : $_ } $self->_order_by_chunks ($supplied_order)
- };
+ map { ref $_ ? $_->[0] : $_ } $self->_order_by_chunks ($rs_attrs->{order_by})
+ } or $self->throw_exception (
+ 'Generic Subquery Limit does not work on resultsets without an order. Provide a single, '
+ . 'unique-column order criteria.'
+ );
- # truncate to what we'll use
- $#order_bits = ( (keys %$usable_order_ci) - 1 );
+ $first_order_by =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+ my $direction = lc ($1 || 'asc');
- # @order_bits likely will come back quoted (due to how the prefetch
- # rewriter operates
- # Hence supplement the column_info lookup table with quoted versions
- if ($self->quote_char) {
- $usable_order_ci->{$self->_quote($_)} = $usable_order_ci->{$_}
- for keys %$usable_order_ci;
- }
+ my ($first_ord_alias, $first_ord_col) = $first_order_by =~ /^ (?: ([^\.]+) \. )? ([^\.]+) $/x;
-# calculate the condition
- my $count_tbl_alias = 'rownum__emulation';
- my $root_alias = $rs_attrs->{alias};
- my $root_tbl_name = $root_rsrc->name;
-
- my (@unqualified_names, @qualified_names, @is_desc, @new_order_by);
+ $self->throw_exception(sprintf
+ "Generic Subquery Limit order criteria can be only based on the root-source '%s'"
+ . " (aliased as '%s')", $root_rsrc->source_name, $rs_attrs->{alias},
+ ) if ($first_ord_alias and $first_ord_alias ne $rs_attrs->{alias});
- for my $bit (@order_bits) {
+ $first_ord_alias ||= $rs_attrs->{alias};
- ($bit, my $is_desc) = $self->_split_order_chunk($bit);
+ $self->throw_exception(
+ "Generic Subquery Limit first order criteria '$first_ord_col' must be unique"
+ ) unless $root_rsrc->_identifying_column_set([$first_ord_col]);
+
+ my $sq_attrs = do {
+ # perform the mangling only using the very first order criteria
+ # (the one we care about)
+ local $rs_attrs->{order_by} = $first_order_by;
+ $self->_subqueried_limit_attrs ($sql, $rs_attrs);
+ };
- push @is_desc, $is_desc;
- push @unqualified_names, $usable_order_ci->{$bit}{-colname};
- push @qualified_names, $usable_order_ci->{$bit}{-fq_colname};
+ my $cmp_op = $direction eq 'desc' ? '>' : '<';
+ my $count_tbl_alias = 'rownum__emulation';
- push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_ci->{$bit}{-fq_colname} };
+ my ($order_sql, @order_bind) = do {
+ local $self->{order_bind};
+ my $s = $self->_order_by (delete $rs_attrs->{order_by});
+ ($s, @{$self->{order_bind}});
};
+ my $group_having_sql = $self->_parse_rs_attrs($rs_attrs);
- my (@where_cond, @skip_colpair_stack);
- for my $i (0 .. $#order_bits) {
- my $ci = $usable_order_ci->{$order_bits[$i]};
-
- my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $root_alias);
- my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } };
-
- push @skip_colpair_stack, [
- { $main_col => { -ident => $subq_col } },
- ];
-
- # we can trust the nullability flag because
- # we already used it during _id_col_set resolution
- #
- if ($ci->{is_nullable}) {
- push @{$skip_colpair_stack[-1]}, { $main_col => undef, $subq_col=> undef };
-
- $cur_cond = [
- {
- ($is_desc[$i] ? $subq_col : $main_col) => { '!=', undef },
- ($is_desc[$i] ? $main_col : $subq_col) => undef,
- },
- {
- $subq_col => { '!=', undef },
- $main_col => { '!=', undef },
- -and => $cur_cond,
- },
- ];
- }
-
- push @where_cond, { '-and', => [ @skip_colpair_stack[0..$i-1], $cur_cond ] };
- }
+ my $in_sel = $sq_attrs->{selection_inner};
-# reuse the sqlmaker WHERE, this will not be returning binds
- my $counted_where = do {
- local $self->{where_bind};
- $self->where(\@where_cond);
- };
+ # add the order supplement (if any) as this is what will be used for the outer WHERE
+ $in_sel .= ", $_" for keys %{$sq_attrs->{order_supplement}};
my $rownum_cond;
if ($offset) {
$rownum_cond = 'BETWEEN ? AND ?';
+
push @{$self->{limit_bind}},
[ $self->__offset_bindtype => $offset ],
[ $self->__total_bindtype => $offset + $rows - 1]
}
else {
$rownum_cond = '< ?';
+
push @{$self->{limit_bind}},
[ $self->__rows_bindtype => $rows ]
;
}
-# and what we will order by inside
- my $inner_order_sql = do {
- local $self->{order_bind};
-
- my $s = $self->_order_by (\@new_order_by);
-
- $self->throw_exception('Inner gensubq order may not contain binds... something went wrong')
- if @{$self->{order_bind}};
-
- $s;
- };
-
-### resume originally scheduled programming
-###
-###
-
- # we need to supply the order for the supplements to be properly calculated
- my $sq_attrs = $self->_subqueried_limit_attrs (
- $sql, { %$rs_attrs, order_by => \@new_order_by }
- );
-
- my $in_sel = $sq_attrs->{selection_inner};
-
- # add the order supplement (if any) as this is what will be used for the outer WHERE
- $in_sel .= ", $_" for sort keys %{$sq_attrs->{order_supplement}};
-
- my $group_having_sql = $self->_parse_rs_attrs($rs_attrs);
-
+ # even though binds in order_by make no sense here (the rs needs to be
+ # ordered by a unique column first) - pass whatever there may be through
+ # anyway
+ push @{$self->{limit_bind}}, @order_bind;
return sprintf ("
SELECT $sq_attrs->{selection_outer}
FROM (
SELECT $in_sel $sq_attrs->{query_leftover}${group_having_sql}
) %s
-WHERE ( SELECT COUNT(*) FROM %s %s $counted_where ) $rownum_cond
-$inner_order_sql
+WHERE ( SELECT COUNT(*) FROM %s %s WHERE %s $cmp_op %s ) $rownum_cond
+$order_sql
", map { $self->_quote ($_) } (
$rs_attrs->{alias},
$root_tbl_name,
$count_tbl_alias,
+ "$count_tbl_alias.$first_ord_col",
+ "$first_ord_alias.$first_ord_col",
));
}
for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) {
# order with bind
$chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY';
- ($chunk) = $self->_split_order_chunk($chunk);
+ $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
next if $in_sel_index->{$chunk};
__PACKAGE__->sql_name_sep('.');
__PACKAGE__->mk_group_accessors('simple' => qw/
- _connect_info _dbi_connect_info _dbic_connect_attributes _driver_determined
+ _connect_info _dbic_connect_attributes _driver_determined
_dbh _dbh_details _conn_pid _sql_maker _sql_maker_opts _dbh_autocommit
_perform_autoinc_retrieval _autoinc_supplied_for_op
/);
sub _determine_supports_join_optimizer { 1 };
# Each of these methods need _determine_driver called before itself
- # in order to function reliably. This is a purely DRY optimization
+ # in order to function reliably. We also need to separate accessors
+ # from plain old method calls, since an accessor called as a setter
+ # does *not* need the driver determination loop fired (and in fact
+ # can produce hard to find bugs, like e.g. losing on_connect_*
+ # semantics on fresh connections)
#
- # get_(use)_dbms_capability need to be called on the correct Storage
- # class, as _use_X may be hardcoded class-wide, and _supports_X calls
- # _determine_supports_X which obv. needs a correct driver as well
- my @rdbms_specific_methods = qw/
+ # The construct below is simply a parameterized around()
+ my $storage_accessor_idx = { map { $_ => 1 } qw(
sqlt_type
- deployment_statements
+ datetime_parser_type
sql_maker
cursor_class
+ )};
+ for my $meth (keys %$storage_accessor_idx, qw(
+ deployment_statements
build_datetime_parser
- datetime_parser_type
txn_begin
_server_info
_get_server_version
- /;
-
- for my $meth (@rdbms_specific_methods) {
+ )) {
my $orig = __PACKAGE__->can ($meth)
or die "$meth is not a ::Storage::DBI method!";
- no strict qw/refs/;
- no warnings qw/redefine/;
+ no strict 'refs';
+ no warnings 'redefine';
*{__PACKAGE__ ."::$meth"} = subname $meth => sub {
if (
# only fire when invoked on an instance, a valid class-based invocation
and
! $_[0]->{_in_determine_driver}
and
- ($_[0]->_dbi_connect_info||[])->[0]
+ # if this is a known *setter* - just set it, no need to connect
+ # and determine the driver
+ ! ( $storage_accessor_idx->{$meth} and @_ > 1 )
+ and
+ # Only try to determine stuff if we have *something* that either is or can
+ # provide a DSN. Allows for bare $schema's generated with a plain ->connect()
+ # to still be marginally useful
+ $_[0]->_dbi_connect_info->[0]
) {
$_[0]->_determine_driver;
};
}
+sub perl_renderer {
+ my ($self) = @_;
+ $self->{perl_renderer} ||= do {
+ require DBIx::Class::PerlRenderer;
+ DBIx::Class::PerlRenderer->new;
+ };
+}
+
=head1 NAME
DBIx::Class::Storage::DBI - DBI storage handler
my %seek_and_destroy;
sub _arm_global_destructor {
+
+ # quick "garbage collection" pass - prevents the registry
+ # from slowly growing with a bunch of undef-valued keys
+ defined $seek_and_destroy{$_} or delete $seek_and_destroy{$_}
+ for keys %seek_and_destroy;
+
weaken (
$seek_and_destroy{ refaddr($_[0]) } = $_[0]
);
$info = $self->_normalize_connect_info($info)
if ref $info eq 'ARRAY';
- for my $storage_opt (keys %{ $info->{storage_options} }) {
- my $value = $info->{storage_options}{$storage_opt};
-
- $self->$storage_opt($value);
- }
-
- # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
- # the new set of options
- $self->_sql_maker(undef);
- $self->_sql_maker_opts({});
-
- for my $sql_maker_opt (keys %{ $info->{sql_maker_options} }) {
- my $value = $info->{sql_maker_options}{$sql_maker_opt};
-
- $self->_sql_maker_opts->{$sql_maker_opt} = $value;
- }
-
my %attrs = (
%{ $self->_default_dbi_connect_attributes || {} },
%{ $info->{attributes} || {} },
push @args, \%attrs if keys %attrs;
}
+
+ # this is the authoritative "always an arrayref" thing fed to DBI->connect
+ # OR a single-element coderef-based $dbh factory
$self->_dbi_connect_info(\@args);
+ # extract the individual storage options
+ for my $storage_opt (keys %{ $info->{storage_options} }) {
+ my $value = $info->{storage_options}{$storage_opt};
+
+ $self->$storage_opt($value);
+ }
+
+ # Extract the individual sqlmaker options
+ #
+ # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
+ # the new set of options
+ $self->_sql_maker(undef);
+ $self->_sql_maker_opts({});
+
+ for my $sql_maker_opt (keys %{ $info->{sql_maker_options} }) {
+ my $value = $info->{sql_maker_options}{$sql_maker_opt};
+
+ $self->_sql_maker_opts->{$sql_maker_opt} = $value;
+ }
+
# FIXME - dirty:
- # save attributes them in a separate accessor so they are always
+ # save attributes in a separate accessor so they are always
# introspectable, even in case of a CODE $dbhmaker
$self->_dbic_connect_attributes (\%attrs);
return $self->_connect_info;
}
+ sub _dbi_connect_info {
+ my $self = shift;
+
+ return $self->{_dbi_connect_info} = $_[0]
+ if @_;
+
+ my $conninfo = $self->{_dbi_connect_info} || [];
+
+ # last ditch effort to grab a DSN
+ if ( ! defined $conninfo->[0] and $ENV{DBI_DSN} ) {
+ my @new_conninfo = @$conninfo;
+ $new_conninfo[0] = $ENV{DBI_DSN};
+ $conninfo = \@new_conninfo;
+ }
+
+ return $conninfo;
+ }
+
+
sub _normalize_connect_info {
my ($self, $info_arg) = @_;
my %info;
sub dbh_do {
my $self = shift;
- my $run_target = shift;
+ my $run_target = shift; # either a coderef or a method name
# short circuit when we know there is no need for a runner
#
DBIx::Class::Storage::BlockRunner->new(
storage => $self,
- run_code => sub { $self->$run_target ($self->_get_dbh, @$args ) },
wrap_txn => 0,
- retry_handler => sub { ! ( $_[0]->retried_count or $_[0]->storage->connected ) },
- )->run;
+ retry_handler => sub {
+ $_[0]->failed_attempt_count == 1
+ and
+ ! $_[0]->storage->connected
+ },
+ )->run(sub {
+ $self->$run_target ($self->_get_dbh, @$args )
+ });
}
sub txn_do {
return $self->_dbh;
}
+ # *DELIBERATELY* not a setter (for the time being)
+ # Too intertwined with everything else for any kind of sanity
sub sql_maker {
- my ($self) = @_;
+ my $self = shift;
+
+ $self->throw_exception('sql_maker() is not a setter method') if @_;
+
unless ($self->_sql_maker) {
my $sql_maker_class = $self->sql_maker_class;
sub _populate_dbh {
my ($self) = @_;
- my @info = @{$self->_dbi_connect_info || []};
$self->_dbh(undef); # in case ->connected failed we might get sent here
$self->_dbh_details({}); # reset everything we know
+ $self->_sql_maker(undef); # this may also end up being different
- $self->_dbh($self->_connect(@info));
+ $self->_dbh($self->_connect);
$self->_conn_pid($$) unless DBIx::Class::_ENV_::BROKEN_FORK; # on win32 these are in fact threads
require DBI::Const::GetInfoReturn;
my $self = shift;
- $self->ensure_connected;
+
+ my $drv;
+ try {
+ $drv = $self->_extract_driver_from_connect_info;
+ $self->ensure_connected;
+ };
+
+ $drv = "DBD::$drv" if $drv;
my $res = {
DBIC_DSN => $self->_dbi_connect_info->[0],
DBI_VER => DBI->VERSION,
DBIC_VER => DBIx::Class->VERSION,
DBIC_DRIVER => ref $self,
+ $drv ? (
+ DBD => $drv,
+ DBD_VER => try { $drv->VERSION },
+ ) : (),
};
+ # try to grab data even if we never managed to connect
+ # will cover us in cases of an oddly broken half-connect
for my $inf (
#keys %DBI::Const::GetInfoType::GetInfoType,
qw/
$started_connected = 1;
}
else {
- # if connect_info is a CODEREF, we have no choice but to connect
- if (ref $self->_dbi_connect_info->[0] &&
- reftype $self->_dbi_connect_info->[0] eq 'CODE') {
- $self->_populate_dbh;
- $driver = $self->_dbh->{Driver}{Name};
- }
- else {
- # try to use dsn to not require being connected, the driver may still
- # force a connection in _rebless to determine version
- # (dsn may not be supplied at all if all we do is make a mock-schema)
- my $dsn = $self->_dbi_connect_info->[0] || $ENV{DBI_DSN} || '';
- ($driver) = $dsn =~ /dbi:([^:]+):/i;
- $driver ||= $ENV{DBI_DRIVER};
- }
+ $driver = $self->_extract_driver_from_connect_info;
}
if ($driver) {
}
}
+ sub _extract_driver_from_connect_info {
+ my $self = shift;
+
+ my $drv;
+
+ # if connect_info is a CODEREF, we have no choice but to connect
+ if (
+ ref $self->_dbi_connect_info->[0]
+ and
+ reftype $self->_dbi_connect_info->[0] eq 'CODE'
+ ) {
+ $self->_populate_dbh;
+ $drv = $self->_dbh->{Driver}{Name};
+ }
+ else {
+ # try to use dsn to not require being connected, the driver may still
+ # force a connection later in _rebless to determine version
+ # (dsn may not be supplied at all if all we do is make a mock-schema)
+ ($drv) = ($self->_dbi_connect_info->[0] || '') =~ /^dbi:([^:]+):/i;
+ $drv ||= $ENV{DBI_DRIVER};
+ }
+
+ return $drv;
+ }
+
sub _determine_connector_driver {
my ($self, $conn) = @_;
}
sub _connect {
- my ($self, @info) = @_;
+ my $self = shift;
+
+ my $info = $self->_dbi_connect_info;
$self->throw_exception("You did not provide any connection_info")
- if ( ! defined $info[0] and ! $ENV{DBI_DSN} and ! $ENV{DBI_DRIVER} );
+ unless defined $info->[0];
my ($old_connect_via, $dbh);
};
try {
- if(ref $info[0] eq 'CODE') {
- $dbh = $info[0]->();
+ if(ref $info->[0] eq 'CODE') {
+ $dbh = $info->[0]->();
}
else {
require DBI;
- $dbh = DBI->connect(@info);
+ $dbh = DBI->connect(@$info);
}
die $DBI::errstr unless $dbh;
die sprintf ("%s fresh DBI handle with a *false* 'Active' attribute. "
. 'This handle is disconnected as far as DBIC is concerned, and we can '
. 'not continue',
- ref $info[0] eq 'CODE'
- ? "Connection coderef $info[0] returned a"
+ ref $info->[0] eq 'CODE'
+ ? "Connection coderef $info->[0] returned a"
: 'DBI->connect($schema->storage->connect_info) resulted in a'
) unless $dbh->FETCH('Active');
# Default via _default_dbi_connect_attributes is 1, hence it was an explicit
# request, or an external handle. Complain and set anyway
unless ($dbh->{RaiseError}) {
- carp( ref $info[0] eq 'CODE'
+ carp( ref $info->[0] eq 'CODE'
? "The 'RaiseError' of the externally supplied DBI handle is set to false. "
."DBIx::Class will toggle it back to true, unless the 'unsafe' connect "
};
$self->_dbh_autocommit($dbh->{AutoCommit});
- $dbh;
+ return $dbh;
}
sub txn_begin {
# soooooo much better now. But that is also another
# battle...
#return (
- # 'select', @{$orig_attrs->{_sqlmaker_select_args}}
- #) if $orig_attrs->{_sqlmaker_select_args};
+ # 'select', $orig_attrs->{!args_as_stored_at_the_end_of_this_method!}
+ #) if $orig_attrs->{!args_as_stored_at_the_end_of_this_method!};
my $sql_maker = $self->sql_maker;
my $alias2source = $self->_resolve_ident_sources ($ident);
($attrs->{from}, $attrs->{_aliastypes}) = $self->_prune_unused_joins ($attrs);
}
+ # FIXME this is a gross, inefficient, largely incorrect and fragile hack
+ # during the result inflation stage we *need* to know what was the aliastype
+ # map as sqla saw it when the final pieces of SQL were being assembled
+ # Originally we simply carried around the entirety of $attrs, but this
+ # resulted in resultsets that are being reused growing continuously, as
+ # the hash in question grew deeper and deeper.
+ # Instead hand-pick what to take with us here (we actually don't need much
+ # at this point just the map itself)
+ $orig_attrs->{_last_sqlmaker_alias_map} = $attrs->{_aliastypes};
+
###
# This would be the point to deflate anything found in $attrs->{where}
# (and leave $attrs->{bind} intact). Problem is - inflators historically
# invoked, and that's just bad...
###
- return ( 'select', @{ $orig_attrs->{_sqlmaker_select_args} = [
- @{$attrs}{qw(from select where)}, $attrs, @limit_args
- ]} );
+ return ( 'select', @{$attrs}{qw(from select where)}, $attrs, @limit_args );
}
# Returns a counting SELECT for a simple count
that the Pool object should get.
my $schema = Schema::Class->clone;
- $schema->storage_type( ['::DBI::Replicated', {balancer=>'::Random'}] );
+ $schema->storage_type(['::DBI::Replicated', { balancer_type => '::Random' }]);
$schema->connection(...);
Next, you need to add in the Replicants. Basically this is an array of
_dbh_get_info
_determine_connector_driver
+ _extract_driver_from_connect_info
_describe_connection
_warn_undetermined_driver
sql_quote_char
sql_name_sep
+ perl_renderer
+
_prefetch_autovalues
_perform_autoinc_retrieval
_autoinc_supplied_for_op
my $result = $resultset->search(undef, {force_pool=>'master'})->find($pk);
- This attribute will safely be ignore by non replicated storages, so you can use
+ This attribute will safely be ignored by non replicated storages, so you can use
the same code for both types of systems.
Lastly, you can use the L</execute_reliably> method, which works very much like
sub sql_maker {
my $self = shift;
- unless ($self->_sql_maker) {
- my $maker = $self->next::method (@_);
+ # it is critical to get the version *before* calling next::method
+ # otherwise the potential connect will obliterate the sql_maker
+ # that next::method will populate into the _sql_maker accessor
+ my $mysql_ver = $self->_server_info->{normalized_dbms_version};
- # mysql 3 does not understand a bare JOIN
- my $mysql_ver = $self->_dbh_get_info('SQL_DBMS_VER');
- $maker->needs_inner_join(1) if $mysql_ver =~ /^3/;
- }
+ my $sm = $self->next::method(@_);
+
+ # mysql 3 does not understand a bare JOIN
- $sm->{_default_jointype} = 'INNER' if $mysql_ver < 4;
++ $sm->needs_inner_join(1) if $mysql_ver < 4;
- return $self->_sql_maker;
+ $sm;
}
sub sqlt_type {
use List::Util 'first';
use Scalar::Util 'blessed';
use Sub::Name 'subname';
+use Data::Query::Constants;
+use Data::Query::ExprHelpers;
use namespace::clean;
#
# join collapse *will not work* on heavy data types.
my $connecting_aliastypes = $self->_resolve_aliastypes_from_select_args({
%$inner_attrs,
- select => [],
+ select => undef,
});
for (sort map { keys %{$_->{-seen_columns}||{}} } map { values %$_ } values %$connecting_aliastypes) {
$sql_maker->{name_sep} = '';
}
+ # delete local is 5.12+
+ local @{$sql_maker}{qw(renderer converter)};
+ delete @{$sql_maker}{qw(renderer converter)};
+
my ($lquote, $rquote, $sep) = map { quotemeta $_ } ($sql_maker->_quote_chars, $sql_maker->name_sep);
# generate sql chunks
my $to_scan = {
restricting => [
- $sql_maker->_recurse_where ($attrs->{where}),
- $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} }),
+ ($attrs->{where}
+ ? ($sql_maker->_recurse_where($attrs->{where}))[0]
+ : ()
+ ),
+ ($attrs->{having}
+ ? ($sql_maker->_recurse_where($attrs->{having}))[0]
+ : ()
+ ),
],
grouping => [
- $sql_maker->_parse_rs_attrs ({ group_by => $attrs->{group_by} }),
+ ($attrs->{group_by}
+ ? ($sql_maker->_render_sqla(group_by => $attrs->{group_by}))[0]
+ : (),
+ )
],
joining => [
$sql_maker->_recurse_from (
),
],
selecting => [
- ($attrs->{select}
- ? ($sql_maker->_render_sqla(select_select => $attrs->{select}))[0]
- : ()),
- map { $sql_maker->_recurse_fields($_) } @{$attrs->{select}},
++ map { $sql_maker->_render_sqla(select_select => $_) =~ /^SELECT\s+(.+)/ } @{$attrs->{select}||[]},
],
ordering => [
map { $_->[0] } $self->_extract_order_criteria ($attrs->{order_by}, $sql_maker),
],
};
- # throw away empty chunks
- $_ = [ map { $_ || () } @$_ ] for values %$to_scan;
+ # throw away empty chunks and all 2-value arrayrefs: the thinking is that these are
+ # bind value specs left in by the sloppy renderer above. It is ok to do this
+ # at this point, since we are going to end up rewriting this crap anyway
+ for my $v (values %$to_scan) {
+ my @nv;
+ for (@$v) {
+ next if (
+ ! defined $_
+ or
+ (
+ ref $_ eq 'ARRAY'
+ and
+ ( @$_ == 0 or @$_ == 2 )
+ )
+ );
+
+ if (ref $_) {
+ require Data::Dumper::Concise;
+ $self->throw_exception("Unexpected ref in scan-plan: " . Data::Dumper::Concise::Dumper($v) );
+ }
+
+ push @nv, $_;
+ }
+
+ $v = \@nv;
+ }
+
+ # kill all selectors which look like a proper subquery
+ # this is a sucky heuristic *BUT* - if we get it wrong the query will simply
+ # fail to run, so we are relatively safe
+ $to_scan->{selecting} = [ grep {
+ $_ !~ / \A \s* \( \s* SELECT \s+ .+? \s+ FROM \s+ .+? \) \s* \z /xsi
+ } @{ $to_scan->{selecting} || [] } ];
# first see if we have any exact matches (qualified or unqualified)
for my $type (keys %$to_scan) {
}
}
- my @order_by = $self->_extract_order_criteria($attrs->{order_by})
+ my $sql_maker = $self->sql_maker;
+ my @order_by = $self->_extract_order_criteria($attrs->{order_by}, $sql_maker)
or return (\@group_by, $attrs->{order_by});
# add any order_by parts that are not already present in the group_by
# the proper overall order without polluting the group criteria (and
# possibly changing the outcome entirely)
- my ($leftovers, $sql_maker, @new_order_by, $order_chunks, $aliastypes);
+ my ($leftovers, @new_order_by, $order_chunks, $aliastypes);
my $group_already_unique = $self->_columns_comprise_identifying_set($colinfos, \@group_by);
# pesky tests won't pass
# wrap any part of the order_by that "responds" to an ordering alias
# into a MIN/MAX
- $sql_maker ||= $self->sql_maker;
- $order_chunks ||= [
- map { ref $_ eq 'ARRAY' ? $_ : [ $_ ] } $sql_maker->_order_by_chunks($attrs->{order_by})
- ];
- my ($chunk, $is_desc) = $sql_maker->_split_order_chunk($order_chunks->[$o_idx][0]);
+ $order_chunks ||= do {
+ my @c;
+ my $dq_node = $sql_maker->converter->_order_by_to_dq($attrs->{order_by});
- $new_order_by[$o_idx] = \[
- sprintf( '%s( %s )%s',
- ($is_desc ? 'MAX' : 'MIN'),
- $chunk,
- ($is_desc ? ' DESC' : ''),
- ),
- @ {$order_chunks->[$o_idx]} [ 1 .. $#{$order_chunks->[$o_idx]} ]
- ];
+ while (is_Order($dq_node)) {
+ push @c, {
+ is_desc => $dq_node->{reverse},
+ dq_node => $dq_node->{by},
+ };
+
+ @{$c[-1]}{qw(sql bind)} = $sql_maker->_render_dq($dq_node->{by});
+
+ $dq_node = $dq_node->{from};
+ }
+
+ \@c;
+ };
+
+ $new_order_by[$o_idx] = {
+ ($order_chunks->[$o_idx]{is_desc} ? '-desc' : '-asc') => \[
+ sprintf ( '%s( %s )',
+ ($order_chunks->[$o_idx]{is_desc} ? 'MAX' : 'MIN'),
+ $order_chunks->[$o_idx]{sql},
+ ),
+ @{ $order_chunks->[$o_idx]{bind} || [] }
+ ]
+ };
}
}
# recreate the untouched order parts
if (@new_order_by) {
- $new_order_by[$_] ||= \ $order_chunks->[$_] for ( 0 .. $#$order_chunks );
+ $new_order_by[$_] ||= {
+ ( $order_chunks->[$_]{is_desc} ? '-desc' : '-asc' )
+ => \ $order_chunks->[$_]{dq_node}
+ } for ( 0 .. $#$order_chunks );
}
return (
}
sub _extract_order_criteria {
- my ($self, $order_by, $sql_maker) = @_;
-
- my $parser = sub {
- my ($sql_maker, $order_by, $orig_quote_chars) = @_;
+ my ($self, $order_by, $sql_maker, $ident_only) = @_;
- return scalar $sql_maker->_order_by_chunks ($order_by)
- unless wantarray;
+ $sql_maker ||= $self->sql_maker;
- my ($lq, $rq, $sep) = map { quotemeta($_) } (
- ($orig_quote_chars ? @$orig_quote_chars : $sql_maker->_quote_chars),
- $sql_maker->name_sep
- );
-
- my @chunks;
- for ($sql_maker->_order_by_chunks ($order_by) ) {
- my $chunk = ref $_ ? [ @$_ ] : [ $_ ];
- ($chunk->[0]) = $sql_maker->_split_order_chunk($chunk->[0]);
+ my $order_dq = $sql_maker->converter->_order_by_to_dq($order_by);
- # order criteria may have come back pre-quoted (literals and whatnot)
- # this is fragile, but the best we can currently do
- $chunk->[0] =~ s/^ $lq (.+?) $rq $sep $lq (.+?) $rq $/"$1.$2"/xe
- or $chunk->[0] =~ s/^ $lq (.+) $rq $/$1/x;
+ my @by;
+ while (is_Order($order_dq)) {
+ push @by, $order_dq->{by};
+ $order_dq = $order_dq->{from};
+ }
- push @chunks, $chunk;
+ # delete local is 5.12+
+ local @{$sql_maker}{qw(quote_char renderer converter)};
+ delete @{$sql_maker}{qw(quote_char renderer converter)};
+
+ return map { [ $sql_maker->_render_dq($_) ] } do {
+ if ($ident_only) {
+ my @by_ident;
+ scan_dq_nodes({ DQ_IDENTIFIER ,=> sub { push @by_ident, $_[0] } }, @by);
+ @by_ident
+ } else {
+ @by
}
-
- return @chunks;
};
-
- if ($sql_maker) {
- return $parser->($sql_maker, $order_by);
- }
- else {
- $sql_maker = $self->sql_maker;
-
- # pass these in to deal with literals coming from
- # the user or the deep guts of prefetch
- my $orig_quote_chars = [$sql_maker->_quote_chars];
-
- local $sql_maker->{quote_char};
- return $parser->($sql_maker, $order_by, $orig_quote_chars);
- }
}
sub _order_by_is_stable {
my ($self, $ident, $order_by, $where) = @_;
my @cols = (
- (map { $_->[0] } $self->_extract_order_criteria($order_by)),
+ (map { $_->[0] } $self->_extract_order_criteria($order_by, undef, 1)),
$where ? @{$self->_extract_fixed_condition_columns($where)} :(),
) or return undef;
sub _extract_fixed_condition_columns {
my ($self, $where) = @_;
+ if (ref($where) eq 'REF' and ref($$where) eq 'HASH') {
+ # Yes. I know.
+ my $fixed = DBIx::Class::ResultSource->_extract_fixed_values_for($$where);
+ return [ keys %$fixed ];
+ }
+
return unless ref $where eq 'HASH';
my @cols;
use strict;
use warnings;
+# Needs to load 1st so that the correct SQLA::Test is picked up
+use DBIx::Class::_TempExtlib;
+
# this noop trick initializes the STDOUT, so that the TAP::Harness
# issued IO::Select->can_read calls (which are blocking wtf wtf wtf)
# keep spinning and scheduling jobs
}
}
+# This is a pretty good candidate for a standalone extraction (Test::AutoSkip?)
+BEGIN {
+ if (
+ ! $ENV{RELEASE_TESTING}
+ and
+ ! $ENV{AUTHOR_TESTING}
+ and
+ $0 =~ /^ (.*) x?t [\/\\] .+ \.t $/x
+ and
+ -f ( my $fn = "$1.auto_todo")
+ ) {
+ # fuck you win32
+ require File::Spec;
+ my $canonical_dollarzero = File::Spec::Unix->catpath(File::Spec->splitpath($0));
+
+ for my $t ( map {
+ ( $_ =~ /^ \s* ( [^\#\n]+ ) /x ) ? $1 : ()
+ } do { local @ARGV = $fn; <> } ) {
+ if ( $canonical_dollarzero =~ m! (?: \A | / ) \Q$t\E \z !x ) {
+ require Test::Builder;
+ Test::Builder->new->todo_start("Global todoification of '$t' specified in $fn");
+ }
+ }
+ }
+}
+
use Module::Runtime 'module_notional_filename';
BEGIN {
for my $mod (qw( DBIC::SqlMakerTest SQL::Abstract )) {
use Path::Class::File ();
use File::Spec;
use Fcntl qw/:DEFAULT :flock/;
+ use Config;
=head1 NAME
sub import {
my $self = shift;
- my $tmpdir = DBICTest::RunMode->tmpdir;
- my $lockpath = $tmpdir->file('.dbictest_global.lock');
+ my $lockpath = DBICTest::RunMode->tmpdir->file('_dbictest_global.lock');
{
my $u = local_umask(0); # so that the file opens as 666, and any user can lock
- sysopen ($global_lock_fh, $lockpath, O_RDWR|O_CREAT) or do {
- my $err = $!;
-
- my @x_tests = map { (defined $_) ? ( $_ ? 1 : 0 ) : 'U' } map {(-e, -d, -f, -r, -w, -x, -o)} ($tmpdir, $lockpath);
-
- die sprintf <<"EOE", $lockpath, $err, scalar $>, scalar $), (stat($tmpdir))[4,5,2], @x_tests;
- Unable to open %s: %s
- Process EUID/EGID: %s / %s
- TmpDir UID/GID: %s / %s
- TmpDir StatMode: %o
- TmpDir X-tests: -e:%s -d:%s -f:%s -r:%s -w:%s -x:%s -o:%s
- TmpFile X-tests: -e:%s -d:%s -f:%s -r:%s -w:%s -x:%s -o:%s
- EOE
- };
+ sysopen ($global_lock_fh, $lockpath, O_RDWR|O_CREAT)
+ or die "Unable to open $lockpath: $!";
}
for (@_) {
# this is executed on every connect, and thus installs a disconnect/DESTROY
# guard for every new $dbh
on_connect_do => sub {
+
my $storage = shift;
my $dbh = $storage->_get_dbh;
# no fsync on commit
$dbh->do ('PRAGMA synchronous = OFF');
- if ($ENV{DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER}) {
-
- $storage->throw_exception(
- 'PRAGMA reverse_unordered_selects does not work correctly before libsqlite 3.7.9'
- ) if $storage->_server_info->{normalized_dbms_version} < 3.007009;
-
+ if (
+ $ENV{DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER}
+ and
+ # the pragma does not work correctly before libsqlite 3.7.9
+ $storage->_server_info->{normalized_dbms_version} >= 3.007009
+ ) {
$dbh->do ('PRAGMA reverse_unordered_selects = ON');
}
my $cur_inode = (stat($db_file))[1];
if ($orig_inode != $cur_inode) {
- # pack/unpack to match the unsigned longs returned by `stat`
- $fail_reason = sprintf 'was recreated (initially inode %s, now %s)', (
- map { unpack ('L', pack ('l', $_) ) } ($orig_inode, $cur_inode )
- );
+ my @inodes = ($orig_inode, $cur_inode);
+ # unless this is a fixed perl (P5RT#84590) pack/unpack before display
+ # to match the unsigned longs returned by `stat`
+ @inodes = map { unpack ('L', pack ('l', $_) ) } @inodes
+ unless $Config{st_ino_size};
+
+ $fail_reason = sprintf
+ 'was recreated (initially inode %s, now %s)',
+ @inodes
+ ;
}
}
use base qw/DBICTest::BaseResult/;
use Carp qw/confess/;
+use Data::Query::ExprDeclare;
__PACKAGE__->table('artist');
__PACKAGE__->source_info({
# the undef condition in this rel is *deliberate*
# tests oddball legacy syntax
__PACKAGE__->has_many(
- cds => 'DBICTest::Schema::CD', undef,
+ cds => 'DBICTest::Schema::CD',
+ expr { $_->foreign->artist == $_->self->artistid },
{ order_by => { -asc => 'year'} },
);
);
__PACKAGE__->many_to_many('artworks', 'artwork_to_artist', 'artwork');
+ __PACKAGE__->has_many(
+ cds_without_genre => 'DBICTest::Schema::CD',
+ sub {
+ my $args = shift;
+ return (
+ {
+ "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
+ "$args->{foreign_alias}.genreid" => undef,
+ }, $args->{self_rowobj} && {
+ "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,
+ "$args->{foreign_alias}.genreid" => undef,
+ }
+ ),
+ },
+ );
sub sqlt_deploy_hook {
my ($self, $sqlt_table) = @_;
exselect_outer => 'ORDER__BY__001, ORDER__BY__002, ORDER__BY__003',
exselect_inner => 'title AS ORDER__BY__001, bar AS ORDER__BY__002, sensors AS ORDER__BY__003',
},
+
+ {
+ order_by => [
+ 'name',
+ ],
+ order_inner => 'name',
+ order_outer => 'name DESC',
+ order_req => 'name',
+ },
) {
my $o_sel = $ord_set->{exselect_outer}
? ', ' . $ord_set->{exselect_outer}
: ''
;
+ my $rs = $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}});
+
+ # query actually works
+ ok( defined $rs->count, 'Query actually works' );
+
is_same_sql_bind(
- $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query,
+ $rs->as_query,
"(SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name
FROM (
SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name$o_sel
[ [ { sqlt_datatype => 'varchar', sqlt_size => 100, dbic_colname => 'source' }
=> 'Library' ] ],
);
+
}
# with groupby
ORDER BY title
FETCH FIRST 5 ROWS ONLY
) me
- ORDER BY title DESC
+ ORDER BY me.title DESC
FETCH FIRST 2 ROWS ONLY
) me
- ORDER BY title
+ ORDER BY me.title
) me
JOIN owners owner ON owner.id = me.owner
WHERE ( source = ? )
'(
SELECT me.id, owner__id, owner__name, bar, baz
FROM (
- SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index
+ SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index
FROM (
SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
FROM books me
'(
SELECT me.id, owner__id, owner__name, bar, baz
FROM (
- SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index
+ SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index
FROM (
SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
FROM books me
FROM (
SELECT me.name, me.id
FROM (
- SELECT me.name, me.id, ROWNUM rownum__index
+ SELECT me.name, me.id, ROWNUM AS rownum__index
FROM (
SELECT me.name, me.id
FROM owners me
WHERE source != ? AND me.title = ? AND source = ?
GROUP BY (me.id / ?), owner.id
HAVING ?
- ORDER BY me.id
FETCH FIRST 7 ROWS ONLY
) me
- ORDER BY me.id DESC
FETCH FIRST 4 ROWS ONLY
)',
[
use DBICTest;
use DBIC::SqlMakerTest;
- use DBIx::Class::SQLMaker::ACCESS ();
+ # the entire point of the subclass is that parenthesis have to be
+ # just right for ACCESS to be happy
+ # globalize for entirety of the test
+ $SQL::Abstract::Test::parenthesis_significant = 1;
- my $sa = DBIx::Class::SQLMaker::ACCESS->new;
+ my $schema = DBICTest->init_schema (storage_type => 'DBIx::Class::Storage::DBI::ACCESS', no_deploy => 1, quote_names => 1);
+
+ is_same_sql_bind(
+ $schema->resultset('Artist')->search(
+ {
+ artistid => 1,
+ },
+ {
+ join => [{ cds => 'tracks' }],
+ '+select' => [ 'tracks.title' ],
+ '+as' => [ 'track_title' ],
+ }
+ )->as_query,
+ '(
+ SELECT [me].[artistid], [me].[name], [me].[rank], [me].[charfield],
+ [tracks].[title]
+ FROM (
+ (
+ [artist] [me]
+ LEFT JOIN cd [cds]
+ ON [cds].[artist] = [me].[artistid]
+ )
+ LEFT JOIN [track] [tracks]
+ ON [tracks].[cd] = [cds].[cdid]
+ )
- WHERE ( [artistid] = ? )
++ WHERE [artistid] = ?
+ )',
+ [
+ [{ sqlt_datatype => 'integer', dbic_colname => 'artistid' }
+ => 1 ],
+ ],
+ 'correct SQL for two-step left join'
+ );
+
+ is_same_sql_bind(
+ $schema->resultset('Track')->search(
+ {
+ trackid => 1,
+ },
+ {
+ join => [{ cd => 'artist' }],
+ '+select' => [ 'artist.name' ],
+ '+as' => [ 'artist_name' ],
+ }
+ )->as_query,
+ '(
+ SELECT [me].[trackid], [me].[cd], [me].[position], [me].[title], [me].[last_updated_on], [me].[last_updated_at],
+ [artist].[name]
+ FROM (
+ (
+ [track] [me]
+ INNER JOIN cd [cd]
+ ON [cd].[cdid] = [me].[cd]
+ )
+ INNER JOIN [artist] [artist]
+ ON [artist].[artistid] = [cd].[artist]
+ )
- WHERE ( [trackid] = ? )
++ WHERE [trackid] = ?
+ )',
+ [
+ [{ sqlt_datatype => 'integer', dbic_colname => 'trackid' }
+ => 1 ],
+ ],
+ 'correct SQL for two-step inner join',
+ );
+
+
+ my $sa = $schema->storage->sql_maker;
+ # the legacy tests assume no quoting - leave things as-is
-local $sa->{quote_char};
++$sa->quote_char(undef);
# my ($self, $table, $fields, $where, $order, @rest) = @_;
my ($sql, @bind) = $sa->select(