From: Peter Rabbitson <ribasushi@cpan.org>
Date: Wed, 24 Sep 2014 06:53:51 +0000 (+0200)
Subject: After 5268b1da populate remained the sole user of Data::Compare
X-Git-Tag: v0.082800~15
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=cf9ba393c6309a66fe31b2decb7574fdf068a759;p=dbsrgits%2FDBIx-Class.git

After 5268b1da populate remained the sole user of Data::Compare

Might as well throw it out — the check in question is rather rare, and
a freeze-and-compare approach is likely more efficient anyway
---

diff --git a/Makefile.PL b/Makefile.PL
index ad540f1..ff2682c 100644
--- a/Makefile.PL
+++ b/Makefile.PL
@@ -58,8 +58,6 @@ perl_version '5.008001';
 ### All of them *MUST* go to DBIx::Class::Optional::Dependencies
 ###
 my $runtime_requires = {
-  # FIXME - temporary, needs throwing out for something more efficient
-  'Data::Compare'            => '1.22',
 
   # DBI itself should be capable of installation and execution in pure-perl
   # mode. However it has never been tested yet, so consider XS for the time
diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index a53200d..8a99d06 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -12,9 +12,8 @@ use Scalar::Util qw/refaddr weaken reftype blessed/;
 use List::Util qw/first/;
 use Context::Preserve 'preserve_context';
 use Try::Tiny;
-use Data::Compare (); # no imports!!! guard against insane architecture
 use SQL::Abstract qw(is_plain_value is_literal_value);
-use DBIx::Class::_Util qw(quote_sub perlstring);
+use DBIx::Class::_Util qw(quote_sub perlstring serialize);
 use namespace::clean;
 
 # default cursor class, overridable in connect_info attributes
@@ -2070,7 +2069,7 @@ sub _insert_bulk {
   # can't just hand SQLA a set of some known "values" (e.g. hashrefs that
   # can be later matched up by address), because we want to supply a real
   # value on which perhaps e.g. datatype checks will be performed
-  my ($proto_data, $value_type_by_col_idx);
+  my ($proto_data, $serialized_bind_type_by_col_idx);
   for my $col_idx (0..$#$cols) {
     my $colname = $cols->[$col_idx];
     if (ref $data->[0][$col_idx] eq 'SCALAR') {
@@ -2089,7 +2088,7 @@ sub _insert_bulk {
 
       # store value-less (attrs only) bind info - we will be comparing all
       # supplied binds against this for sanity
-      $value_type_by_col_idx->{$col_idx} = [ map { $_->[0] } @$resolved_bind ];
+      $serialized_bind_type_by_col_idx->{$col_idx} = serialize [ map { $_->[0] } @$resolved_bind ];
 
       $proto_data->{$colname} = \[ $sql, map { [
         # inject slice order to use for $proto_bind construction
@@ -2100,7 +2099,7 @@ sub _insert_bulk {
       ];
     }
     else {
-      $value_type_by_col_idx->{$col_idx} = undef;
+      $serialized_bind_type_by_col_idx->{$col_idx} = undef;
 
       $proto_data->{$colname} = \[ '?', [
         { dbic_colname => $colname, _bind_data_slice_idx => $col_idx }
@@ -2116,7 +2115,7 @@ sub _insert_bulk {
     [ $proto_data ],
   );
 
-  if (! @$proto_bind and keys %$value_type_by_col_idx) {
+  if (! @$proto_bind and keys %$serialized_bind_type_by_col_idx) {
     # if the bindlist is empty and we had some dynamic binds, this means the
     # storage ate them away (e.g. the NoBindVars component) and interpolated
     # them directly into the SQL. This obviously can't be good for multi-inserts
@@ -2150,7 +2149,7 @@ sub _insert_bulk {
     for my $row_idx (1..$#$data) {  # we are comparing against what we got from [0] above, hence start from 1
       my $val = $data->[$row_idx][$col_idx];
 
-      if (! exists $value_type_by_col_idx->{$col_idx}) { # literal no binds
+      if (! exists $serialized_bind_type_by_col_idx->{$col_idx}) { # literal no binds
         if (ref $val ne 'SCALAR') {
           $bad_slice_report_cref->(
             "Incorrect value (expecting SCALAR-ref \\'$$reference_val')",
@@ -2166,7 +2165,7 @@ sub _insert_bulk {
           );
         }
       }
-      elsif (! defined $value_type_by_col_idx->{$col_idx} ) {  # regular non-literal value
+      elsif (! defined $serialized_bind_type_by_col_idx->{$col_idx} ) {  # regular non-literal value
         if (is_literal_value($val)) {
           $bad_slice_report_cref->("Literal SQL found where a plain bind value is expected", $row_idx, $col_idx);
         }
@@ -2194,16 +2193,17 @@ sub _insert_bulk {
           }
           # need to check the bind attrs - a bind will happen only once for
           # the entire dataset, so any changes further down will be ignored.
-          elsif (! Data::Compare::Compare(
-            $value_type_by_col_idx->{$col_idx},
-            [
+          elsif (
+            $serialized_bind_type_by_col_idx->{$col_idx}
+              ne
+            serialize [
               map
               { $_->[0] }
               @{$self->_resolve_bindattrs(
                 $source, [ @{$$val}[1 .. $#$$val] ], $colinfos,
               )}
-            ],
-          )) {
+            ]
+          ) {
             $bad_slice_report_cref->(
               'Differing bind attributes on literal/bind values not supported',
               $row_idx,
diff --git a/t/53lean_startup.t b/t/53lean_startup.t
index 0c66c7d..b6c8be9 100644
--- a/t/53lean_startup.t
+++ b/t/53lean_startup.t
@@ -129,7 +129,6 @@ BEGIN {
     Method::Generate::Accessor
     Method::Generate::Constructor
     Context::Preserve
-    Data::Compare
   ));
 
   my $s = DBICTest::Schema->connect('dbi:SQLite::memory:');