From e9bd8a70d4653ca6df08ae810b5ca9937da67ab7 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Apr 2014 22:00:00 +0100 Subject: [PATCH 001/548] Commemorate the first DBIx::Class hackathon and beef up the contributor list :) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The event was a blast. A lot of very useful work was done and at least one very thorny problem got resolved once and for all. For more info check out the accounts of some of the participants at http://dbix-class.org/hackathon01-20140412 Sincere thanks go to all who attended: Aaron "arc' Crane Andrew Mehta Colin "bfwg" Newell Dagfinn Ilmari Mannsåker Duncan Garland Ian "idn" Norton James "theorbtwo" Mastros Jose Luis "pplu" Martinez Jess "castaway" Robinson Mark "mdk" Keating Murray "minty" Walker Oriol "uree" Soriano Peter "ribasushi" Rabbitson Stephen Peters Tom "TBSliver" Bloor As well as to the event sponsors who made this all possible: Eligo http://eligo.co.uk/ Ctrl-o http://ctrlo.com/ EPO http://enlightenedperl.org/ See you all next time! --- .mailmap | 2 ++ lib/DBIx/Class.pm | 26 ++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/.mailmap b/.mailmap index 587e076a4..7fa161b21 100644 --- a/.mailmap +++ b/.mailmap @@ -18,12 +18,14 @@ David Schmidt David Schmidt David Schmidt Devin Austin +Duncan Garland Felix Antonius Wilhelm Ostmann Gerda Shank Gianni Ceccarelli Gordon Irving Hakim Cassimally Jonathan Chu +Jose Luis Martinez Matt Phillips Norbert Csongrádi Roman Filippov diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index ba237a2ee..6c30f4abb 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -300,6 +300,8 @@ amiri: Amiri Barksdale amoore: Andrew Moore +Andrew Mehta + andrewalker: Andre Walker andyg: Andy Grundman @@ -314,6 +316,8 @@ ash: Ash Berlin bert: Norbert Csongrádi +bfwg: Colin Newell + blblack: Brandon L. Black bluefeet: Aran Deltac @@ -338,6 +342,8 @@ claco: Christopher H. 
Laco clkao: CL Kao +Ctrl-o L + da5id: David Jack Olrik dariusj: Darius Jokilehto @@ -356,12 +362,16 @@ dnm: Justin Wheeler dpetrov: Dimitar Petrov +duncan_dmg: Duncan Garland + dwc: Daniel Westermann-Clark dyfrgi: Michael Leuchtenburg edenc: Eden Cardim +Eligo L + ether: Karen Etheridge felliott: Fitz Elliott @@ -382,6 +392,8 @@ Haarg: Graham Knop hobbs: Andrew Rodland +idn: Ian Norton + ilmari: Dagfinn Ilmari MannsEker initself: Mike Baas @@ -422,10 +434,14 @@ mattlaw: Matt Lawrence mattp: Matt Phillips +mdk: Mark Keating + michaelr: Michael Reddick milki: Jonathan Chu +minty: Murray Walker + mithaldu: Christian Walde mjemmeson: Michael Jemmeson @@ -466,6 +482,8 @@ phaylon: Robert Sedlacek plu: Johannes Plunien +pplu: Jose Luis Martinez + Possum: Daniel LeWarne quicksilver: Jules Bean @@ -508,12 +526,18 @@ Squeeks sszabo: Stephan Szabo +Stephen Peters + talexb: Alex Beamish tamias: Ronald J Kimball +TBSliver: Tom Bloor + teejay : Aaron Trevena +theorbtwo: James Mastros + Todd Lipcon Tom Hukins @@ -536,6 +560,8 @@ wreis: Wallace Reis xenoterracide: Caleb Cushing +uree: Oriol Soriano + yrlnry: Mark Jason Dominus zamolxes: Bogdan Lucaciu From 070b8a0c17c8bf59611f0cf0409c77f3c474bd28 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 10 Apr 2014 13:28:16 +0200 Subject: [PATCH 002/548] Reduce Travis' stamina http://blog.travis-ci.com/2013-11-27-fast-finishing-builds/ --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index ed2c04e70..e22f22b07 100644 --- a/.travis.yml +++ b/.travis.yml @@ -75,6 +75,7 @@ env: - CLEANTEST=true matrix: + fast_finish: true include: # this particular perl is quite widespread - perl: 5.8.8_thr_mb From 47749813ce6ff4520eaf390431672133d289f962 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 11 Apr 2014 10:47:59 +0200 Subject: [PATCH 003/548] Refactor the double-testing-install attempt under travis Should be no functional changes, just shuffling to get everything to run under run_or_err --- maint/travis-ci_scripts/common.bash | 58 ++++++++++------------------- 1 file changed, 19 insertions(+), 39 deletions(-) diff --git a/maint/travis-ci_scripts/common.bash b/maint/travis-ci_scripts/common.bash index d50aebb43..896a44c82 100755 --- a/maint/travis-ci_scripts/common.bash +++ b/maint/travis-ci_scripts/common.bash @@ -17,19 +17,20 @@ tstamp() { echo -n "[$(date '+%H:%M:%S')]" ; } run_or_err() { echo_err -n "$(tstamp) $1 ... " + LASTCMD="$2" LASTEXIT=0 START_TIME=$SECONDS - # the tee is a handy debugging tool when stumpage is exceedingly strong - #LASTOUT=$( bash -c "$2" 2>&1 | tee /dev/stderr) || LASTEXIT=$? - LASTOUT=$( bash -c "$2" 2>&1 ) || LASTEXIT=$? + LASTOUT=$( eval "$2" 2>&1 ) || LASTEXIT=$? DELTA_TIME=$(( $SECONDS - $START_TIME )) if [[ "$LASTEXIT" != "0" ]] ; then - echo_err "FAILED !!! (after ${DELTA_TIME}s)" - echo_err "Command executed:" - echo_err "$2" - echo_err "STDOUT+STDERR:" - echo_err "$LASTOUT" + if [[ -z "$3" ]] ; then + echo_err "FAILED !!! (after ${DELTA_TIME}s)" + echo_err "Command executed:" + echo_err "$LASTCMD" + echo_err "STDOUT+STDERR:" + echo_err "$LASTOUT" + fi return $LASTEXIT else @@ -121,46 +122,23 @@ parallel_installdeps_notest() { installdeps() { if [[ -z "$@" ]] ; then return; fi - echo_err "$(tstamp) Processing dependencies: $@" + MODLIST=$(printf "%q " "$@" | perl -pe 's/^\s+|\s+$//g') local -x HARNESS_OPTIONS HARNESS_OPTIONS="j$NUMTHREADS" - echo_err -n "Attempting install of $# modules under parallel ($HARNESS_OPTIONS) testing ... 
" - - LASTEXIT=0 - START_TIME=$SECONDS - LASTOUT=$( _dep_inst_with_test "$@" ) || LASTEXIT=$? - DELTA_TIME=$(( $SECONDS - $START_TIME )) + if ! run_or_err "Attempting install of $# modules under parallel ($HARNESS_OPTIONS) testing ($MODLIST)" "_dep_inst_with_test $MODLIST" quiet_fail ; then + local errlog="failed after ${DELTA_TIME}s Exit:$LASTEXIT Log:$(/usr/bin/nopaste -q -s Shadowcat -d "Parallel testfail" <<< "$LASTOUT")" + echo "$errlog" - if [[ "$LASTEXIT" = "0" ]] ; then - echo_err "done (took ${DELTA_TIME}s)" - else - local errlog="after ${DELTA_TIME}s Exit:$LASTEXIT Log:$(/usr/bin/nopaste -q -s Shadowcat -d "Parallel testfail" <<< "$LASTOUT")" - echo_err -n "failed ($errlog) retrying with sequential testing ... " POSTMORTEM="$POSTMORTEM$( echo - echo "Depinstall under $HARNESS_OPTIONS parallel testing failed $errlog" - echo "=============================================================" - echo "Attempted installation of: $@" - echo "=============================================================" + echo "Depinstall of $MODLIST under $HARNESS_OPTIONS parallel testing $errlog" )" HARNESS_OPTIONS="" - LASTEXIT=0 - START_TIME=$SECONDS - LASTOUT=$( _dep_inst_with_test "$@" ) || LASTEXIT=$? - DELTA_TIME=$(( $SECONDS - $START_TIME )) - - if [[ "$LASTEXIT" = "0" ]] ; then - echo_err "done (took ${DELTA_TIME}s)" - else - echo_err "FAILED !!! (after ${DELTA_TIME}s)" - echo_err "STDOUT+STDERR:" - echo_err "$LASTOUT" - exit 1 - fi + run_or_err "Retrying same $# modules without parallel testing" "_dep_inst_with_test $MODLIST" fi INSTALLDEPS_OUT="${INSTALLDEPS_OUT}${LASTOUT}" @@ -169,9 +147,11 @@ installdeps() { _dep_inst_with_test() { if [[ "$DEVREL_DEPS" == "true" ]] ; then # --dev is already part of CPANM_OPT - $TIMEOUT_CMD cpanm "$@" 2>&1 + LASTCMD="$TIMEOUT_CMD cpanm $@" + $LASTCMD 2>&1 else - $TIMEOUT_CMD cpan "$@" 2>&1 + LASTCMD="$TIMEOUT_CMD cpan $@" + $LASTCMD 2>&1 # older perls do not have a CPAN which can exit with error on failed install for m in "$@"; do From 8f1a96a2e99bec81a1dcd86fbded3baa37730174 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 26 Mar 2014 08:11:43 +0100 Subject: [PATCH 004/548] Add progress meter to travis builds --- maint/travis-ci_scripts/common.bash | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/maint/travis-ci_scripts/common.bash b/maint/travis-ci_scripts/common.bash index 896a44c82..da6ce336f 100755 --- a/maint/travis-ci_scripts/common.bash +++ b/maint/travis-ci_scripts/common.bash @@ -20,7 +20,16 @@ run_or_err() { LASTCMD="$2" LASTEXIT=0 START_TIME=$SECONDS + + PRMETER_PIDFILE="$(tempfile)_$SECONDS" + # the double bash is to hide the job control messages + bash -c "bash -c 'echo \$\$ >> $PRMETER_PIDFILE; while true; do sleep 10; echo -n \"\${SECONDS}s ... \"; done' &" + LASTOUT=$( eval "$2" 2>&1 ) || LASTEXIT=$? 
+ + # stop progress meter + for p in $(cat "$PRMETER_PIDFILE"); do kill $p ; done + DELTA_TIME=$(( $SECONDS - $START_TIME )) if [[ "$LASTEXIT" != "0" ]] ; then From 66950d7a1a50c9f375a3f0fc7f40f02c789c814d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 10 Apr 2014 13:00:55 +0200 Subject: [PATCH 005/548] Add extra fetch_first test for right-side-only order --- t/sqlmaker/limit_dialects/fetch_first.t | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/t/sqlmaker/limit_dialects/fetch_first.t b/t/sqlmaker/limit_dialects/fetch_first.t index 10d3e6022..c521b528a 100644 --- a/t/sqlmaker/limit_dialects/fetch_first.t +++ b/t/sqlmaker/limit_dialects/fetch_first.t @@ -114,6 +114,15 @@ for my $ord_set ( exselect_outer => 'ORDER__BY__001, ORDER__BY__002, ORDER__BY__003', exselect_inner => 'title AS ORDER__BY__001, bar AS ORDER__BY__002, sensors AS ORDER__BY__003', }, + + { + order_by => [ + 'name', + ], + order_inner => 'name', + order_outer => 'name DESC', + order_req => 'name', + }, ) { my $o_sel = $ord_set->{exselect_outer} ? ', ' . $ord_set->{exselect_outer} @@ -124,8 +133,13 @@ for my $ord_set ( : '' ; + my $rs = $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}}); + + # query actually works + ok( defined $rs->count, 'Query actually works' ); + is_same_sql_bind( - $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query, + $rs->as_query, "(SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name FROM ( SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name$o_sel @@ -145,6 +159,7 @@ for my $ord_set ( [ [ { sqlt_datatype => 'varchar', sqlt_size => 100, dbic_colname => 'source' } => 'Library' ] ], ); + } # with groupby From b775fa8e88ff5a798e4b4a198b782c76edd93f6b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 10 Apr 2014 13:10:55 +0200 Subject: [PATCH 006/548] Add explicit AS keyword to RowNum (Oracle) limit dialect This should have no effect on the actual query, only done to bring in line with the generalized DQ engine --- lib/DBIx/Class/SQLMaker/LimitDialects.pm | 8 ++++---- t/sqlmaker/limit_dialects/rownum.t | 12 ++++++------ t/sqlmaker/limit_dialects/torture.t | 6 +++--- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/lib/DBIx/Class/SQLMaker/LimitDialects.pm b/lib/DBIx/Class/SQLMaker/LimitDialects.pm index ec9300aec..9abaded16 100644 --- a/lib/DBIx/Class/SQLMaker/LimitDialects.pm +++ b/lib/DBIx/Class/SQLMaker/LimitDialects.pm @@ -221,7 +221,7 @@ sub _FirstSkip { Depending on the resultset attributes one of: SELECT * FROM ( - SELECT *, ROWNUM rownum__index FROM ( + SELECT *, ROWNUM AS rownum__index FROM ( SELECT ... ) WHERE ROWNUM <= ($limit+$offset) ) WHERE rownum__index >= ($offset+1) @@ -229,7 +229,7 @@ Depending on the resultset attributes one of: or SELECT * FROM ( - SELECT *, ROWNUM rownum__index FROM ( + SELECT *, ROWNUM AS rownum__index FROM ( SELECT ... ) ) WHERE rownum__index BETWEEN ($offset+1) AND ($limit+$offset) @@ -286,7 +286,7 @@ EOS return <{selection_outer} FROM ( - SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM ( + SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM ( SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having} ) $qalias WHERE ROWNUM <= ? ) $qalias WHERE $idx_name >= ? 
@@ -297,7 +297,7 @@ EOS return <{selection_outer} FROM ( - SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM ( + SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM ( SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having} ) $qalias ) $qalias WHERE $idx_name BETWEEN ? AND ? diff --git a/t/sqlmaker/limit_dialects/rownum.t b/t/sqlmaker/limit_dialects/rownum.t index b01790f4a..6985c23ff 100644 --- a/t/sqlmaker/limit_dialects/rownum.t +++ b/t/sqlmaker/limit_dialects/rownum.t @@ -42,7 +42,7 @@ for my $test_set ( sql => '( SELECT id, artist__id, bleh FROM ( - SELECT id, artist__id, bleh, ROWNUM rownum__index + SELECT id, artist__id, bleh, ROWNUM AS rownum__index FROM ( SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR (foo.womble, "blah") AS bleh FROM cd me @@ -70,7 +70,7 @@ for my $test_set ( sql => '( SELECT id, artist__id, bleh FROM ( - SELECT id, artist__id, bleh, ROWNUM rownum__index + SELECT id, artist__id, bleh, ROWNUM AS rownum__index FROM ( SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR(foo.womble, "blah") AS bleh FROM cd me @@ -102,7 +102,7 @@ for my $test_set ( sql => '( SELECT id, artist__id, bleh FROM ( - SELECT id, artist__id, bleh, ROWNUM rownum__index + SELECT id, artist__id, bleh, ROWNUM AS rownum__index FROM ( SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR(foo.womble, "blah") AS bleh FROM cd me @@ -130,7 +130,7 @@ for my $test_set ( sql => '( SELECT id, ends_with_me__id FROM ( - SELECT id, ends_with_me__id, ROWNUM rownum__index + SELECT id, ends_with_me__id, ROWNUM AS rownum__index FROM ( SELECT foo.id AS id, ends_with_me.id AS ends_with_me__id FROM cd me @@ -157,7 +157,7 @@ for my $test_set ( sql => '( SELECT id, ends_with_me__id FROM ( - SELECT id, ends_with_me__id, ROWNUM rownum__index + SELECT id, ends_with_me__id, ROWNUM AS rownum__index FROM ( SELECT foo.id AS id, ends_with_me.id AS ends_with_me__id FROM cd me @@ -202,7 +202,7 @@ is_same_sql_bind( '( SELECT owner_name, owner_books FROM ( - SELECT owner_name, owner_books, ROWNUM rownum__index + SELECT owner_name, owner_books, ROWNUM AS rownum__index FROM ( SELECT owner.name AS owner_name, ( SELECT COUNT( * ) FROM owners owner WHERE (count.id = owner.id)) AS owner_books diff --git a/t/sqlmaker/limit_dialects/torture.t b/t/sqlmaker/limit_dialects/torture.t index f4e7d1d15..f27318986 100644 --- a/t/sqlmaker/limit_dialects/torture.t +++ b/t/sqlmaker/limit_dialects/torture.t @@ -392,7 +392,7 @@ my $tests = { '( SELECT me.id, owner__id, owner__name, bar, baz FROM ( - SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index + SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz FROM books me @@ -428,7 +428,7 @@ my $tests = { '( SELECT me.id, owner__id, owner__name, bar, baz FROM ( - SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index + SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? 
AS baz FROM books me @@ -459,7 +459,7 @@ my $tests = { FROM ( SELECT me.name, me.id FROM ( - SELECT me.name, me.id, ROWNUM rownum__index + SELECT me.name, me.id, ROWNUM AS rownum__index FROM ( SELECT me.name, me.id FROM owners me From f3d7b702505f7eabec97857ffcba3f99edcd9d8d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Apr 2014 06:36:01 +0200 Subject: [PATCH 007/548] Fix missing true return lost during e5a62c46f This never resulted in a serious problem because it was papered over by a bug in Class::C3::Componentised. Addressing it at the source requires a bit extra coding, hence no new CC3C release yet. --- lib/DBIx/Class/Storage/DBI/Firebird.pm | 9 ++------- lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm | 2 ++ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBI/Firebird.pm b/lib/DBIx/Class/Storage/DBI/Firebird.pm index f0178bd09..e615eb050 100644 --- a/lib/DBIx/Class/Storage/DBI/Firebird.pm +++ b/lib/DBIx/Class/Storage/DBI/Firebird.pm @@ -8,9 +8,10 @@ use warnings; # in ::Storage::DBI::InterBase as opposed to inheriting # directly from ::Storage::DBI::Firebird::Common use base qw/DBIx::Class::Storage::DBI::InterBase/; - use mro 'c3'; +1; + =head1 NAME DBIx::Class::Storage::DBI::Firebird - Driver for the Firebird RDBMS via @@ -21,10 +22,6 @@ L This is an empty subclass of L for use with L, see that driver for details. -=cut - -1; - =head1 AUTHOR See L and L. @@ -33,5 +30,3 @@ See L and L. You may distribute this code under the same terms as Perl itself. -=cut -# vim:sts=2 sw=2: diff --git a/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm b/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm index 095f41663..c5254b491 100644 --- a/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm +++ b/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm @@ -69,3 +69,5 @@ You may distribute this code under the same terms as Perl itself. =cut # vim:sts=2 sw=2: + +1; From 93b306f050130d5f2821ae2c8323cd496ec89398 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 11 Mar 2014 07:39:56 +0100 Subject: [PATCH 008/548] Minimal optimization of the new+prefetch codepath (no func. 
changes) --- lib/DBIx/Class/Row.pm | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 000498ae6..cad0185cd 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -1240,17 +1240,15 @@ sub inflate_result { $class->throw_exception("No accessor type declared for prefetched relationship '$relname'") unless $relinfo->{attrs}{accessor}; + my $rel_rs = $new->related_resultset($relname); + my @rel_objects; if ( - $prefetch->{$relname} - and - @{$prefetch->{$relname}} + @{ $prefetch->{$relname} || [] } and ref($prefetch->{$relname}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class ) { - my $rel_rs = $new->related_resultset($relname); - if (ref $prefetch->{$relname}[0] eq 'ARRAY') { my $rel_rsrc = $rel_rs->result_source; my $rel_class = $rel_rs->result_class; @@ -1274,7 +1272,7 @@ sub inflate_result { $new->{_inflated_column}{$relname} = $rel_objects[0]; } - $new->related_resultset($relname)->set_cache(\@rel_objects); + $rel_rs->set_cache(\@rel_objects); } } From 3a3ebbbe64caf0b9f790f8cc88a5eeab75e128da Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 11 Apr 2014 00:38:10 +0200 Subject: [PATCH 009/548] Clarify that sql_maker is just a getter at this point --- lib/DBIx/Class/Storage/DBI.pm | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 9b5e3b03d..5245bea11 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -991,8 +991,13 @@ sub _get_dbh { return $self->_dbh; } +# *DELIBERATELY* not a setter (for the time being) +# Too intertwined with everything else for any kind of sanity sub sql_maker { - my ($self) = @_; + my $self = shift; + + $self->throw_exception('sql_maker() is not a setter method') if @_; + unless ($self->_sql_maker) { my $sql_maker_class = $self->sql_maker_class; From b6a469f7cbcc6114eb59a8557906af4ec6b0e9a4 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 11 Apr 2014 00:43:35 +0200 Subject: [PATCH 010/548] Properly detect and test mysql v3 default JOIN behavior While investigating how to rewrite the rather useless 39712b481, it became clear that not only can the test be performed offline, but that there is already a pseudo-test due to sqlite's version of 3.x.y being mistaken for MySQL 3 Properly fix the test to check behavior on both versions 3 and 4, and in the process fix a potential bug of a stale sqlmaker when a reconnect cycle takes place against a *different* physical rdbms (which btw makes me realize another problem with trying to do mixed-environment replication... 
sigh) --- lib/DBIx/Class/Storage/DBI.pm | 1 + lib/DBIx/Class/Storage/DBI/mysql.pm | 16 ++++++++------- t/71mysql.t | 14 ------------- t/sqlmaker/mysql.t | 31 ++++++++++++++++++++++++++++- 4 files changed, 40 insertions(+), 22 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 5245bea11..1a302ce5a 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1061,6 +1061,7 @@ sub _populate_dbh { $self->_dbh(undef); # in case ->connected failed we might get sent here $self->_dbh_details({}); # reset everything we know + $self->_sql_maker(undef); # this may also end up being different $self->_dbh($self->_connect); diff --git a/lib/DBIx/Class/Storage/DBI/mysql.pm b/lib/DBIx/Class/Storage/DBI/mysql.pm index 83ee8b2c7..06059834e 100644 --- a/lib/DBIx/Class/Storage/DBI/mysql.pm +++ b/lib/DBIx/Class/Storage/DBI/mysql.pm @@ -106,15 +106,17 @@ sub _run_connection_actions { sub sql_maker { my $self = shift; - unless ($self->_sql_maker) { - my $maker = $self->next::method (@_); + # it is critical to get the version *before* calling next::method + # otherwise the potential connect will obliterate the sql_maker + # next::method will populate in the _sql_maker accessor + my $mysql_ver = $self->_server_info->{normalized_dbms_version}; - # mysql 3 does not understand a bare JOIN - my $mysql_ver = $self->_dbh_get_info('SQL_DBMS_VER'); - $maker->{_default_jointype} = 'INNER' if $mysql_ver =~ /^3/; - } + my $sm = $self->next::method(@_); + + # mysql 3 does not understand a bare JOIN + $sm->{_default_jointype} = 'INNER' if $mysql_ver < 4; - return $self->_sql_maker; + $sm; } sub sqlt_type { diff --git a/t/71mysql.t b/t/71mysql.t index 242989e7a..e1e68ee83 100644 --- a/t/71mysql.t +++ b/t/71mysql.t @@ -199,20 +199,6 @@ lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die'; my $cd = $rs->next; is ($cd->artist->name, $artist->name, 'Prefetched artist'); }, 'join does not throw (mysql 3 test)'; - - # induce a jointype override, make sure it works even if we don't have mysql3 - local $schema->storage->sql_maker->{_default_jointype} = 'inner'; - is_same_sql_bind ( - $rs->as_query, - '( - SELECT `me`.`cdid`, `me`.`artist`, `me`.`title`, `me`.`year`, `me`.`genreid`, `me`.`single_track`, - `artist`.`artistid`, `artist`.`name`, `artist`.`rank`, `artist`.`charfield` - FROM cd `me` - INNER JOIN `artist` `artist` ON `artist`.`artistid` = `me`.`artist` - )', - [], - 'overridden default join type works', - ); } ## Can we properly deal with the null search problem? diff --git a/t/sqlmaker/mysql.t b/t/sqlmaker/mysql.t index 2755a3d5b..b5ce8a59a 100644 --- a/t/sqlmaker/mysql.t +++ b/t/sqlmaker/mysql.t @@ -12,6 +12,7 @@ use DBIC::DebugObj; my $schema = DBICTest::Schema->connect (DBICTest->_database, { quote_char => '`' }); # cheat require DBIx::Class::Storage::DBI::mysql; +*DBIx::Class::Storage::DBI::mysql::_get_server_version = sub { 5 }; bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); # check that double-subqueries are properly wrapped @@ -102,7 +103,7 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); FROM ( SELECT `artist`.`artistid` FROM cd `me` - INNER JOIN `artist` `artist` + JOIN `artist` `artist` ON `artist`.`artistid` = `me`.`artist` WHERE `artist`.`name` LIKE ? 
) `_forced_double_subquery` @@ -138,4 +139,32 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); ); } +# Test support for inner joins on mysql v3 +for ( + [ 3 => 'INNER JOIN' ], + [ 4 => 'JOIN' ], +) { + my ($ver, $join_op) = @$_; + + no warnings 'redefine'; + local *DBIx::Class::Storage::DBI::mysql::_get_server_version = sub { $ver }; + + # we do not care at this point if data is available, just do a reconnect cycle + # to clear all caches + $schema->storage->disconnect; + $schema->storage->ensure_connected; + + is_same_sql_bind ( + $schema->resultset('CD')->search ({}, { prefetch => 'artist' })->as_query, + "( + SELECT `me`.`cdid`, `me`.`artist`, `me`.`title`, `me`.`year`, `me`.`genreid`, `me`.`single_track`, + `artist`.`artistid`, `artist`.`name`, `artist`.`rank`, `artist`.`charfield` + FROM cd `me` + $join_op `artist` `artist` ON `artist`.`artistid` = `me`.`artist` + )", + [], + "default join type works for version $ver", + ); +} + done_testing; From b411156e1759840702bb2b7ddf681a5b103f9585 Mon Sep 17 00:00:00 2001 From: Mintywalker Date: Sat, 12 Apr 2014 18:13:10 +0100 Subject: [PATCH 011/548] $pedantry =~ s/columns/column/ --- lib/DBIx/Class/ResultSource.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 3233e3ab4..8cc409ded 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -201,7 +201,7 @@ schema, see L. { is_nullable => 1 } -Set this to a true value for a columns that is allowed to contain NULL +Set this to a true value for a column that is allowed to contain NULL values, default is false. This is currently only used to create tables from your schema, see L. From e24b5f64b2e8cd3fc745a603a2bacce3aef3e66a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 20 May 2014 16:16:40 +0200 Subject: [PATCH 012/548] Remove forgotten part of a48693f4 (incompletely reverted by e327f126) --- t/lib/PrefetchBug.pm | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 t/lib/PrefetchBug.pm diff --git a/t/lib/PrefetchBug.pm b/t/lib/PrefetchBug.pm deleted file mode 100644 index 278bf5b51..000000000 --- a/t/lib/PrefetchBug.pm +++ /dev/null @@ -1,11 +0,0 @@ -package - PrefetchBug; - -use strict; -use warnings; - -use base qw/DBIx::Class::Schema/; - -__PACKAGE__->load_classes(); - -1; From a5fc497548f9f980d3fd1deb7b7cb9095d079651 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 21 May 2014 09:57:50 +0200 Subject: [PATCH 013/548] Fix bogus reference to related class (introduced way back in 97c96475) --- lib/DBIx/Class/Relationship/Base.pm | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 20a9c17dd..a41d6b4df 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -288,7 +288,7 @@ Then, assuming MyApp::Schema::LinerNotes has an accessor named notes, you can do For a 'belongs_to relationship, note the 'cascade_update': - MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd, + MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd, { proxy => ['title'], cascade_update => 1 } ); $track->title('New Title'); @@ -299,7 +299,7 @@ For a 'belongs_to relationship, note the 'cascade_update': A hashref where each key is the accessor you want installed in the main class, and its value is the name of the original in the foreign class. 
- MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd', { + MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd', { proxy => { cd_title => 'title' }, }); @@ -309,7 +309,7 @@ This will create an accessor named C on the C<$track> result object. NOTE: you can pass a nested struct too, for example: - MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd', { + MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd', { proxy => [ 'year', { cd_title => 'title' } ], }); From 90b2bd88b27275885ebaad6de8545ad264bf9cb6 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 22 May 2014 13:37:08 +0200 Subject: [PATCH 014/548] Fix multi-value literal populate not working with simplified bind spec This arcane use-case got missed when 1b5ddf23 was integrated --- Changes | 2 ++ lib/DBIx/Class/Storage/DBI.pm | 40 +++++++++++++++++++++++------------ t/100populate.t | 2 +- 3 files changed, 29 insertions(+), 15 deletions(-) diff --git a/Changes b/Changes index 387cb0a05..784b3d183 100644 --- a/Changes +++ b/Changes @@ -4,6 +4,8 @@ Revision history for DBIx::Class - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver routines, triggering a connection before the set-cycle is finished + - Fix multi-value literal populate not working with simplified bind + specifications 0.08270 2014-01-30 21:54 (PST) * Fixes diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 1a302ce5a..75c843486 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1695,13 +1695,10 @@ sub _gen_sql_bind { sub _resolve_bindattrs { my ($self, $ident, $bind, $colinfos) = @_; - $colinfos ||= {}; - my $resolve_bindinfo = sub { #my $infohash = shift; - %$colinfos = %{ $self->_resolve_column_info($ident) } - unless keys %$colinfos; + $colinfos ||= { %{ $self->_resolve_column_info($ident) } }; my $ret; if (my $col = $_[0]->{dbic_colname}) { @@ -1721,10 +1718,16 @@ sub _resolve_bindattrs { my $resolved = ( ref $_ ne 'ARRAY' or @$_ != 2 ) ? [ {}, $_ ] : ( ! defined $_->[0] ) ? [ {}, $_->[1] ] - : (ref $_->[0] eq 'HASH') ? [ (exists $_->[0]{dbd_attrs} or $_->[0]{sqlt_datatype}) - ? $_->[0] - : $resolve_bindinfo->($_->[0]) - , $_->[1] ] + : (ref $_->[0] eq 'HASH') ? [( + ! keys %{$_->[0]} + or + exists $_->[0]{dbd_attrs} + or + $_->[0]{sqlt_datatype} + ) ? $_->[0] + : $resolve_bindinfo->($_->[0]) + , $_->[1] + ] : (ref $_->[0] eq 'SCALAR') ? [ { sqlt_datatype => ${$_->[0]} }, $_->[1] ] : [ $resolve_bindinfo->( { dbic_colname => $_->[0] } @@ -2291,12 +2294,21 @@ sub _dbh_execute_for_fetch { my $fetch_tuple = sub { return undef if ++$fetch_row_idx > $#$data; - return [ map { defined $_->{_literal_bind_subindex} - ? ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]} - ->[ $_->{_literal_bind_subindex} ] - ->[1] - : $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ] - } map { $_->[0] } @$proto_bind]; + return [ map { + ! defined $_->{_literal_bind_subindex} + + ? $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ] + + # There are no attributes to resolve here - we already did everything + # when we constructed proto_bind. 
However we still want to sanity-check + # what the user supplied, so pass stuff through to the resolver *anyway* + : $self->_resolve_bindattrs ( + undef, # a fake rsrc + [ ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}->[ $_->{_literal_bind_subindex} ] ], + {}, # a fake column_info bag + )->[0][1] + + } map { $_->[0] } @$proto_bind ]; }; my $tuple_status = []; diff --git a/t/100populate.t b/t/100populate.t index 177231a46..4a3f0ac7a 100644 --- a/t/100populate.t +++ b/t/100populate.t @@ -196,7 +196,7 @@ is($link7->title, 'gtitle', 'Link 7 title'); # test mixed binds with literal sql/bind $rs->populate([ map { +{ - url => \[ '? || ?', [ {} => 'cpan.org_' ], [ undef, $_ ] ], + url => \[ '? || ?', [ {} => 'cpan.org_' ], $_ ], title => "The 'best of' cpan", } } (1 .. 5) ]); From ad1d374e603e34f4f58d1004d0bf4e2b9982422d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dagfinn=20Ilmari=20Manns=C3=A5ker?= Date: Sat, 12 Apr 2014 18:29:47 +0100 Subject: [PATCH 015/548] Refactor _recurse_fields to return the bind values Only ->select actually wants it in $self->{select_bind}, the others either don't care or want them somewhere else. --- lib/DBIx/Class/ResultSetColumn.pm | 2 +- lib/DBIx/Class/SQLMaker.pm | 34 +++++++++++++----------- lib/DBIx/Class/SQLMaker/LimitDialects.pm | 9 +++---- lib/DBIx/Class/Storage/DBIHacks.pm | 3 +-- 4 files changed, 25 insertions(+), 23 deletions(-) diff --git a/lib/DBIx/Class/ResultSetColumn.pm b/lib/DBIx/Class/ResultSetColumn.pm index 1e2a0ebab..b083b469f 100644 --- a/lib/DBIx/Class/ResultSetColumn.pm +++ b/lib/DBIx/Class/ResultSetColumn.pm @@ -494,7 +494,7 @@ sub _resultset { # collapse the selector to a literal so that it survives the distinct parse # if it turns out to be an aggregate - at least the user will get a proper exception # instead of silent drop of the group_by altogether - $select = \ $rsrc->storage->sql_maker->_recurse_fields($select); + $select = \[ $rsrc->storage->sql_maker->_recurse_fields($select) ]; } } diff --git a/lib/DBIx/Class/SQLMaker.pm b/lib/DBIx/Class/SQLMaker.pm index e863a0ff6..319d3fb35 100644 --- a/lib/DBIx/Class/SQLMaker.pm +++ b/lib/DBIx/Class/SQLMaker.pm @@ -110,7 +110,7 @@ sub select { my ($self, $table, $fields, $where, $rs_attrs, $limit, $offset) = @_; - $fields = $self->_recurse_fields($fields); + ($fields, @{$self->{select_bind}}) = $self->_recurse_fields($fields); if (defined $offset) { $self->throw_exception('A supplied offset must be a non-negative integer') @@ -231,42 +231,48 @@ sub _recurse_fields { return $$fields if $ref eq 'SCALAR'; if ($ref eq 'ARRAY') { - return join(', ', map { $self->_recurse_fields($_) } @$fields); + my (@select, @bind); + for my $field (@$fields) { + my ($select, @new_bind) = $self->_recurse_fields($field); + push @select, $select; + push @bind, @new_bind; + } + return (join(', ', @select), @bind); } elsif ($ref eq 'HASH') { my %hash = %$fields; # shallow copy my $as = delete $hash{-as}; # if supplied - my ($func, $args, @toomany) = %hash; + my ($func, $rhs, @toomany) = %hash; # there should be only one pair if (@toomany) { $self->throw_exception( "Malformed select argument - too many keys in hash: " . join (',', keys %$fields ) ); } - if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) { + if (lc ($func) eq 'distinct' && ref $rhs eq 'ARRAY' && @$rhs > 1) { $self->throw_exception ( 'The select => { distinct => ... } syntax is not supported for multiple columns.' - .' Instead please use { group_by => [ qw/' . (join ' ', @$args) . '/ ] }' - .' or { select => [ qw/' . 
(join ' ', @$args) . '/ ], distinct => 1 }' + .' Instead please use { group_by => [ qw/' . (join ' ', @$rhs) . '/ ] }' + .' or { select => [ qw/' . (join ' ', @$rhs) . '/ ], distinct => 1 }' ); } + my ($rhs_sql, @rhs_bind) = $self->_recurse_fields($rhs); my $select = sprintf ('%s( %s )%s', $self->_sqlcase($func), - $self->_recurse_fields($args), + $rhs_sql, $as ? sprintf (' %s %s', $self->_sqlcase('as'), $self->_quote ($as) ) : '' ); - return $select; + return ($select, @rhs_bind); } # Is the second check absolutely necessary? elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) { - push @{$self->{select_bind}}, @{$$fields}[1..$#$$fields]; - return $$fields->[0]; + return @{$$fields}; } else { $self->throw_exception( $ref . qq{ unexpected in _recurse_fields()} ); @@ -288,11 +294,9 @@ sub _parse_rs_attrs { my $sql = ''; if ($arg->{group_by}) { - # horrible horrible, waiting for refactor - local $self->{select_bind}; - if (my $g = $self->_recurse_fields($arg->{group_by}) ) { - $sql .= $self->_sqlcase(' group by ') . $g; - push @{$self->{group_bind} ||= []}, @{$self->{select_bind}||[]}; + if ( my ($group_sql, @group_bind) = $self->_recurse_fields($arg->{group_by}) ) { + $sql .= $self->_sqlcase(' group by ') . $group_sql; + push @{$self->{group_bind}}, @group_bind; } } diff --git a/lib/DBIx/Class/SQLMaker/LimitDialects.pm b/lib/DBIx/Class/SQLMaker/LimitDialects.pm index 9abaded16..da65b7c2e 100644 --- a/lib/DBIx/Class/SQLMaker/LimitDialects.pm +++ b/lib/DBIx/Class/SQLMaker/LimitDialects.pm @@ -725,23 +725,22 @@ sub _subqueried_limit_attrs { my ($re_sep, $re_alias) = map { quotemeta $_ } ( $self->{name_sep}, $rs_attrs->{alias} ); - # insulate from the multiple _recurse_fields calls below - local $self->{select_bind}; - # correlate select and as, build selection index my (@sel, $in_sel_index); for my $i (0 .. $#{$rs_attrs->{select}}) { my $s = $rs_attrs->{select}[$i]; - my $sql_sel = $self->_recurse_fields ($s); my $sql_alias = (ref $s) eq 'HASH' ? 
$s->{-as} : undef; + # we throw away the @bind here deliberately + my ($sql_sel) = $self->_recurse_fields ($s); + push @sel, { arg => $s, sql => $sql_sel, unquoted_sql => do { local $self->{quote_char}; - $self->_recurse_fields ($s); + ($self->_recurse_fields ($s))[0]; # ignore binds again }, as => $sql_alias diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 80283dc3c..3c7d1c43e 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -389,7 +389,6 @@ sub _resolve_aliastypes_from_select_args { my $sql_maker = $self->sql_maker; # these are throw away results, do not pollute the bind stack - local $sql_maker->{select_bind}; local $sql_maker->{where_bind}; local $sql_maker->{group_bind}; local $sql_maker->{having_bind}; @@ -429,7 +428,7 @@ sub _resolve_aliastypes_from_select_args { ), ], selecting => [ - map { $sql_maker->_recurse_fields($_) } @{$attrs->{select}}, + map { ($sql_maker->_recurse_fields($_))[0] } @{$attrs->{select}}, ], ordering => [ map { $_->[0] } $self->_extract_order_criteria ($attrs->{order_by}, $sql_maker), From 885c3dbda2628fa3c1c979f7ddd48443bcbaf5ab Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 16 May 2014 11:31:21 +0200 Subject: [PATCH 016/548] Add test ensuring we do not lose binds on esoteric RSC+distinct Test inspired by auditing cc2b92553 (a precursor of ad1d374e60) --- lib/DBIx/Class/ResultSetColumn.pm | 4 +- t/88result_set_column.t | 62 +++++++++++++++++++------------ 2 files changed, 41 insertions(+), 25 deletions(-) diff --git a/lib/DBIx/Class/ResultSetColumn.pm b/lib/DBIx/Class/ResultSetColumn.pm index b083b469f..3756dbf4a 100644 --- a/lib/DBIx/Class/ResultSetColumn.pm +++ b/lib/DBIx/Class/ResultSetColumn.pm @@ -487,8 +487,8 @@ sub _resultset { unless( $cols{$select} ) { carp_unique( 'Use of distinct => 1 while selecting anything other than a column ' - . 'declared on the primary ResultSource is deprecated - please supply ' - . 'an explicit group_by instead' + . 'declared on the primary ResultSource is deprecated (you selected ' + . "'$self->{_as}') - please supply an explicit group_by instead" ); # collapse the selector to a literal so that it survives the distinct parse diff --git a/t/88result_set_column.t b/t/88result_set_column.t index b1c1e96d3..226c20965 100644 --- a/t/88result_set_column.t +++ b/t/88result_set_column.t @@ -248,17 +248,23 @@ is_same_sql_bind ( $schema->resultset('CD')->create({ artist => 1, title => 'dealbroker no tracks', year => 2001 }); + my $yp1 = \[ 'year + ?', 1 ]; + my $rs = $schema->resultset ('CD')->search ( { 'artist.name' => { '!=', 'evancarrol' }, 'tracks.trackid' => { '!=', undef } }, { order_by => 'me.year', join => [qw(artist tracks)], - columns => [ 'year', { cnt => { count => 'me.cdid' }} ], + columns => [ + 'year', + { cnt => { count => 'me.cdid' } }, + { year_plus_one => $yp1 }, + ], }, ); my $rstypes = { - 'explicitly grouped' => $rs->search_rs({}, { group_by => 'year' }), + 'explicitly grouped' => $rs->search_rs({}, { group_by => [ 'year', $yp1 ] } ), 'implicitly grouped' => $rs->search_rs({}, { distinct => 1 }), }; @@ -277,27 +283,37 @@ is_same_sql_bind ( # would silently drop the group_by entirely, likely ending up with nonsensival results # With the current behavior the user will at least get a nice fat exception from the # RDBMS (or maybe the RDBMS will even decide to handle the situation sensibly...) 
- warnings_exist { is_same_sql_bind( - $rstypes->{'implicitly grouped'}->get_column('cnt')->as_query, - '( - SELECT COUNT( me.cdid ) - FROM cd me - JOIN artist artist - ON artist.artistid = me.artist - LEFT JOIN track tracks - ON tracks.cd = me.cdid - WHERE artist.name != ? AND tracks.trackid IS NOT NULL - GROUP BY COUNT( me.cdid ) - ORDER BY MIN(me.year) - )', - [ [ { dbic_colname => 'artist.name', sqlt_datatype => 'varchar', sqlt_size => 100 } - => 'evancarrol' - ] ], - 'Expected (though nonsensical) SQL generated on rscol-with-distinct-over-function', - ) } qr/ - \QUse of distinct => 1 while selecting anything other than a column \E - \Qdeclared on the primary ResultSource is deprecated\E - /x, 'deprecation warning'; + for ( + [ cnt => 'COUNT( me.cdid )' ], + [ year_plus_one => 'year + ?' => [ {} => 1 ] ], + ) { + my ($col, $sel_grp_sql, @sel_grp_bind) = @$_; + + warnings_exist { is_same_sql_bind( + $rstypes->{'implicitly grouped'}->get_column($col)->as_query, + "( + SELECT $sel_grp_sql + FROM cd me + JOIN artist artist + ON artist.artistid = me.artist + LEFT JOIN track tracks + ON tracks.cd = me.cdid + WHERE artist.name != ? AND tracks.trackid IS NOT NULL + GROUP BY $sel_grp_sql + ORDER BY MIN(me.year) + )", + [ + @sel_grp_bind, + [ { dbic_colname => 'artist.name', sqlt_datatype => 'varchar', sqlt_size => 100 } + => 'evancarrol' ], + @sel_grp_bind, + ], + 'Expected (though nonsensical) SQL generated on rscol-with-distinct-over-function', + ) } qr/ + \QUse of distinct => 1 while selecting anything other than a column \E + \Qdeclared on the primary ResultSource is deprecated (you selected '$col')\E + /x, 'deprecation warning'; + } { local $TODO = 'multiplying join leaks through to the count aggregate... this may never actually work'; From 4f90e9f81ee3fc1ed1a145c15a1676674c0c54b2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 22 May 2014 14:34:11 +0200 Subject: [PATCH 017/548] Remove dead code missed by cleanup b72339859 --- lib/DBIx/Class/ResultSet.pm | 33 --------------------------------- 1 file changed, 33 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index ffade2120..b0b0e017e 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -1090,39 +1090,6 @@ sub single { $self->_construct_results->[0]; } - -# _collapse_query -# -# Recursively collapse the query, accumulating values for each column. - -sub _collapse_query { - my ($self, $query, $collapsed) = @_; - - $collapsed ||= {}; - - if (ref $query eq 'ARRAY') { - foreach my $subquery (@$query) { - next unless ref $subquery; # -or - $collapsed = $self->_collapse_query($subquery, $collapsed); - } - } - elsif (ref $query eq 'HASH') { - if (keys %$query and (keys %$query)[0] eq '-and') { - foreach my $subquery (@{$query->{-and}}) { - $collapsed = $self->_collapse_query($subquery, $collapsed); - } - } - else { - foreach my $col (keys %$query) { - my $value = $query->{$col}; - $collapsed->{$col}{$value}++; - } - } - } - - return $collapsed; -} - =head2 get_column =over 4 From 8d005ad9929e4bf227919cb6374e2a9e9689324f Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 17 May 2014 11:39:49 +0200 Subject: [PATCH 018/548] Massively refactor and sanify condition collapsing Wow... what a ride. 
This commit adds a faithful reimplementation of the SQLA descend algorithm, but instead of SQL produces a reduced HASH that nevertheless corresponds to the original query 1:1 (or so I *really* hope) This is another one of these "I can try it with DQ but I don't even have tests" stories, so once again opted to implement things the "dumb" way. The benefits are quite substantial: - Better deduplication of WHERE condition - Consolidated functions - the vaguely similar logic in the current version of _collapse_cond replaces almost the entirety of: ::DBIHacks::_extract_fixed_condition_columns ::ResultSet::_collapse_cond ::ResultSet::_stack_cond - Extra fixes for create/populate inheritance corner cases - More predictable SQL condition generation order (which incidentally may prove problematic down the road with broken tests, but oh well, we'll burn when we get there) - Ton of extra tests and corner cases We even managed to fulfill a longstanding TODO, even though it is a *lucky* side-effect and not a real fix. Making a note to address this later... sigh. This work started from d8b7d9f58..337f3ee80, which while taking the right direction had too many loose ends. ilmari++ --- Changes | 5 + lib/DBIx/Class/ResultSet.pm | 138 ++++--------- lib/DBIx/Class/Storage/DBIHacks.pm | 261 +++++++++++++++++++++--- t/101populate_rs.t | 15 +- t/prefetch/correlated.t | 24 +-- t/relationship/update_or_create_multi.t | 2 +- t/resultset/as_query.t | 8 +- t/resultset/bind_attr.t | 3 - t/resultset/update_delete.t | 5 +- t/sqlmaker/dbihacks_internals.t | 173 ++++++++++++++++ t/sqlmaker/limit_dialects/torture.t | 172 ++++++++-------- 11 files changed, 577 insertions(+), 229 deletions(-) create mode 100644 t/sqlmaker/dbihacks_internals.t diff --git a/Changes b/Changes index 784b3d183..e36363746 100644 --- a/Changes +++ b/Changes @@ -6,6 +6,11 @@ Revision history for DBIx::Class routines, triggering a connection before the set-cycle is finished - Fix multi-value literal populate not working with simplified bind specifications + - Massively improve the implied resultset condition parsing - now all + applicable conditions within a resultset should be properly picked + up by create() and populate() + - Ensure definitive condition extractor handles bizarre corner cases + without bombing out (RT#93244) 0.08270 2014-01-30 21:54 (PST) * Fixes diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index b0b0e017e..292dbc366 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -585,60 +585,32 @@ sub _normalize_selection { sub _stack_cond { my ($self, $left, $right) = @_; - # collapse single element top-level conditions - # (single pass only, unlikely to need recursion) - for ($left, $right) { - if (ref $_ eq 'ARRAY') { - if (@$_ == 0) { - $_ = undef; - } - elsif (@$_ == 1) { - $_ = $_->[0]; - } - } - elsif (ref $_ eq 'HASH') { - my ($first, $more) = keys %$_; + ( + (ref $_ eq 'ARRAY' and !@$_) + or + (ref $_ eq 'HASH' and ! keys %$_) + ) and $_ = undef for ($left, $right); - # empty hash - if (! defined $first) { - $_ = undef; - } - # one element hash - elsif (! defined $more) { - if ($first eq '-and' and ref $_->{'-and'} eq 'HASH') { - $_ = $_->{'-and'}; - } - elsif ($first eq '-or' and ref $_->{'-or'} eq 'ARRAY') { - $_ = $_->{'-or'}; - } - } - } + # either on of the two undef or both undef + if ( ( (defined $left) xor (defined $right) ) or ! defined $left ) { + return defined $left ? 
$left : $right; } - # merge hashes with weeding out of duplicates (simple cases only) - if (ref $left eq 'HASH' and ref $right eq 'HASH') { + my $cond = $self->result_source->schema->storage->_collapse_cond({ -and => [$left, $right] }); - # shallow copy to destroy - $right = { %$right }; - for (grep { exists $right->{$_} } keys %$left) { - # the use of eq_deeply here is justified - the rhs of an - # expression can contain a lot of twisted weird stuff - delete $right->{$_} if Data::Compare::Compare( $left->{$_}, $right->{$_} ); - } + for my $c (grep { ref $cond->{$_} eq 'ARRAY' and ($cond->{$_}[0]||'') eq '-and' } keys %$cond) { - $right = undef unless keys %$right; - } + my @vals = sort @{$cond->{$c}}[ 1..$#{$cond->{$c}} ]; + my @fin = shift @vals; + for my $v (@vals) { + push @fin, $v unless Data::Compare::Compare( $fin[-1], $v ); + } - if (defined $left xor defined $right) { - return defined $left ? $left : $right; - } - elsif (! defined $left) { - return undef; - } - else { - return { -and => [ $left, $right ] }; + $cond->{$c} = (@fin == 1) ? $fin[0] : [-and => @fin ]; } + + $cond; } =head2 search_literal @@ -2466,28 +2438,36 @@ sub _merge_with_rscond { ); } else { - # precedence must be given to passed values over values inherited from - # the cond, so the order here is important. - my $collapsed_cond = $self->_collapse_cond($self->{cond}); - my %implied = %{$self->_remove_alias($collapsed_cond, $alias)}; + if ($self->{cond}) { + my $implied = $self->_remove_alias( + $self->result_source->schema->storage->_collapse_cond($self->{cond}), + $alias, + ); - while ( my($col, $value) = each %implied ) { - my $vref = ref $value; - if ( - $vref eq 'HASH' - and - keys(%$value) == 1 - and - (keys %$value)[0] eq '=' - ) { - $new_data{$col} = $value->{'='}; - } - elsif( !$vref or $vref eq 'SCALAR' or blessed($value) ) { - $new_data{$col} = $value; + for my $c (keys %$implied) { + my $v = $implied->{$c}; + if ( + ! ref $v + or + overload::Method($v, '""') + ) { + $new_data{$c} = $v; + } + elsif ( + ref $v eq 'HASH' and keys %$v == 1 and exists $v->{'='} and ( + ref $v->{'='} eq 'SCALAR' + or + ( ref $v->{'='} eq 'REF' and ref ${$v->{'='}} eq 'ARRAY' ) + ) + ) { + $new_data{$c} = $v->{'='}; + } } } } + # precedence must be given to passed values over values inherited from + # the cond, so the order here is important. %new_data = ( %new_data, %{ $self->_remove_alias($data, $alias) }, @@ -2549,38 +2529,6 @@ sub _has_resolved_attr { return 0; } -# _collapse_cond -# -# Recursively collapse the condition. - -sub _collapse_cond { - my ($self, $cond, $collapsed) = @_; - - $collapsed ||= {}; - - if (ref $cond eq 'ARRAY') { - foreach my $subcond (@$cond) { - next unless ref $subcond; # -or - $collapsed = $self->_collapse_cond($subcond, $collapsed); - } - } - elsif (ref $cond eq 'HASH') { - if (keys %$cond and (keys %$cond)[0] eq '-and') { - foreach my $subcond (@{$cond->{-and}}) { - $collapsed = $self->_collapse_cond($subcond, $collapsed); - } - } - else { - foreach my $col (keys %$cond) { - my $value = $cond->{$col}; - $collapsed->{$col} = $value; - } - } - } - - return $collapsed; -} - # _remove_alias # # Remove the specified alias from the specified query hash. 
A copy is made so diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 3c7d1c43e..4147e81f7 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -709,6 +709,9 @@ sub _resolve_ident_sources { # for all sources sub _resolve_column_info { my ($self, $ident, $colnames) = @_; + + return {} if $colnames and ! @$colnames; + my $alias2src = $self->_resolve_ident_sources($ident); my (%seen_cols, @auto_colnames); @@ -880,8 +883,8 @@ sub _order_by_is_stable { my ($self, $ident, $order_by, $where) = @_; my @cols = ( - (map { $_->[0] } $self->_extract_order_criteria($order_by)), - $where ? @{$self->_extract_fixed_condition_columns($where)} :(), + ( map { $_->[0] } $self->_extract_order_criteria($order_by) ), + ( $where ? @{ $self->_extract_fixed_condition_columns($where) || [] } : () ), ) or return undef; my $colinfo = $self->_resolve_column_info($ident, \@cols); @@ -952,7 +955,7 @@ sub _main_source_order_by_portion_is_stable { my $unqualified_idset = $main_rsrc->_identifying_column_set({ ( $where ? %{ $self->_resolve_column_info( - $main_rsrc, $self->_extract_fixed_condition_columns($where) + $main_rsrc, $self->_extract_fixed_condition_columns($where)||[] ) } : () ), %$order_portion_ci @@ -978,8 +981,212 @@ sub _main_source_order_by_portion_is_stable { die 'How did we get here...'; } +# Attempts to flatten a passed in SQLA condition as much as possible towards +# a plain hashref, *without* altering its semantics. Required by +# create/populate being able to extract definitive conditions from preexisting +# resultset {where} stacks +# +# FIXME - while relatively robust, this is still imperfect, one of the first +# things to tackle with DQ +sub _collapse_cond { + my ($self, $where, $where_is_anded_array) = @_; + + if (! $where) { + return; + } + elsif ($where_is_anded_array or ref $where eq 'HASH') { + + my @pairs; + + my @pieces = $where_is_anded_array ? @$where : $where; + while (@pieces) { + my $chunk = shift @pieces; + + if (ref $chunk eq 'HASH') { + push @pairs, map { [ $_ => $chunk->{$_} ] } sort keys %$chunk; + } + elsif (ref $chunk eq 'ARRAY') { + push @pairs, [ -or => $chunk ] + if @$chunk; + } + elsif ( ! ref $chunk) { + push @pairs, [ $chunk, shift @pieces ]; + } + else { + push @pairs, [ '', $chunk ]; + } + } + + return unless @pairs; + + my @conds = $self->_collapse_cond_unroll_pairs(\@pairs) + or return; + + # Consolidate various @conds back into something more compact + my $fin; + + for my $c (@conds) { + if (ref $c ne 'HASH') { + push @{$fin->{-and}}, $c; + } + else { + for my $col (sort keys %$c) { + if (exists $fin->{$col}) { + my ($l, $r) = ($fin->{$col}, $c->{$col}); + + (ref $_ ne 'ARRAY' or !@$_) and $_ = [ -and => $_ ] for ($l, $r); + + if (@$l and @$r and $l->[0] eq $r->[0] and $l->[0] eq '-and') { + $fin->{$col} = [ -and => map { @$_[1..$#$_] } ($l, $r) ]; + } + else { + $fin->{$col} = [ -and => $fin->{$col}, $c->{$col} ]; + } + } + else { + $fin->{$col} = $c->{$col}; + } + } + } + } + + if ( ref $fin->{-and} eq 'ARRAY' and @{$fin->{-and}} == 1 ) { + my $piece = (delete $fin->{-and})->[0]; + if (ref $piece eq 'ARRAY') { + $fin->{-or} = $fin->{-or} ? [ $piece, $fin->{-or} ] : $piece; + } + elsif (! exists $fin->{''}) { + $fin->{''} = $piece; + } + } + + return $fin; + } + elsif (ref $where eq 'ARRAY') { + my @w = @$where; + + while ( @w and ( + (ref $w[0] eq 'ARRAY' and ! @{$w[0]} ) + or + (ref $w[0] eq 'HASH' and ! 
keys %{$w[0]}) + )) { shift @w }; + + return unless @w; + + if ( @w == 1 ) { + return ( ref $w[0] ) + ? $self->_collapse_cond($w[0]) + : { $w[0] => undef } + ; + } + elsif ( ref $w[0] ) { + return \@w; + } + elsif ( @w == 2 ) { + if ( ( $w[0]||'' ) =~ /^\-and$/i ) { + return (ref $w[1] eq 'HASH' or ref $w[1] eq 'ARRAY') + ? $self->_collapse_cond($w[1], (ref $w[1] eq 'ARRAY') ) + : $self->throw_exception("Unsupported top-level op/arg pair: [ $w[0] => $w[1] ]") + ; + } + else { + return $self->_collapse_cond({ @w }); + } + } + } + else { + # not a hash not an array + return { '' => $where }; + } + + # catchall, some of the things above fall through + return $where; +} + +sub _collapse_cond_unroll_pairs { + my ($self, $pairs) = @_; + + my @conds; + + while (@$pairs) { + my ($lhs, $rhs) = @{ shift @$pairs }; + + if ($lhs eq '') { + push @conds, $self->_collapse_cond($rhs); + } + elsif ( $lhs =~ /^\-and$/i ) { + push @conds, $self->_collapse_cond($rhs, (ref $rhs eq 'ARRAY')); + } + elsif ( $lhs =~ /^\-or$/i ) { + push @conds, $self->_collapse_cond( + (ref $rhs eq 'HASH') ? [ map { $_ => $rhs->{$_} } sort keys %$rhs ] : $rhs + ); + } + else { + if (ref $rhs eq 'HASH' and ! keys %$rhs) { + # FIXME - SQLA seems to be doing... nothing...? + } + elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) { + for my $p ($self->_collapse_cond_unroll_pairs([ [ $lhs => $rhs->{'='} ] ])) { + + # extra sanity check + if (keys %$p > 1) { + require Data::Dumper::Concise; + local $Data::Dumper::Deepcopy = 1; + $self->throw_exception( + "Internal error: unexpected collapse unroll:" + . Data::Dumper::Concise::Dumper { in => { $lhs => $rhs }, out => $p } + ); + } + + my ($l, $r) = %$p; + + push @conds, ( ! ref $r or overload::Method($r, '""' ) ) + ? { $l => $r } + : { $l => { '=' => $r } } + ; + } + } + elsif (ref $rhs eq 'ARRAY') { + # some of these conditionals encounter multi-values - roll them out using + # an unshift, which will cause extra looping in the while{} above + if (! @$rhs ) { + push @conds, { $lhs => [] }; + } + elsif ( ($rhs->[0]||'') =~ /^\-(?:and|or)$/i ) { + $self->throw_exception("Value modifier not followed by any values: $lhs => [ $rhs->[0] ] ") + if @$rhs == 1; + + if( $rhs->[0] =~ /^\-and$/i ) { + unshift @$pairs, map { [ $lhs => $_ ] } @{$rhs}[1..$#$rhs]; + } + # if not an AND then it's an OR + elsif(@$rhs == 2) { + unshift @$pairs, [ $lhs => $rhs->[1] ]; + } + else { + push @conds, { $lhs => $rhs }; + } + } + elsif (@$rhs == 1) { + unshift @$pairs, [ $lhs => $rhs->[0] ]; + } + else { + push @conds, { $lhs => $rhs }; + } + } + else { + push @conds, { $lhs => $rhs }; + } + } + } + + return @conds; +} + + # returns an arrayref of column names which *definitely* have some -# sort of non-nullable equality requested in the given condition +# sort of non-nullable *single* equality requested in the given condition # specification. This is used to figure out if a resultset is # constrained to a column which is part of a unique constraint, # which in turn allows us to better predict how ordering will behave @@ -988,31 +1195,35 @@ sub _main_source_order_by_portion_is_stable { # this is a rudimentary, incomplete, and error-prone extractor # however this is OK - it is conservative, and if we can not find # something that is in fact there - the stack will recover gracefully -# Also - DQ and the mst it rode in on will save us all RSN!!! 
sub _extract_fixed_condition_columns { - my ($self, $where) = @_; - - return unless ref $where eq 'HASH'; - - my @cols; - for my $lhs (keys %$where) { - if ($lhs =~ /^\-and$/i) { - push @cols, ref $where->{$lhs} eq 'ARRAY' - ? ( map { @{ $self->_extract_fixed_condition_columns($_) } } @{$where->{$lhs}} ) - : @{ $self->_extract_fixed_condition_columns($where->{$lhs}) } - ; - } - elsif ($lhs !~ /^\-/) { - my $val = $where->{$lhs}; - - push @cols, $lhs if (defined $val and ( - ! ref $val + my $self = shift; + my $where_hash = $self->_collapse_cond(shift); + + my $res; + for my $c (keys %$where_hash) { + if (defined (my $v = $where_hash->{$c}) ) { + if ( + ! ref $v or - (ref $val eq 'HASH' and keys %$val == 1 and defined $val->{'='}) - )); + (ref $v eq 'HASH' and keys %$v == 1 and defined $v->{'='} and ( + ! ref $v->{'='} + or + ref $v->{'='} eq 'SCALAR' + or + ( ref $v->{'='} eq 'REF' and ref ${$v->{'='}} eq 'ARRAY' ) + or + overload::Method($v->{'='}, '""') + )) + ) { + $res->{$c} = 1; + } + elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') { + $res->{$_} = 1 for map { @{ $self->_extract_fixed_condition_columns({ $c => $_ }) } } @{$v}[1..$#$v]; + } } } - return \@cols; + + return [ sort keys %$res ]; } 1; diff --git a/t/101populate_rs.t b/t/101populate_rs.t index 56e87f075..8a0eea431 100644 --- a/t/101populate_rs.t +++ b/t/101populate_rs.t @@ -24,7 +24,7 @@ my $schema = DBICTest->init_schema(); my $art_rs = $schema->resultset('Artist'); my $cd_rs = $schema->resultset('CD'); -my $restricted_art_rs = $art_rs->search({rank => 42}); +my $restricted_art_rs = $art_rs->search({ -and => [ rank => 42, charfield => { '=', \['(SELECT MAX(artistid) FROM artist) + ?', 6] } ] }); ok( $schema, 'Got a Schema object'); ok( $art_rs, 'Got Good Artist Resultset'); @@ -343,7 +343,9 @@ ARRAY_CONTEXT: { ]); ## Did it use the condition in the resultset? + $more_crap->discard_changes; cmp_ok( $more_crap->rank, '==', 42, "Got Correct rank for result object"); + cmp_ok( $more_crap->charfield, '==', $more_crap->id + 5, "Got Correct charfield for result object"); } } @@ -626,7 +628,9 @@ VOID_CONTEXT: { })->first; ## Did it use the condition in the resultset? + $more_crap->discard_changes; cmp_ok( $more_crap->rank, '==', 42, "Got Correct rank for result object"); + cmp_ok( $more_crap->charfield, '==', $more_crap->id + 5, "Got Correct charfield for result object"); } } @@ -655,7 +659,11 @@ ARRAYREF_OF_ARRAYREF_STYLE: { is $cooler->name, 'Cooler', 'Correct Name'; is $lamer->name, 'Lamer', 'Correct Name'; - cmp_ok $cooler->rank, '==', 42, 'Correct Rank'; + for ($cooler, $lamer) { + $_->discard_changes; + cmp_ok( $_->rank, '==', 42, "Got Correct rank for result object"); + cmp_ok( $_->charfield, '==', $_->id + 5, "Got Correct charfield for result object"); + } ARRAY_CONTEXT_WITH_COND_FROM_RS: { @@ -666,7 +674,9 @@ ARRAYREF_OF_ARRAYREF_STYLE: { ]); ## Did it use the condition in the resultset? + $mega_lamer->discard_changes; cmp_ok( $mega_lamer->rank, '==', 42, "Got Correct rank for result object"); + cmp_ok( $mega_lamer->charfield, '==', $mega_lamer->id + 5, "Got Correct charfield for result object"); } VOID_CONTEXT_WITH_COND_FROM_RS: { @@ -683,6 +693,7 @@ ARRAYREF_OF_ARRAYREF_STYLE: { ## Did it use the condition in the resultset? 
cmp_ok( $mega_lamer->rank, '==', 42, "Got Correct rank for result object"); + cmp_ok( $mega_lamer->charfield, '==', $mega_lamer->id + 5, "Got Correct charfield for result object"); } } diff --git a/t/prefetch/correlated.t b/t/prefetch/correlated.t index 6452a943c..df349fc79 100644 --- a/t/prefetch/correlated.t +++ b/t/prefetch/correlated.t @@ -36,10 +36,10 @@ is_same_sql_bind( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track, (SELECT COUNT( * ) FROM cd siblings - WHERE siblings.artist = me.artist + WHERE me.artist != ? + AND siblings.artist = me.artist AND siblings.cdid != me.cdid AND siblings.cdid != ? - AND me.artist != ? ), tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at FROM cd me @@ -50,12 +50,12 @@ is_same_sql_bind( [ # subselect - [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' } - => 23414 ], - [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' } => 2 ], + [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' } + => 23414 ], + # outher WHERE [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' } => 2 ], @@ -102,15 +102,15 @@ is_same_sql_bind( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track, (SELECT COUNT( * ) FROM cd siblings - WHERE siblings.artist = me.artist + WHERE me.artist != ? + AND siblings.artist = me.artist AND siblings.cdid != me.cdid AND siblings.cdid != ? - AND me.artist != ? ), (SELECT MIN( year ), MAX( year ) FROM cd siblings - WHERE siblings.artist = me.artist - AND me.artist != ? + WHERE me.artist != ? + AND siblings.artist = me.artist ), tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at FROM cd me @@ -121,12 +121,12 @@ is_same_sql_bind( [ # first subselect - [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' } - => 23414 ], - [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' } => 2 ], + [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' } + => 23414 ], + # second subselect [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' } => 2 ], diff --git a/t/relationship/update_or_create_multi.t b/t/relationship/update_or_create_multi.t index c7cce7a01..dd022a169 100644 --- a/t/relationship/update_or_create_multi.t +++ b/t/relationship/update_or_create_multi.t @@ -97,7 +97,7 @@ is_same_sql ( $search_sql, 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me - WHERE ( me.artist = ? AND me.title = ? AND me.genreid = ? ) + WHERE ( me.artist = ? AND me.genreid = ? AND me.title = ? ) ', 'expected select issued', ); diff --git a/t/resultset/as_query.t b/t/resultset/as_query.t index efd5e6eec..39bf88ce7 100644 --- a/t/resultset/as_query.t +++ b/t/resultset/as_query.t @@ -43,8 +43,8 @@ my $rank_resolved_bind = [ { is_same_sql_bind( $art_rs->as_query, - "(SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE ( ( ( rank = ? ) AND ( name = ? ) ) ) )", - [ $rank_resolved_bind, $name_resolved_bind ], + "(SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE name = ? AND rank = ? )", + [ $name_resolved_bind, $rank_resolved_bind ], ); } @@ -53,8 +53,8 @@ my $rscol = $art_rs->get_column( 'charfield' ); { is_same_sql_bind( $rscol->as_query, - "(SELECT me.charfield FROM artist me WHERE ( ( ( rank = ? ) AND ( name = ? ) ) ) )", - [ $rank_resolved_bind, $name_resolved_bind ], + "(SELECT me.charfield FROM artist me WHERE name = ? AND rank = ? 
)", + [ $name_resolved_bind, $rank_resolved_bind ], ); } diff --git a/t/resultset/bind_attr.t b/t/resultset/bind_attr.t index d93dcd1b5..71d1b9772 100644 --- a/t/resultset/bind_attr.t +++ b/t/resultset/bind_attr.t @@ -16,8 +16,6 @@ my $where_bind = { my $rs; { - local $TODO = 'bind args order needs fixing (semifor)'; - # First, the simple cases... $rs = $schema->resultset('Artist')->search( { artistid => 1 }, @@ -37,7 +35,6 @@ my $rs; is ( $rs->count, 1, 'where/bind last' ); # and the complex case - local $TODO = 'bind args order needs fixing (semifor)'; $rs = $schema->resultset('CustomSql')->search({}, { bind => [ 1999 ] }) ->search({ 'artistid' => 1 }, { where => \'title like ?', diff --git a/t/resultset/update_delete.t b/t/resultset/update_delete.t index 3314b881b..917e12f6d 100644 --- a/t/resultset/update_delete.t +++ b/t/resultset/update_delete.t @@ -133,13 +133,14 @@ is_same_sql_bind ( AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello - WHERE fourkeys_to_twokeys.pilot_sequence != ? AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? + WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? ) ) ', [ + ("'1'", "'2'") x 2, "'666'", - ("'1'", "'2'") x 4, + ("'1'", "'2'") x 2, "'c'", ], 'Correct update-SQL with multicolumn in support', diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t new file mode 100644 index 000000000..7b4506ddd --- /dev/null +++ b/t/sqlmaker/dbihacks_internals.t @@ -0,0 +1,173 @@ +use strict; +use warnings; +use Test::More; +use Test::Warn; + +use lib qw(t/lib); +use DBICTest; + +use DBIC::SqlMakerTest; +use Data::Dumper; + +my $schema = DBICTest->init_schema( no_deploy => 1); +my $sm = $schema->storage->sql_maker; + +for my $t ( + { + where => { artistid => 1, charfield => undef }, + cc_result => { artistid => 1, charfield => undef }, + sql => 'WHERE artistid = ? AND charfield IS NULL', + efcc_result => [qw( artistid )], + }, + { + where => { -and => [ artistid => 1, charfield => undef, { rank => 13 } ] }, + cc_result => { artistid => 1, charfield => undef, rank => 13 }, + sql => 'WHERE artistid = ? AND charfield IS NULL AND rank = ?', + efcc_result => [qw( artistid rank )], + }, + { + where => { -and => [ { artistid => 1, charfield => undef}, { rank => 13 } ] }, + cc_result => { artistid => 1, charfield => undef, rank => 13 }, + sql => 'WHERE artistid = ? AND charfield IS NULL AND rank = ?', + efcc_result => [qw( artistid rank )], + }, + { + where => { -and => [ -or => { name => 'Caterwauler McCrae' }, 'rank' ] }, + cc_result => { name => 'Caterwauler McCrae', rank => undef }, + sql => 'WHERE name = ? AND rank IS NULL', + efcc_result => [qw( name )], + }, + { + where => { -and => [ [ [ artist => {'=' => \'foo' } ] ], { name => \[ '= ?', 'bar' ] } ] }, + cc_result => { artist => {'=' => \'foo' }, name => \[ '= ?', 'bar' ] }, + sql => 'WHERE artist = foo AND name = ?', + efcc_result => [qw( artist )], + }, + { + where => { -and => [ -or => { name => 'Caterwauler McCrae', artistid => 2 } ] }, + cc_result => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] }, + sql => 'WHERE artistid = ? 
OR name = ?', + efcc_result => [], + }, + { + where => { -and => [ \'foo=bar', [ { artistid => { '=', 3 } } ], { name => 'Caterwauler McCrae'} ] }, + cc_result => { '' => \'foo=bar', name => 'Caterwauler McCrae', artistid => 3 }, + sql => 'WHERE foo=bar AND artistid = ? AND name = ?', + efcc_result => [qw( artistid name )], + }, + { + where => { artistid => [ 1 ], rank => [ 13, 2, 3 ], charfield => [ undef ] }, + cc_result => { artistid => 1, charfield => undef, rank => [13, 2, 3] }, + sql => 'WHERE artistid = ? AND charfield IS NULL AND ( rank = ? OR rank = ? OR rank = ? )', + efcc_result => [qw( artistid )], + }, + { + where => { artistid => { '=' => 1 }, rank => { '>' => 12 }, charfield => { '=' => undef } }, + cc_result => { artistid => 1, charfield => undef, rank => { '>' => 12 } }, + sql => 'WHERE artistid = ? AND charfield IS NULL AND rank > ?', + efcc_result => [qw( artistid )], + }, + { + where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ 1, 2 ] } }, + cc_result => { artistid => 1, charfield => [-and => { '=' => \'1' }, { '=' => \['?',2] } ], rank => { '=' => [1, 2] } }, + sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? )', + efcc_result => [qw( artistid charfield )], + }, + { + where => { -and => [ artistid => 1, artistid => 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => [ -or => { '=', 2 } ], rank => [-and => undef, { '=', undef }, { '!=', 2 } ] }, + cc_result => { artistid => [ -and => 1, 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => 2, rank => [ -and => undef, undef, { '!=', 2 } ] }, + sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank IS NULL AND rank IS NULL AND rank != ?', + efcc_result => [qw( artistid charfield name )], + }, + { + where => { -and => [ + [ '_macro.to' => { -like => '%correct%' }, '_wc_macros.to' => { -like => '%correct%' } ], + { -and => [ { 'group.is_active' => 1 }, { 'me.is_active' => 1 } ] } + ] }, + cc_result => { + 'group.is_active' => 1, + 'me.is_active' => 1, + -or => [ + '_macro.to' => { -like => '%correct%' }, + '_wc_macros.to' => { -like => '%correct%' }, + ], + }, + sql => 'WHERE ( _macro.to LIKE ? OR _wc_macros.to LIKE ? ) AND group.is_active = ? AND me.is_active = ?', + efcc_result => [qw( group.is_active me.is_active )], + }, + { + where => { artistid => [] }, + cc_result => { artistid => [] }, + efcc_result => [], + }, + (map { + { + where => { -and => $_ }, + cc_result => undef, + efcc_result => [], + sql => '', + }, + { + where => { -or => $_ }, + cc_result => undef, + efcc_result => [], + sql => '', + }, + } ( + # bare + [], {}, + # singles + [ {} ], [ [] ], + # doubles + [ [], [] ], [ {}, {} ], [ [], {} ], [ {}, [] ], + # tripples + [ {}, [], {} ], [ [], {}, [] ] + )), + + # FIXME legacy compat crap, possibly worth undef/dieing in SQLMaker + { where => { artistid => {} }, sql => '', cc_result => undef, efcc_result => [] }, + + # batshit insanity, just to be thorough + { + where => { -and => [ [ 'artistid' ], [ -and => [ artistid => { '!=', 69 }, artistid => undef, artistid => { '=' => 200 } ]], artistid => [], { -or => [] }, { -and => [] }, [ 'charfield' ], { name => [] }, 'rank' ] }, + cc_result => { artistid => [ -and => undef, { '!=', 69 }, undef, 200, [] ], charfield => undef, name => [], rank => undef }, + sql => 'WHERE artistid IS NULL AND artistid != ? AND artistid IS NULL AND artistid = ? 
AND 0=1 AND charfield IS NULL AND 0=1 AND rank IS NULL', + efcc_result => [qw( artistid )], + }, +) { + + for my $w ( + $t->{where}, + [ -and => $t->{where} ], + ( keys %{$t->{where}} <= 1 ) ? [ %{$t->{where}} ] : () + ) { + my $name = do { local ($Data::Dumper::Indent, $Data::Dumper::Terse, $Data::Dumper::Sortkeys) = (0, 1, 1); Dumper $w }; + + my @orig_sql_bind = $sm->where($w); + + is_same_sql ( $orig_sql_bind[0], $t->{sql}, "Expected SQL from $name" ) + if exists $t->{sql}; + + my $collapsed_cond = $schema->storage->_collapse_cond($w); + + is_same_sql_bind( + \[ $sm->where($collapsed_cond) ], + \\@orig_sql_bind, + "Collapse did not alter final SQL based on $name", + ); + + is_deeply( + $collapsed_cond, + $t->{cc_result}, + "Expected collapsed condition produced on $name", + ); + + is_deeply( + $schema->storage->_extract_fixed_condition_columns($w), + $t->{efcc_result}, + "Expected fixed_condition produced on $name", + ); + } +} + +done_testing; diff --git a/t/sqlmaker/limit_dialects/torture.t b/t/sqlmaker/limit_dialects/torture.t index f27318986..74e60a2f9 100644 --- a/t/sqlmaker/limit_dialects/torture.t +++ b/t/sqlmaker/limit_dialects/torture.t @@ -11,9 +11,11 @@ use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema; my $native_limit_dialect = $schema->storage->sql_maker->{limit_dialect}; +my $where_string = 'me.title = ? AND source != ? AND source = ?'; + my @where_bind = ( - [ {} => 'Study' ], [ {} => 'kama sutra' ], + [ {} => 'Study' ], [ { sqlt_datatype => 'varchar', sqlt_size => 100, dbic_colname => 'source' } => 'Library' ], ); my @select_bind = ( @@ -37,16 +39,16 @@ my $tests = { LimitOffset => { limit => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? LIMIT ? - )', + )", [ @select_bind, @where_bind, @@ -56,17 +58,17 @@ my $tests = { ], ], limit_offset => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? LIMIT ? OFFSET ? - )', + )", [ @select_bind, @where_bind, @@ -77,17 +79,17 @@ my $tests = { ], ], ordered_limit => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? LIMIT ? - )', + )", [ @select_bind, @where_bind, @@ -98,18 +100,18 @@ my $tests = { ] ], ordered_limit_offset => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? LIMIT ? OFFSET ? - )', + )", [ @select_bind, @where_bind, @@ -121,7 +123,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -130,7 +132,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 3 ], [ { sqlt_datatype => 'integer' } => 1 ], @@ -140,17 +142,17 @@ my $tests = { LimitXY => { ordered_limit_offset => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? 
AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? LIMIT ?, ? - )', + )", [ @select_bind, @where_bind, @@ -162,7 +164,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -171,7 +173,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 1 ], [ { sqlt_datatype => 'integer' } => 3 ], @@ -181,16 +183,16 @@ my $tests = { SkipFirst => { ordered_limit_offset => [ - '( + "( SELECT SKIP ? FIRST ? me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? - )', + )", [ [ { sqlt_datatype => 'integer' } => 3 ], [ { sqlt_datatype => 'integer' } => 4 ], @@ -202,7 +204,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT SKIP ? FIRST ? me.name, me.id @@ -210,7 +212,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 1 ], [ { sqlt_datatype => 'integer' } => 3 ], @@ -220,16 +222,16 @@ my $tests = { FirstSkip => { ordered_limit_offset => [ - '( + "( SELECT FIRST ? SKIP ? me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? - )', + )", [ [ { sqlt_datatype => 'integer' } => 4 ], [ { sqlt_datatype => 'integer' } => 3 ], @@ -241,7 +243,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT FIRST ? SKIP ? me.name, me.id @@ -249,7 +251,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 3 ], [ { sqlt_datatype => 'integer' } => 1 ], @@ -258,7 +260,7 @@ my $tests = { }, RowNumberOver => do { - my $unordered_sql = '( + my $unordered_sql = "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner__id, owner__name, bar, baz, ROW_NUMBER() OVER() AS rno__row__index @@ -267,15 +269,15 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ) me ) me WHERE rno__row__index >= ? AND rno__row__index <= ? - )'; + )"; - my $ordered_sql = '( + my $ordered_sql = "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner__id, owner__name, bar, baz, ROW_NUMBER() OVER( ORDER BY ORDER__BY__001, ORDER__BY__002 ) AS rno__row__index @@ -285,13 +287,13 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ) me ) me WHERE rno__row__index >= ? AND rno__row__index <= ? 
- )'; + )"; { limit => [$unordered_sql, @@ -337,7 +339,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -351,7 +353,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 2 ], [ { sqlt_datatype => 'integer' } => 4 ], @@ -362,20 +364,20 @@ my $tests = { RowNum => do { my $limit_sql = sub { - sprintf '( + sprintf "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? %s ) me WHERE ROWNUM <= ? - )', $_[0] || ''; + )", $_[0] || ''; }; { @@ -389,7 +391,7 @@ my $tests = { ], ], limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index @@ -398,13 +400,13 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ) me ) me WHERE rownum__index BETWEEN ? AND ? - )', + )", [ @select_bind, @where_bind, @@ -425,7 +427,7 @@ my $tests = { ], ], ordered_limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index @@ -434,7 +436,7 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? @@ -442,7 +444,7 @@ my $tests = { WHERE ROWNUM <= ? ) me WHERE rownum__index >= ? - )', + )", [ @select_bind, @where_bind, @@ -454,7 +456,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -468,7 +470,7 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [ [ { sqlt_datatype => 'integer' } => 2 ], [ { sqlt_datatype => 'integer' } => 4 ], @@ -479,16 +481,16 @@ my $tests = { FetchFirst => { limit => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? FETCH FIRST 4 ROWS ONLY - )', + )", [ @select_bind, @where_bind, @@ -497,14 +499,14 @@ my $tests = { ], ], limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY me.id @@ -512,7 +514,7 @@ my $tests = { ) me ORDER BY me.id DESC FETCH FIRST 4 ROWS ONLY - )', + )", [ @select_bind, @where_bind, @@ -521,17 +523,17 @@ my $tests = { ], ], ordered_limit => [ - '( + "( SELECT me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? 
FETCH FIRST 4 ROWS ONLY - )', + )", [ @select_bind, @where_bind, @@ -541,7 +543,7 @@ my $tests = { ], ], ordered_limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner__id, owner__name, bar, baz, ORDER__BY__001, ORDER__BY__002 @@ -550,7 +552,7 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? @@ -560,7 +562,7 @@ my $tests = { FETCH FIRST 4 ROWS ONLY ) me ORDER BY ORDER__BY__001, ORDER__BY__002 - )', + )", [ @select_bind, @order_bind, @@ -571,7 +573,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -586,22 +588,22 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [], ], }, Top => { limit => [ - '( + "( SELECT TOP 4 me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? - )', + )", [ @select_bind, @where_bind, @@ -610,20 +612,20 @@ my $tests = { ], ], limit_offset => [ - '( + "( SELECT TOP 4 me.id, owner__id, owner__name, bar, baz FROM ( SELECT TOP 7 me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY me.id ) me ORDER BY me.id DESC - )', + )", [ @select_bind, @where_bind, @@ -632,16 +634,16 @@ my $tests = { ], ], ordered_limit => [ - '( + "( SELECT TOP 4 me.id, owner.id, owner.name, ? * ?, ? FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? - )', + )", [ @select_bind, @where_bind, @@ -651,7 +653,7 @@ my $tests = { ], ], ordered_limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT TOP 4 me.id, owner__id, owner__name, bar, baz, ORDER__BY__001, ORDER__BY__002 @@ -660,7 +662,7 @@ my $tests = { FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ORDER BY ? / ?, ? @@ -668,7 +670,7 @@ my $tests = { ORDER BY ORDER__BY__001 DESC, ORDER__BY__002 DESC ) me ORDER BY ORDER__BY__001, ORDER__BY__002 - )', + )", [ @select_bind, @order_bind, @@ -679,7 +681,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT TOP 3 me.name, me.id @@ -692,21 +694,21 @@ my $tests = { ) me LEFT JOIN books books ON books.owner = me.id - )', + )", [], ], }, GenericSubQ => { ordered_limit => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz, me.price FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ) me @@ -735,7 +737,7 @@ my $tests = { ) ) < ? 
ORDER BY me.price DESC, me.id ASC - )', + )", [ @select_bind, @where_bind, @@ -745,14 +747,14 @@ my $tests = { ], ], ordered_limit_offset => [ - '( + "( SELECT me.id, owner__id, owner__name, bar, baz FROM ( SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz, me.price FROM books me JOIN owners owner ON owner.id = me.owner - WHERE source != ? AND me.title = ? AND source = ? + WHERE $where_string GROUP BY (me.id / ?), owner.id HAVING ? ) me @@ -781,7 +783,7 @@ my $tests = { ) ) BETWEEN ? AND ? ORDER BY me.price DESC, me.id ASC - )', + )", [ @select_bind, @where_bind, @@ -792,7 +794,7 @@ my $tests = { ], ], limit_offset_prefetch => [ - '( + "( SELECT me.name, books.id, books.source, books.owner, books.title, books.price FROM ( SELECT me.name, me.id @@ -819,7 +821,7 @@ my $tests = { LEFT JOIN books books ON books.owner = me.id ORDER BY me.name ASC, me.id DESC - )', + )", [ [ { sqlt_datatype => 'integer' } => 1 ], [ { sqlt_datatype => 'integer' } => 3 ], From 8acda57c41ff1e2eda847e53b073217cb9035136 Mon Sep 17 00:00:00 2001 From: Alexander Hartmaier Date: Wed, 5 Mar 2014 18:43:48 +0100 Subject: [PATCH 019/548] The initial (now passing) test for RT#93244 (fixed in 8d005ad9) --- t/sqlmaker/dbihacks_internals.t | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 7b4506ddd..f3d240f82 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -134,6 +134,27 @@ for my $t ( sql => 'WHERE artistid IS NULL AND artistid != ? AND artistid IS NULL AND artistid = ? AND 0=1 AND charfield IS NULL AND 0=1 AND rank IS NULL', efcc_result => [qw( artistid )], }, + + # original test from RT#93244 + { + where => { + -and => [ + \[ + "LOWER(me.title) LIKE ?", + '%spoon%', + ], + [ { 'me.title' => 'Spoonful of bees' } ], + ]}, + cc_result => { + '' => \[ + "LOWER(me.title) LIKE ?", + '%spoon%', + ], + 'me.title' => 'Spoonful of bees', + }, + sql => 'WHERE LOWER(me.title) LIKE ? AND me.title = ?', + efcc_result => [qw( me.title )], + } ) { for my $w ( From 3dd506b8d3671add7886c056b15a2c8a88c985e3 Mon Sep 17 00:00:00 2001 From: Naveed Massjouni Date: Fri, 9 May 2014 11:51:26 -0700 Subject: [PATCH 020/548] Updated discard_changes docs regarding { force_pool => 'master' } --- lib/DBIx/Class/Row.pm | 11 ++++++----- lib/DBIx/Class/Storage/DBI/Replicated.pm | 3 ++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index cad0185cd..ce08fbd9b 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -1490,11 +1490,12 @@ $attrs, if supplied, is expected to be a hashref of attributes suitable for pass second argument to C<< $resultset->search($cond, $attrs) >>; Note: If you are using L as your -storage, please kept in mind that if you L on a row that you -just updated or created, you should wrap the entire bit inside a transaction. -Otherwise you run the risk that you insert or update to the master database -but read from a replicant database that has not yet been updated from the -master. This will result in unexpected results. +storage, a default of +L<< C<< { force_pool => 'master' } >> +|DBIx::Class::Storage::DBI::Replicated/SYNOPSIS >> is automatically set for +you. Prior to C<< DBIx::Class 0.08109 >> (before 2010) one would have been +required to explicitly wrap the entire operation in a transaction to guarantee +that up-to-date results are read from the master database. 
=cut diff --git a/lib/DBIx/Class/Storage/DBI/Replicated.pm b/lib/DBIx/Class/Storage/DBI/Replicated.pm index 3c58716ed..fc10c75a1 100644 --- a/lib/DBIx/Class/Storage/DBI/Replicated.pm +++ b/lib/DBIx/Class/Storage/DBI/Replicated.pm @@ -1080,7 +1080,8 @@ sub _get_server_version { Due to the fact that replicants can lag behind a master, you must take care to make sure you use one of the methods to force read queries to a master should you need realtime data integrity. For example, if you insert a row, and then -immediately re-read it from the database (say, by doing $result->discard_changes) +immediately re-read it from the database (say, by doing +L<< $result->discard_changes|DBIx::Class::Row/discard_changes >>) or you insert a row and then immediately build a query that expects that row to be an item, you should force the master to handle reads. Otherwise, due to the lag, there is no certainty your data will be in the expected state. From 9736be652a2d4ce442c222c17ba5d47fff28bbed Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 25 May 2014 11:52:13 +0200 Subject: [PATCH 021/548] Improve complex order+prefetch exception message --- lib/DBIx/Class.pm | 2 ++ lib/DBIx/Class/Storage/DBIHacks.pm | 7 ++++--- t/prefetch/with_limit.t | 4 ++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 6c30f4abb..ac9f58129 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -436,6 +436,8 @@ mattp: Matt Phillips mdk: Mark Keating +mna: Maya + michaelr: Michael Reddick milki: Jonathan Chu diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 4147e81f7..aa1128615 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -653,9 +653,10 @@ sub _group_over_selection { } $self->throw_exception ( sprintf - 'A required group_by clause could not be constructed automatically due to a complex ' - . 'order_by criteria (%s). Either order_by columns only (no functions) or construct a suitable ' - . 'group_by by hand', + 'Unable to programatically derive a required group_by from the supplied ' + . 'order_by criteria. To proceed either add an explicit group_by, or ' + . 'simplify your order_by to only include plain columns ' + . 
'(supplied order_by: %s)', join ', ', map { "'$_'" } @$leftovers, ) if $leftovers; diff --git a/t/prefetch/with_limit.t b/t/prefetch/with_limit.t index 480dc40a3..4d08cd06a 100644 --- a/t/prefetch/with_limit.t +++ b/t/prefetch/with_limit.t @@ -152,8 +152,8 @@ throws_ok ( {'tracks.title' => { '!=' => 'foo' }}, { order_by => \ 'some oddball literal sql', join => { cds => 'tracks' } } )->next - }, qr/A required group_by clause could not be constructed automatically/, -) || exit; + }, qr/Unable to programatically derive a required group_by from the supplied order_by criteria/, +); my $artist = $use_prefetch->search({'cds.title' => $artist_many_cds->cds->first->title })->next; is($artist->cds->count, 1, "count on search limiting prefetched has_many"); From 02a2db55ba0471379640a89ba5d9d128b5486270 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 07:50:48 +0200 Subject: [PATCH 022/548] This test was essentially c/p-ed in 2cc3a7be3, consolidate The overriding difference is the new-connection == new-storage == new-sqla Add an explicit codepath to test that, but overall there is plenty of other places that verify this behavior to death Examine under -w -C -M to make sense out of the consolidation (minimal changes were attempted) --- t/sqlmaker/{quotes => }/quotes.t | 51 ++++++++---------- t/sqlmaker/quotes/quotes_newstyle.t | 83 ----------------------------- 2 files changed, 23 insertions(+), 111 deletions(-) rename t/sqlmaker/{quotes => }/quotes.t (66%) delete mode 100644 t/sqlmaker/quotes/quotes_newstyle.t diff --git a/t/sqlmaker/quotes/quotes.t b/t/sqlmaker/quotes.t similarity index 66% rename from t/sqlmaker/quotes/quotes.t rename to t/sqlmaker/quotes.t index 3fbc94cd3..84f2a3f97 100644 --- a/t/sqlmaker/quotes/quotes.t +++ b/t/sqlmaker/quotes.t @@ -10,22 +10,35 @@ use DBIC::DebugObj; my $schema = DBICTest->init_schema(); -$schema->storage->sql_maker->quote_char('`'); -$schema->storage->sql_maker->name_sep('.'); +$schema->connection( + @{ $schema->storage->_dbi_connect_info }, + { AutoCommit => 1, quote_char => [qw/[ ]/] } +); my ($sql, @bind); $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); $schema->storage->debug(1); -my $rs; - -$rs = $schema->resultset('CD')->search( +my $rs = $schema->resultset('CD')->search( { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, { join => 'artist' }); +my $expected_bind = ["'Caterwauler McCrae'", "'2001'"]; +eval { $rs->count }; +is_same_sql_bind( + $sql, \@bind, + "SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? )", + $expected_bind, + 'got correct SQL for count query with bracket quoting' +); + +$schema->storage->sql_maker->quote_char('`'); +$schema->storage->sql_maker->name_sep('.'); + eval { $rs->count }; is_same_sql_bind( $sql, \@bind, - "SELECT COUNT( * ) FROM cd `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )", ["'Caterwauler McCrae'", "'2001'"], + "SELECT COUNT( * ) FROM cd `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? 
)", + $expected_bind, 'got correct SQL for count query with quoting' ); @@ -40,27 +53,9 @@ $rs = $schema->resultset('CD')->search({}, eval { $rs->first }; like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref'); -$schema->storage->sql_maker->quote_char([qw/[ ]/]); -$schema->storage->sql_maker->name_sep('.'); - -$rs = $schema->resultset('CD')->search( - { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, - { join => 'artist' }); -eval { $rs->count }; -is_same_sql_bind( - $sql, \@bind, - "SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? )", ["'Caterwauler McCrae'", "'2001'"], - 'got correct SQL for count query with bracket quoting' -); - -my %data = ( - name => 'Bill', - order => '12' -); - -$schema->storage->sql_maker->quote_char('`'); -$schema->storage->sql_maker->name_sep('.'); - -is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE'); +is( + $schema->storage->sql_maker->update('group', { name => 'Bill', order => 12 }), + 'UPDATE `group` SET `name` = ?, `order` = ?', + 'quoted table names for UPDATE' ); done_testing; diff --git a/t/sqlmaker/quotes/quotes_newstyle.t b/t/sqlmaker/quotes/quotes_newstyle.t deleted file mode 100644 index 900a68a21..000000000 --- a/t/sqlmaker/quotes/quotes_newstyle.t +++ /dev/null @@ -1,83 +0,0 @@ -use strict; -use warnings; - -use Test::More; - -use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; -use DBIC::DebugObj; - -my $schema = DBICTest->init_schema(); - -my $dsn = $schema->storage->_dbi_connect_info->[0]; -$schema->connection( - $dsn, - undef, - undef, - { AutoCommit => 1 }, - { quote_char => '`', name_sep => '.' }, -); - -my ($sql, @bind); -$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)), -$schema->storage->debug(1); - -my $rs; - -$rs = $schema->resultset('CD')->search( - { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, - { join => 'artist' }); -eval { $rs->count }; -is_same_sql_bind( - $sql, \@bind, - "SELECT COUNT( * ) FROM cd `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )", ["'Caterwauler McCrae'", "'2001'"], - 'got correct SQL for count query with quoting' -); - -my $order = 'year DESC'; -$rs = $schema->resultset('CD')->search({}, - { 'order_by' => $order }); -eval { $rs->first }; -like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)'); - -$rs = $schema->resultset('CD')->search({}, - { 'order_by' => \$order }); -eval { $rs->first }; -like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref'); - -$schema->connection( - $dsn, - undef, - undef, - { AutoCommit => 1, quote_char => [qw/[ ]/], name_sep => '.' } -); - -$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)), -$schema->storage->debug(1); - -$rs = $schema->resultset('CD')->search( - { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, - { join => 'artist' }); -eval { $rs->count }; -is_same_sql_bind( - $sql, \@bind, - "SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? 
)", ["'Caterwauler McCrae'", "'2001'"], - 'got correct SQL for count query with bracket quoting' -); - -my %data = ( - name => 'Bill', - order => '12' -); - -$schema->connection( - $dsn, - undef, - undef, - { AutoCommit => 1, quote_char => '`', name_sep => '.' } -); - -is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE'); - -done_testing; From 238e071106f84eb98f001e8d14b27ae600119a28 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 07:53:38 +0200 Subject: [PATCH 023/548] Reaching for IO::File here makes no sense --- lib/DBIx/Class/Storage/Statistics.pm | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index 7e491cd34..6c77ffbb8 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -3,7 +3,6 @@ use strict; use warnings; use base qw/DBIx::Class/; -use IO::File; use namespace::clean; __PACKAGE__->mk_group_accessors(simple => qw/callback _debugfh silence/); @@ -61,11 +60,11 @@ sub debugfh { my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} || $ENV{DBIC_TRACE}; if (defined($debug_env) && ($debug_env =~ /=(.+)$/)) { - $fh = IO::File->new($1, 'a') - or die("Cannot open trace file $1"); + open ($fh, '>>', $1) + or die("Cannot open trace file $1: $!"); } else { - $fh = IO::File->new('>&STDERR') - or die('Duplication of STDERR for debug output failed (perhaps your STDERR is closed?)'); + open ($fh, '>&STDERR') + or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!"); } $fh->autoflush(); From bedbc8111dbbb98f89a36c3cf4e2f6903a4b01be Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 10:05:08 +0200 Subject: [PATCH 024/548] Consolidate lib-wide frameskip, adjust the ::RunMode loading order --- t/73oracle_hq.t | 1 - t/lib/DBICTest.pm | 1 - t/lib/DBICTest/Base.pm | 12 ++++++++++++ t/lib/DBICTest/BaseResult.pm | 5 +---- t/lib/DBICTest/BaseResultSet.pm | 6 +----- t/lib/DBICTest/BaseSchema.pm | 4 +--- t/zzzzzzz_sqlite_deadlock.t | 8 ++++---- 7 files changed, 19 insertions(+), 18 deletions(-) create mode 100644 t/lib/DBICTest/Base.pm diff --git a/t/73oracle_hq.t b/t/73oracle_hq.t index 0f887fab3..0595edf53 100644 --- a/t/73oracle_hq.t +++ b/t/73oracle_hq.t @@ -5,7 +5,6 @@ use Test::Exception; use Test::More; use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); -use DBICTest::RunMode; $ENV{NLS_SORT} = "BINARY"; $ENV{NLS_COMP} = "BINARY"; diff --git a/t/lib/DBICTest.pm b/t/lib/DBICTest.pm index 693409257..4a9307d93 100644 --- a/t/lib/DBICTest.pm +++ b/t/lib/DBICTest.pm @@ -37,7 +37,6 @@ BEGIN { } } -use DBICTest::RunMode; use DBICTest::Schema; use DBICTest::Util::LeakTracer qw/populate_weakregistry assert_empty_weakregistry/; use DBICTest::Util 'local_umask'; diff --git a/t/lib/DBICTest/Base.pm b/t/lib/DBICTest/Base.pm new file mode 100644 index 000000000..7d2cb5605 --- /dev/null +++ b/t/lib/DBICTest/Base.pm @@ -0,0 +1,12 @@ +package #hide from pause + DBICTest::Base; + +use strict; +use warnings; + +# must load before any DBIx::Class* namespaces +use DBICTest::RunMode; + +sub _skip_namespace_frames { '^DBICTest' } + +1; diff --git a/t/lib/DBICTest/BaseResult.pm b/t/lib/DBICTest/BaseResult.pm index c732181f2..65f90d185 100644 --- a/t/lib/DBICTest/BaseResult.pm +++ b/t/lib/DBICTest/BaseResult.pm @@ -4,10 +4,7 @@ package #hide from pause use strict; use warnings; -# must load before any DBIx::Class* 
namespaces -use DBICTest::RunMode; - -use base 'DBIx::Class::Core'; +use base qw(DBICTest::Base DBIx::Class::Core); #use base qw/DBIx::Class::Relationship::Cascade::Rekey DBIx::Class::Core/; diff --git a/t/lib/DBICTest/BaseResultSet.pm b/t/lib/DBICTest/BaseResultSet.pm index 77d22f282..2441cb7af 100644 --- a/t/lib/DBICTest/BaseResultSet.pm +++ b/t/lib/DBICTest/BaseResultSet.pm @@ -4,11 +4,7 @@ package #hide from pause use strict; use warnings; -# must load before any DBIx::Class* namespaces -use DBICTest::RunMode; - -use base 'DBIx::Class::ResultSet'; -__PACKAGE__->_skip_namespace_frames('^DBICTest'); +use base qw(DBICTest::Base DBIx::Class::ResultSet); sub all_hri { return [ shift->search ({}, { result_class => 'DBIx::Class::ResultClass::HashRefInflator' })->all ]; diff --git a/t/lib/DBICTest/BaseSchema.pm b/t/lib/DBICTest/BaseSchema.pm index 010e3e9ee..ea7088a8c 100644 --- a/t/lib/DBICTest/BaseSchema.pm +++ b/t/lib/DBICTest/BaseSchema.pm @@ -4,9 +4,7 @@ package #hide from pause use strict; use warnings; -# must load before any DBIx::Class* namespaces -use DBICTest::RunMode; +use base qw(DBICTest::Base DBIx::Class::Schema); -use base 'DBIx::Class::Schema'; 1; diff --git a/t/zzzzzzz_sqlite_deadlock.t b/t/zzzzzzz_sqlite_deadlock.t index 6a38d2c23..b0e8f3b98 100644 --- a/t/zzzzzzz_sqlite_deadlock.t +++ b/t/zzzzzzz_sqlite_deadlock.t @@ -4,10 +4,10 @@ use warnings; use Test::More; use lib 't/lib'; -use DBICTest::RunMode; - -if ( DBICTest::RunMode->is_plain ) { - plan( skip_all => "Skipping test on plain module install" ); +BEGIN { + require DBICTest::RunMode; + plan( skip_all => "Skipping test on plain module install" ) + if DBICTest::RunMode->is_plain; } use Test::Exception; From e952df766c89f1fd6e7e2e1289162b5c6773e65c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 10:09:43 +0200 Subject: [PATCH 025/548] Move the DSN-lock machinery from 8d6b1478d into DBICTest::BaseSchema This is just a c/p job, expecting zero functional changes The t/admin/02ddl.t are cosmetic changes fixing a nested &cref leak never before encountered on <= 5.8.7 --- t/admin/02ddl.t | 8 +- t/lib/DBICTest/BaseSchema.pm | 161 ++++++++++++++++++++++++++++++++++ t/lib/DBICTest/Schema.pm | 163 ----------------------------------- 3 files changed, 166 insertions(+), 166 deletions(-) diff --git a/t/admin/02ddl.t b/t/admin/02ddl.t index 1d9ce882f..d17d677ae 100644 --- a/t/admin/02ddl.t +++ b/t/admin/02ddl.t @@ -108,13 +108,15 @@ my $admin = DBIx::Class::Admin->new( ); $admin->version("3.0"); -lives_ok { $admin->install(); } 'install schema version 3.0'; +$admin->install; is($admin->schema->get_db_version, "3.0", 'db thinks its version 3.0'); -dies_ok { $admin->install("4.0"); } 'cannot install to allready existing version'; +throws_ok { + $admin->install("4.0") +} qr/Schema already has a version. 
Try upgrade instead/, 'cannot install to allready existing version'; $admin->force(1); warnings_exist ( sub { - lives_ok { $admin->install("4.0") } 'can force install to allready existing version' + $admin->install("4.0") }, qr/Forcing install may not be a good idea/, 'Force warning emitted' ); is($admin->schema->get_db_version, "4.0", 'db thinks its version 4.0'); } diff --git a/t/lib/DBICTest/BaseSchema.pm b/t/lib/DBICTest/BaseSchema.pm index ea7088a8c..0e1e5e232 100644 --- a/t/lib/DBICTest/BaseSchema.pm +++ b/t/lib/DBICTest/BaseSchema.pm @@ -6,5 +6,166 @@ use warnings; use base qw(DBICTest::Base DBIx::Class::Schema); +use Fcntl qw(:DEFAULT :seek :flock); +use Time::HiRes 'sleep'; +use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry); +use DBICTest::Util 'local_umask'; +use namespace::clean; + +our $locker; +END { + # we need the $locker to be referenced here for delayed destruction + if ($locker->{lock_name} and ($ENV{DBICTEST_LOCK_HOLDER}||0) == $$) { + #warn "$$ $0 $locker->{type} LOCK RELEASED"; + } +} + +my $weak_registry = {}; + +sub connection { + my $self = shift->next::method(@_); + +# MASSIVE FIXME +# we can't really lock based on DSN, as we do not yet have a way to tell that e.g. +# DBICTEST_MSSQL_DSN=dbi:Sybase:server=192.168.0.11:1433;database=dbtst +# and +# DBICTEST_MSSQL_ODBC_DSN=dbi:ODBC:server=192.168.0.11;port=1433;database=dbtst;driver=FreeTDS;tds_version=8.0 +# are the same server +# hence we lock everything based on sqlt_type or just globally if not available +# just pretend we are python you know? :) + + + # when we get a proper DSN resolution sanitize to produce a portable lockfile name + # this may look weird and unnecessary, but consider running tests from + # windows over a samba share >.> + #utf8::encode($dsn); + #$dsn =~ s/([^A-Za-z0-9_\-\.\=])/ sprintf '~%02X', ord($1) /ge; + #$dsn =~ s/^dbi/dbi/i; + + # provide locking for physical (non-memory) DSNs, so that tests can + # safely run in parallel. While the harness (make -jN test) does set + # an envvar, we can not detect when a user invokes prove -jN. Hence + # perform the locking at all times, it shouldn't hurt. + # the lock fh *should* inherit across forks/subprocesses + # + # File locking is hard. Really hard. By far the best lock implementation + # I've seen is part of the guts of File::Temp. However it is sadly not + # reusable. Since I am not aware of folks doing NFS parallel testing, + # nor are we known to work on VMS, I am just going to punt this and + # use the portable-ish flock() provided by perl itself. If this does + # not work for you - patches more than welcome. + if ( + ! $DBICTest::global_exclusive_lock + and + ( ! $ENV{DBICTEST_LOCK_HOLDER} or $ENV{DBICTEST_LOCK_HOLDER} == $$ ) + and + ref($_[0]) ne 'CODE' + and + ($_[0]||'') !~ /^ (?i:dbi) \: SQLite \: (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x + ) { + + my $locktype = do { + # guard against infinite recursion + local $ENV{DBICTEST_LOCK_HOLDER} = -1; + + # we need to connect a forced fresh clone so that we do not upset any state + # of the main $schema (some tests examine it quite closely) + local $SIG{__WARN__} = sub {}; + local $@; + my $storage = eval { + my $st = ref($self)->connect(@{$self->storage->connect_info})->storage; + $st->ensure_connected; # do connect here, to catch a possible throw + $st; + }; + $storage + ? do { + my $t = $storage->sqlt_type || 'generic'; + eval { $storage->disconnect }; + $t; + } + : undef + ; + }; + + # Never hold more than one lock. 
This solves the "lock in order" issues + # unrelated tests may have + # Also if there is no connection - there is no lock to be had + if ($locktype and (!$locker or $locker->{type} ne $locktype)) { + + # this will release whatever lock we may currently be holding + # which is fine since the type does not match as checked above + undef $locker; + + my $lockpath = DBICTest::RunMode->tmpdir->file("_dbictest_$locktype.lock"); + + #warn "$$ $0 $locktype GRABBING LOCK"; + my $lock_fh; + { + my $u = local_umask(0); # so that the file opens as 666, and any user can lock + sysopen ($lock_fh, $lockpath, O_RDWR|O_CREAT) or die "Unable to open $lockpath: $!"; + } + flock ($lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!"; + #warn "$$ $0 $locktype LOCK GRABBED"; + + # see if anyone was holding a lock before us, and wait up to 5 seconds for them to terminate + # if we do not do this we may end up trampling over some long-running END or somesuch + seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!"; + my $old_pid; + if ( + read ($lock_fh, $old_pid, 100) + and + ($old_pid) = $old_pid =~ /^(\d+)$/ + ) { + for (1..50) { + kill (0, $old_pid) or last; + sleep 0.1; + } + } + #warn "$$ $0 $locktype POST GRAB WAIT"; + + truncate $lock_fh, 0; + seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!"; + $lock_fh->autoflush(1); + print $lock_fh $$; + + $ENV{DBICTEST_LOCK_HOLDER} ||= $$; + + $locker = { + type => $locktype, + fh => $lock_fh, + lock_name => "$lockpath", + }; + } + } + + if ($INC{'Test/Builder.pm'}) { + populate_weakregistry ( $weak_registry, $self->storage ); + + my $cur_connect_call = $self->storage->on_connect_call; + + $self->storage->on_connect_call([ + (ref $cur_connect_call eq 'ARRAY' + ? @$cur_connect_call + : ($cur_connect_call || ()) + ), + [sub { + populate_weakregistry( $weak_registry, shift->_dbh ) + }], + ]); + } + + return $self; +} + +sub clone { + my $self = shift->next::method(@_); + populate_weakregistry ( $weak_registry, $self ) + if $INC{'Test/Builder.pm'}; + $self; +} + +END { + assert_empty_weakregistry($weak_registry, 'quiet'); +} 1; diff --git a/t/lib/DBICTest/Schema.pm b/t/lib/DBICTest/Schema.pm index b39ecbc7d..52906c7dd 100644 --- a/t/lib/DBICTest/Schema.pm +++ b/t/lib/DBICTest/Schema.pm @@ -7,13 +7,6 @@ no warnings 'qw'; use base 'DBICTest::BaseSchema'; -use Fcntl qw/:DEFAULT :seek :flock/; -use Time::HiRes 'sleep'; -use DBICTest::RunMode; -use DBICTest::Util::LeakTracer qw/populate_weakregistry assert_empty_weakregistry/; -use DBICTest::Util 'local_umask'; -use namespace::clean; - __PACKAGE__->mk_group_accessors(simple => 'custom_attr'); __PACKAGE__->load_classes(qw/ @@ -69,160 +62,4 @@ sub sqlt_deploy_hook { $sqlt_schema->drop_table('dummy'); } - -our $locker; -END { - # we need the $locker to be referenced here for delayed destruction - if ($locker->{lock_name} and ($ENV{DBICTEST_LOCK_HOLDER}||0) == $$) { - #warn "$$ $0 $locker->{type} LOCK RELEASED"; - } -} - -my $weak_registry = {}; - -sub connection { - my $self = shift->next::method(@_); - -# MASSIVE FIXME -# we can't really lock based on DSN, as we do not yet have a way to tell that e.g. -# DBICTEST_MSSQL_DSN=dbi:Sybase:server=192.168.0.11:1433;database=dbtst -# and -# DBICTEST_MSSQL_ODBC_DSN=dbi:ODBC:server=192.168.0.11;port=1433;database=dbtst;driver=FreeTDS;tds_version=8.0 -# are the same server -# hence we lock everything based on sqlt_type or just globally if not available -# just pretend we are python you know? 
:) - - - # when we get a proper DSN resolution sanitize to produce a portable lockfile name - # this may look weird and unnecessary, but consider running tests from - # windows over a samba share >.> - #utf8::encode($dsn); - #$dsn =~ s/([^A-Za-z0-9_\-\.\=])/ sprintf '~%02X', ord($1) /ge; - #$dsn =~ s/^dbi/dbi/i; - - # provide locking for physical (non-memory) DSNs, so that tests can - # safely run in parallel. While the harness (make -jN test) does set - # an envvar, we can not detect when a user invokes prove -jN. Hence - # perform the locking at all times, it shouldn't hurt. - # the lock fh *should* inherit across forks/subprocesses - # - # File locking is hard. Really hard. By far the best lock implementation - # I've seen is part of the guts of File::Temp. However it is sadly not - # reusable. Since I am not aware of folks doing NFS parallel testing, - # nor are we known to work on VMS, I am just going to punt this and - # use the portable-ish flock() provided by perl itself. If this does - # not work for you - patches more than welcome. - if ( - ! $DBICTest::global_exclusive_lock - and - ( ! $ENV{DBICTEST_LOCK_HOLDER} or $ENV{DBICTEST_LOCK_HOLDER} == $$ ) - and - ref($_[0]) ne 'CODE' - and - ($_[0]||'') !~ /^ (?i:dbi) \: SQLite \: (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x - ) { - - my $locktype = do { - # guard against infinite recursion - local $ENV{DBICTEST_LOCK_HOLDER} = -1; - - # we need to connect a forced fresh clone so that we do not upset any state - # of the main $schema (some tests examine it quite closely) - local $@; - my $storage = eval { - my $st = ref($self)->connect(@{$self->storage->connect_info})->storage; - $st->ensure_connected; # do connect here, to catch a possible throw - $st; - }; - $storage - ? do { - my $t = $storage->sqlt_type || 'generic'; - eval { $storage->disconnect }; - $t; - } - : undef - ; - }; - - # Never hold more than one lock. 
This solves the "lock in order" issues - # unrelated tests may have - # Also if there is no connection - there is no lock to be had - if ($locktype and (!$locker or $locker->{type} ne $locktype)) { - - # this will release whatever lock we may currently be holding - # which is fine since the type does not match as checked above - undef $locker; - - my $lockpath = DBICTest::RunMode->tmpdir->file("_dbictest_$locktype.lock"); - - #warn "$$ $0 $locktype GRABBING LOCK"; - my $lock_fh; - { - my $u = local_umask(0); # so that the file opens as 666, and any user can lock - sysopen ($lock_fh, $lockpath, O_RDWR|O_CREAT) or die "Unable to open $lockpath: $!"; - } - flock ($lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!"; - #warn "$$ $0 $locktype LOCK GRABBED"; - - # see if anyone was holding a lock before us, and wait up to 5 seconds for them to terminate - # if we do not do this we may end up trampling over some long-running END or somesuch - seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!"; - my $old_pid; - if ( - read ($lock_fh, $old_pid, 100) - and - ($old_pid) = $old_pid =~ /^(\d+)$/ - ) { - for (1..50) { - kill (0, $old_pid) or last; - sleep 0.1; - } - } - #warn "$$ $0 $locktype POST GRAB WAIT"; - - truncate $lock_fh, 0; - seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!"; - $lock_fh->autoflush(1); - print $lock_fh $$; - - $ENV{DBICTEST_LOCK_HOLDER} ||= $$; - - $locker = { - type => $locktype, - fh => $lock_fh, - lock_name => "$lockpath", - }; - } - } - - if ($INC{'Test/Builder.pm'}) { - populate_weakregistry ( $weak_registry, $self->storage ); - - my $cur_connect_call = $self->storage->on_connect_call; - - $self->storage->on_connect_call([ - (ref $cur_connect_call eq 'ARRAY' - ? @$cur_connect_call - : ($cur_connect_call || ()) - ), - [sub { - populate_weakregistry( $weak_registry, shift->_dbh ) - }], - ]); - } - - return $self; -} - -sub clone { - my $self = shift->next::method(@_); - populate_weakregistry ( $weak_registry, $self ) - if $INC{'Test/Builder.pm'}; - $self; -} - -END { - assert_empty_weakregistry($weak_registry, 'quiet'); -} - 1; From 113322e503f3c3f39c4652ac47417279c096db6a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 10:22:29 +0200 Subject: [PATCH 026/548] Less warning noise from sqlite test --- t/752sqlite.t | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/t/752sqlite.t b/t/752sqlite.t index fd7e8d716..0fbad3449 100644 --- a/t/752sqlite.t +++ b/t/752sqlite.t @@ -215,7 +215,20 @@ for my $bi ( qw( my $v_desc = sprintf '%s (%d bit signed int)', $bi, $v_bits; my @w; - local $SIG{__WARN__} = sub { $_[0] =~ /datatype mismatch/ ? push @w, @_ : warn @_ }; + local $SIG{__WARN__} = sub { + if ($_[0] =~ /datatype mismatch/) { + push @w, @_; + } + elsif ($_[0] =~ /An integer value occupying more than 32 bits was supplied .+ can not bind properly so DBIC will treat it as a string instead/ ) { + # do nothing, this warning will pop up here and there depending on + # DBD/bitness combination + # we don't want to test for it explicitly, we are just interested + # in the results matching at the end + } + else { + warn @_; + } + }; # some combinations of SQLite 1.35 and older 5.8 faimly is wonky # instead of a warning we get a full exception. 
Sod it From 56270bba4fb06051ea5262d99f858920c796562e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 13:34:31 +0200 Subject: [PATCH 027/548] Fix erroneous todoification in sqlite test --- lib/DBIx/Class/_Util.pm | 3 +++ t/752sqlite.t | 1 + 2 files changed, 4 insertions(+) diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 3e3b68f18..518457cd3 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -146,6 +146,9 @@ sub modver_gt_or_eq ($$) { local $SIG{__WARN__} = sigwarn_silencer( qr/\Qisn't numeric in subroutine entry/ ) if SPURIOUS_VERSION_CHECK_WARNINGS; + croak "$mod does not seem to provide a version (perhaps it never loaded)" + unless $mod->VERSION; + local $@; eval { $mod->VERSION($ver) } ? 1 : 0; } diff --git a/t/752sqlite.t b/t/752sqlite.t index 0fbad3449..70bd612f1 100644 --- a/t/752sqlite.t +++ b/t/752sqlite.t @@ -53,6 +53,7 @@ use DBIx::Class::_Util qw(sigwarn_silencer modver_gt_or_eq); # However DBD::SQLite 1.38_02 seems to fix this, with an accompanying test: # https://metacpan.org/source/ADAMK/DBD-SQLite-1.38_02/t/54_literal_txn.t +require DBD::SQLite; my $lit_txn_todo = modver_gt_or_eq('DBD::SQLite', '1.38_02') ? undef : "DBD::SQLite before 1.38_02 is retarded wrt detecting literal BEGIN/COMMIT statements" From 398215b170197df314fb0c40e0654c3a6860f19d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 13:15:45 +0200 Subject: [PATCH 028/548] Fix multiple savepointing transactions on DBD::SQLite Problem was missed during review of 86a51471ce (how the fuck did I let this abomination through anyway), and not encountered due to insufficient testing. The naive statement parser in DBD::SQLite when running against older libsqlite mistakenly treats ROLLBACK TRANSACTION TO... as an actual TXN rollback, and as a result desyncs the internal AutoCommit flag state [1] Fix by simply using the shorter (still valid) syntax [2], and by removing the sloppy workaround hiding the actual problem. 
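For illustration (an editorial recap of the hunk below, not additional patch
text), the savepoint rollback now reads

  $self->_dbh->do("ROLLBACK TO SAVEPOINT $name");
  # was: $self->_dbh->do("ROLLBACK TRANSACTION TO SAVEPOINT $name");

which the statement parser of the affected DBD::SQLite versions no longer
mistakes for a full transaction rollback.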
[1] https://github.com/DBD-SQLite/DBD-SQLite/blob/1.42/dbdimp.c#L824:L852 [2] http://www.sqlite.org/lang_savepoint.html --- Changes | 2 ++ lib/DBIx/Class/Storage.pm | 2 ++ lib/DBIx/Class/Storage/DBI/SQLite.pm | 20 +++++++++++--- t/752sqlite.t | 40 ++++++++++++++++++++++++++++ 4 files changed, 60 insertions(+), 4 deletions(-) diff --git a/Changes b/Changes index e36363746..bbd5815bf 100644 --- a/Changes +++ b/Changes @@ -11,6 +11,8 @@ Revision history for DBIx::Class up by create() and populate() - Ensure definitive condition extractor handles bizarre corner cases without bombing out (RT#93244) + - Fix inability to handle multiple consecutive transactions with + savepoints on DBD::SQLite < 1.39 0.08270 2014-01-30 21:54 (PST) * Fixes diff --git a/lib/DBIx/Class/Storage.pm b/lib/DBIx/Class/Storage.pm index 1addeafb7..ad1770eb9 100644 --- a/lib/DBIx/Class/Storage.pm +++ b/lib/DBIx/Class/Storage.pm @@ -227,6 +227,7 @@ sub txn_commit { $self->debugobj->txn_commit() if $self->debug; $self->_exec_txn_commit; $self->{transaction_depth}--; + $self->savepoints([]); } elsif($self->transaction_depth > 1) { $self->{transaction_depth}--; @@ -252,6 +253,7 @@ sub txn_rollback { $self->debugobj->txn_rollback() if $self->debug; $self->_exec_txn_rollback; $self->{transaction_depth}--; + $self->savepoints([]); } elsif ($self->transaction_depth > 1) { $self->{transaction_depth}--; diff --git a/lib/DBIx/Class/Storage/DBI/SQLite.pm b/lib/DBIx/Class/Storage/DBI/SQLite.pm index 2e4e312c8..2778dbdfe 100644 --- a/lib/DBIx/Class/Storage/DBI/SQLite.pm +++ b/lib/DBIx/Class/Storage/DBI/SQLite.pm @@ -126,11 +126,23 @@ sub _exec_svp_release { sub _exec_svp_rollback { my ($self, $name) = @_; - # For some reason this statement changes the value of $dbh->{AutoCommit}, so - # we localize it here to preserve the original value. - local $self->_dbh->{AutoCommit} = $self->_dbh->{AutoCommit}; + $self->_dbh->do("ROLLBACK TO SAVEPOINT $name"); +} + +# older SQLite has issues here too - both of these are in fact +# completely benign warnings (or at least so say the tests) +sub _exec_txn_rollback { + local $SIG{__WARN__} = sigwarn_silencer( qr/rollback ineffective/ ) + unless $DBD::SQLite::__DBIC_TXN_SYNC_SANE__; - $self->_dbh->do("ROLLBACK TRANSACTION TO SAVEPOINT $name"); + shift->next::method(@_); +} + +sub _exec_txn_commit { + local $SIG{__WARN__} = sigwarn_silencer( qr/commit ineffective/ ) + unless $DBD::SQLite::__DBIC_TXN_SYNC_SANE__; + + shift->next::method(@_); } sub _ping { diff --git a/t/752sqlite.t b/t/752sqlite.t index 70bd612f1..273a1ed5d 100644 --- a/t/752sqlite.t +++ b/t/752sqlite.t @@ -125,6 +125,46 @@ DDL } } +# test blank begin/svp/commit/begin cycle +warnings_are { + my $schema = DBICTest->init_schema( no_populate => 1 ); + my $rs = $schema->resultset('Artist'); + is ($rs->count, 0, 'Start with empty table'); + + for my $do_commit (1, 0) { + $schema->txn_begin; + $schema->svp_begin; + $schema->svp_rollback; + + $schema->svp_begin; + $schema->svp_rollback; + + $schema->svp_release; + + $schema->svp_begin; + + $schema->txn_rollback; + + $schema->txn_begin; + $schema->svp_begin; + $schema->svp_rollback; + + $schema->svp_begin; + $schema->svp_rollback; + + $schema->svp_release; + + $schema->svp_begin; + + $do_commit ? 
$schema->txn_commit : $schema->txn_rollback; + + is_deeply $schema->storage->savepoints, [], 'Savepoint names cleared away' + } + + $schema->txn_do(sub { + ok (1, 'all seems fine'); + }); +} [], 'No warnings emitted'; my $schema = DBICTest->init_schema(); From cf1d16d808be1dc3d7270f9f6072b832d0dbb327 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 08:29:46 +0200 Subject: [PATCH 029/548] Consolidate pg/mysql and standalone SQLite savepoint tests to run on all 3 A larger consolidation will take place later with T::WV, but all in due time --- t/752sqlite.t | 34 -------- t/{98savepoints.t => storage/savepoints.t} | 98 ++++++++++++++++------ 2 files changed, 74 insertions(+), 58 deletions(-) rename t/{98savepoints.t => storage/savepoints.t} (53%) diff --git a/t/752sqlite.t b/t/752sqlite.t index 273a1ed5d..008b7f11c 100644 --- a/t/752sqlite.t +++ b/t/752sqlite.t @@ -11,40 +11,6 @@ use lib qw(t/lib); use DBICTest; use DBIx::Class::_Util qw(sigwarn_silencer modver_gt_or_eq); -# savepoints test -{ - my $schema = DBICTest->init_schema(auto_savepoint => 1); - - my $ars = $schema->resultset('Artist'); - - # test two-phase commit and inner transaction rollback from nested transactions - $schema->txn_do(sub { - $ars->create({ name => 'in_outer_transaction' }); - $schema->txn_do(sub { - $ars->create({ name => 'in_inner_transaction' }); - }); - ok($ars->search({ name => 'in_inner_transaction' })->first, - 'commit from inner transaction visible in outer transaction'); - throws_ok { - $schema->txn_do(sub { - $ars->create({ name => 'in_inner_transaction_rolling_back' }); - die 'rolling back inner transaction'; - }); - } qr/rolling back inner transaction/, 'inner transaction rollback executed'; - $ars->create({ name => 'in_outer_transaction2' }); - }); - - ok($ars->search({ name => 'in_outer_transaction' })->first, - 'commit from outer transaction'); - ok($ars->search({ name => 'in_outer_transaction2' })->first, - 'second commit from outer transaction'); - ok($ars->search({ name => 'in_inner_transaction' })->first, - 'commit from inner transaction'); - is $ars->search({ name => 'in_inner_transaction_rolling_back' })->first, - undef, - 'rollback from inner transaction'; -} - # check that we work somewhat OK with braindead SQLite transaction handling # # As per https://metacpan.org/source/ADAMK/DBD-SQLite-1.37/lib/DBD/SQLite.pm#L921 diff --git a/t/98savepoints.t b/t/storage/savepoints.t similarity index 53% rename from t/98savepoints.t rename to t/storage/savepoints.t index a195b8580..fab7036cb 100644 --- a/t/98savepoints.t +++ b/t/storage/savepoints.t @@ -2,45 +2,56 @@ use strict; use warnings; use Test::More; +use Test::Exception; use DBIx::Class::Optional::Dependencies (); my $env2optdep = { - DBICTEST_PG => 'rdbms_pg', + DBICTEST_PG => 'test_rdbms_pg', DBICTEST_MYSQL => 'test_rdbms_mysql', }; -plan skip_all => join (' ', - 'Set $ENV{DBICTEST_PG_DSN} and/or $ENV{DBICTEST_MYSQL_DSN} _USER and _PASS to run these tests.', -) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep; - use lib qw(t/lib); use DBICTest; use DBICTest::Stats; my $schema; -for my $prefix (keys %$env2optdep) { SKIP: { - my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/; +for ('', keys %$env2optdep) { SKIP: { - skip ("Skipping tests with $prefix: set \$ENV{${prefix}_DSN} _USER and _PASS", 1) - unless $dsn; + my $prefix; - skip ("Testing with ${prefix}_DSN needs " . 
DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1) - unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix}); + if ($prefix = $_) { + my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/; - $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 }); + skip ("Skipping tests with $prefix: set \$ENV{${prefix}_DSN} _USER and _PASS", 1) + unless $dsn; - my $create_sql; - $schema->storage->ensure_connected; - if ($schema->storage->isa('DBIx::Class::Storage::DBI::Pg')) { - $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))"; - $schema->storage->dbh->do('SET client_min_messages=WARNING'); - } - elsif ($schema->storage->isa('DBIx::Class::Storage::DBI::mysql')) { - $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB"; + skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1) + unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix}); + + $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 }); + + my $create_sql; + $schema->storage->ensure_connected; + if ($schema->storage->isa('DBIx::Class::Storage::DBI::Pg')) { + $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))"; + $schema->storage->dbh->do('SET client_min_messages=WARNING'); + } + elsif ($schema->storage->isa('DBIx::Class::Storage::DBI::mysql')) { + $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB"; + } + else { + skip( 'Untested driver ' . $schema->storage, 1 ); + } + + $schema->storage->dbh_do (sub { + $_[1]->do('DROP TABLE IF EXISTS artist'); + $_[1]->do($create_sql); + }); } else { - skip( 'Untested driver ' . 
$schema->storage, 1 ); + $prefix = 'SQLite Internal DB'; + $schema = DBICTest->init_schema( no_populate => 1, auto_savepoint => 1 ); } note "Testing $prefix"; @@ -49,9 +60,6 @@ for my $prefix (keys %$env2optdep) { SKIP: { $schema->storage->debugobj($stats); $schema->storage->debug(1); - $schema->storage->dbh->do ('DROP TABLE IF EXISTS artist'); - $schema->storage->dbh->do ($create_sql); - $schema->resultset('Artist')->create({ name => 'foo' }); $schema->txn_begin; @@ -94,6 +102,7 @@ for my $prefix (keys %$env2optdep) { SKIP: { $arty->update({ name => 'gphat' }); $arty->discard_changes; cmp_ok($arty->name, 'eq', 'gphat', 'name changed'); + # Active: 0 1 2 # Rollback doesn't DESTROY the savepoint, it just rolls back to the value # at its conception @@ -104,6 +113,7 @@ for my $prefix (keys %$env2optdep) { SKIP: { # Active: 0 1 2 3 $schema->svp_begin('testing3'); $arty->update({ name => 'coryg' }); + # Active: 0 1 2 3 4 $schema->svp_begin('testing4'); $arty->update({ name => 'watson' }); @@ -111,11 +121,14 @@ for my $prefix (keys %$env2optdep) { SKIP: { # Release 3, which implicitly releases 4 # Active: 0 1 2 $schema->svp_release('testing3'); + $arty->discard_changes; cmp_ok($arty->name, 'eq', 'watson', 'release left data'); + # This rolls back savepoint 2 # Active: 0 1 2 $schema->svp_rollback; + $arty->discard_changes; cmp_ok($arty->name, 'eq', 'yourmom', 'rolled back to 2'); @@ -127,6 +140,8 @@ for my $prefix (keys %$env2optdep) { SKIP: { $schema->txn_commit; + is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' ); + # And now to see if txn_do will behave correctly $schema->txn_do (sub { my $artycp = $arty; @@ -157,6 +172,8 @@ for my $prefix (keys %$env2optdep) { SKIP: { $arty->update; }); + is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' ); + $arty->discard_changes; is($arty->name,'Miff','auto_savepoint worked'); @@ -167,6 +184,39 @@ for my $prefix (keys %$env2optdep) { SKIP: { cmp_ok($stats->{'SVP_ROLLBACK'},'==',5,'Correct number of savepoint rollbacks'); +### test originally written for SQLite exclusively (git blame -w -C -M) + # test two-phase commit and inner transaction rollback from nested transactions + my $ars = $schema->resultset('Artist'); + + $schema->txn_do(sub { + $ars->create({ name => 'in_outer_transaction' }); + $schema->txn_do(sub { + $ars->create({ name => 'in_inner_transaction' }); + }); + ok($ars->search({ name => 'in_inner_transaction' })->first, + 'commit from inner transaction visible in outer transaction'); + throws_ok { + $schema->txn_do(sub { + $ars->create({ name => 'in_inner_transaction_rolling_back' }); + die 'rolling back inner transaction'; + }); + } qr/rolling back inner transaction/, 'inner transaction rollback executed'; + $ars->create({ name => 'in_outer_transaction2' }); + }); + + is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' ); + + ok($ars->search({ name => 'in_outer_transaction' })->first, + 'commit from outer transaction'); + ok($ars->search({ name => 'in_outer_transaction2' })->first, + 'second commit from outer transaction'); + ok($ars->search({ name => 'in_inner_transaction' })->first, + 'commit from inner transaction'); + is $ars->search({ name => 'in_inner_transaction_rolling_back' })->first, + undef, + 'rollback from inner transaction'; + +### cleanupz $schema->storage->dbh->do ("DROP TABLE artist"); }} From 680e2ac9adda36197b8b880c25fa344376f5914e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 18:58:12 +0200 Subject: [PATCH 030/548] Excise live test 
left over after ac0c082542 No access to a live test system currently, and likely the test is incorrect anyway (no mention of significant parenthesis) --- t/751msaccess.t | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/t/751msaccess.t b/t/751msaccess.t index 8d8aa7e8b..bf4cdacfa 100644 --- a/t/751msaccess.t +++ b/t/751msaccess.t @@ -8,8 +8,6 @@ use Try::Tiny; use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj (); -use DBIC::SqlMakerTest; my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSACCESS_ODBC_${_}" } qw/DSN USER PASS/}; my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSACCESS_ADO_${_}" } qw/DSN USER PASS/}; @@ -144,12 +142,7 @@ EOF title => 'my track', }); - my ($sql, @bind); - my $joined_track = try { - local $schema->storage->{debug} = 1; - local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind); - $schema->resultset('Artist')->search({ artistid => $first_artistid, }, { @@ -162,27 +155,10 @@ EOF diag "Could not execute two-step left join: $_"; }; - s/^'//, s/'\z// for @bind; - - # test is duplicated in t/sqlmaker/msaccess.t, keep a duplicate here anyway, just to be safe - # -- ribasushi - is_same_sql_bind( - $sql, - \@bind, - 'SELECT [me].[artistid], [me].[name], [me].[rank], [me].[charfield], [tracks].[title] FROM ( ( [artist] [me] LEFT JOIN cd [cds] ON [cds].[artist] = [me].[artistid] ) LEFT JOIN [track] [tracks] ON [tracks].[cd] = [cds].[cdid] ) WHERE ( [artistid] = ? )', - [1], - 'correct SQL for two-step left join', - ); - is try { $joined_track->get_column('track_title') }, 'my track', 'two-step left join works'; - ($sql, @bind) = (); - $joined_artist = try { - local $schema->storage->{debug} = 1; - local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind); - $schema->resultset('Track')->search({ trackid => $track->trackid, }, { @@ -195,18 +171,6 @@ EOF diag "Could not execute two-step inner join: $_"; }; - s/^'//, s/'\z// for @bind; - - # test is duplicated in t/sqlmaker/msaccess.t, keep a duplicate here anyway, just to be safe - # -- ribasushi - is_same_sql_bind( - $sql, - \@bind, - 'SELECT [me].[trackid], [me].[cd], [me].[position], [me].[title], [me].[last_updated_on], [me].[last_updated_at], [artist].[name] FROM ( ( [track] [me] INNER JOIN cd [cd] ON [cd].[cdid] = [me].[cd] ) INNER JOIN [artist] [artist] ON [artist].[artistid] = [cd].[artist] ) WHERE ( [trackid] = ? )', - [$track->trackid], - 'correct SQL for two-step inner join', - ); - is try { $joined_artist->get_column('artist_name') }, 'foo', 'two-step inner join works'; From 2cfc22ddff9cb35524031dfc9d429d294b5e3d6e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 27 May 2014 19:17:26 +0200 Subject: [PATCH 031/548] Retire DBIC::DebugObj, replace with $dbictest_schema->is_executed_sql_bind() This cuts down on a lot of the silliness with debugcb/debugobj, and makes for more precise tests as a whole. 
Went through great pains to not disturb any existing tests, fingercross this is indeed the case Read under -w for sanity --- Makefile.PL | 2 +- t/18insert_default.t | 28 +-- t/80unique.t | 25 +-- t/85utf8.t | 53 +++--- t/93autocast.t | 46 ++--- t/count/count_rs.t | 70 +++---- t/lib/DBIC/DebugObj.pm | 50 ----- t/lib/DBICTest/BaseSchema.pm | 87 ++++++++- t/lib/DBICTest/Stats.pm | 63 ------- t/resultset/update_delete.t | 318 +++++++++++++++----------------- t/row/find_one_has_many.t | 32 ++-- t/search/preserve_original_rs.t | 3 +- t/sqlmaker/mysql.t | 88 ++++----- t/sqlmaker/quotes.t | 63 ++++--- t/storage/debug.t | 58 ++++-- t/storage/nobindvars.t | 42 ++--- t/storage/savepoints.t | 28 ++- 17 files changed, 473 insertions(+), 583 deletions(-) delete mode 100644 t/lib/DBIC/DebugObj.pm delete mode 100644 t/lib/DBICTest/Stats.pm diff --git a/Makefile.PL b/Makefile.PL index 492368ef2..108624a9f 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -84,7 +84,7 @@ my $runtime_requires = { 'namespace::clean' => '0.24', 'Path::Class' => '0.18', 'Scope::Guard' => '0.03', - 'SQL::Abstract' => '1.77', + 'SQL::Abstract' => '1.78', 'Try::Tiny' => '0.07', # Technically this is not a core dependency - it is only required diff --git a/t/18insert_default.t b/t/18insert_default.t index cd49cecb9..17657cc66 100644 --- a/t/18insert_default.t +++ b/t/18insert_default.t @@ -2,11 +2,8 @@ use strict; use warnings; use Test::More; -use Test::Exception; use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); $schema->storage->sql_maker->quote_char('"'); @@ -15,27 +12,12 @@ my $rs = $schema->resultset ('Artist'); my $last_obj = $rs->search ({}, { order_by => { -desc => 'artistid' }, rows => 1})->single; my $last_id = $last_obj ? 
$last_obj->artistid : 0; - -my ($sql, @bind); -my $orig_debugobj = $schema->storage->debugobj; -my $orig_debug = $schema->storage->debug; - -$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) ); -$schema->storage->debug (1); - my $obj; -lives_ok { $obj = $rs->create ({}) } 'Default insert successful'; - -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, - 'INSERT INTO "artist" DEFAULT VALUES', - [], - 'Default-value insert correct SQL', -); +$schema->is_executed_sql_bind( sub { + $obj = $rs->create ({}) +}, [[ + 'INSERT INTO "artist" DEFAULT VALUES' +]], 'Default-value insert correct SQL' ); ok ($obj, 'Insert defaults ( $rs->create ({}) )' ); diff --git a/t/80unique.t b/t/80unique.t index ba5a1817b..b38022504 100644 --- a/t/80unique.t +++ b/t/80unique.t @@ -6,8 +6,6 @@ use Test::Exception; use Test::Warn; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; -use DBIC::DebugObj; my $schema = DBICTest->init_schema(); @@ -228,23 +226,12 @@ is($row->baz, 3, 'baz is correct'); { my $artist = $schema->resultset('Artist')->find(1); - my ($sql, @bind); - my $old_debugobj = $schema->storage->debugobj; - my $old_debug = $schema->storage->debug; - $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)), - $schema->storage->debug(1); - - $artist->discard_changes; - - is_same_sql_bind ( - $sql, - \@bind, - 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?', - [qw/'1'/], - ); - - $schema->storage->debug($old_debug); - $schema->storage->debugobj($old_debugobj); + $schema->is_executed_sql_bind( sub { $artist->discard_changes }, [ + [ + 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?', + [ { dbic_colname => "me.artistid", sqlt_datatype => "integer" } => 1 ], + ] + ], 'Expected query on discard_changes'); } { diff --git a/t/85utf8.t b/t/85utf8.t index a07e42ade..64b49941e 100644 --- a/t/85utf8.t +++ b/t/85utf8.t @@ -5,7 +5,6 @@ use Test::More; use Test::Warn; use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj; { package A::Comp; @@ -97,24 +96,22 @@ my $bytestream_title = my $utf8_title = "weird \x{466} stuff"; utf8::encode($bytestream_title); cmp_ok ($bytestream_title, 'ne', $utf8_title, 'unicode/raw differ (sanity check)'); -my $storage = $schema->storage; -my ($sql, @bind); -my $debugobj = DBIC::DebugObj->new (\$sql, \@bind); -my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj); -$storage->debugobj ($debugobj); -$storage->debug (1); - -my $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } ); - -$storage->debugobj ($orig_debugobj); -$storage->debug ($orig_debug); - -# bind values are always alphabetically ordered by column, thus [1] -# the single quotes are an artefact of the debug-system +my $cd; { local $TODO = "This has been broken since rev 1191, Mar 2006"; - is ($bind[1], "'$bytestream_title'", 'INSERT: raw bytes sent to the database'); -} + + $schema->is_executed_sql_bind( sub { + $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } ) + }, [[ + 'INSERT INTO cd ( artist, title, year) VALUES ( ?, ?, ? 
)', + [ { dbic_colname => "artist", sqlt_datatype => "integer" } + => 1 ], + [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } + => $bytestream_title ], + [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } + => 2048 ], + ]], 'INSERT: raw bytes sent to the database' ); +}; # this should be using the cursor directly, no inflation/processing of any sort my ($raw_db_title) = $schema->resultset('CD') @@ -149,16 +146,20 @@ ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'reloaded utf8-less title' ); $bytestream_title = $utf8_title = "something \x{219} else"; utf8::encode($bytestream_title); +$schema->is_executed_sql_bind( sub { + $cd->update ({ title => $utf8_title }); +}, [ + [ 'BEGIN' ], + [ + 'UPDATE cd SET title = ? WHERE cdid = ?', + [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } + => $bytestream_title ], + [ { dbic_colname => "cdid", sqlt_datatype => "integer" } + => 6 ], + ], + [ 'COMMIT' ], +], 'UPDATE: raw bytes sent to the database'); -$storage->debugobj ($debugobj); -$storage->debug (1); - -$cd->update ({ title => $utf8_title }); - -$storage->debugobj ($orig_debugobj); -$storage->debug ($orig_debug); - -is ($bind[0], "'$bytestream_title'", 'UPDATE: raw bytes sent to the database'); ($raw_db_title) = $schema->resultset('CD') ->search ($cd->ident_condition) ->get_column('title') diff --git a/t/93autocast.t b/t/93autocast.t index 95d2b9210..49c1f5710 100644 --- a/t/93autocast.t +++ b/t/93autocast.t @@ -4,8 +4,6 @@ use warnings; use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; -use DBIC::DebugObj; { # Fake storage driver for sqlite with autocast package DBICTest::SQLite::AutoCast; @@ -37,22 +35,18 @@ my $rs = $schema->resultset ('CD')->search ({ 'me.single_track' => \[ '= ?', [ single_track => 1 ] ], }, { join => 'tracks' }); -my ($sql, @bind); -my $debugobj = DBIC::DebugObj->new (\$sql, \@bind); -my $storage = $schema->storage; -my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj); -$storage->debugobj ($debugobj); -$storage->debug (1); - -# the quoting is a debugobj thing, not dbic-internals -my $bind = [ map { "'$_'" } qw/ - 5 1 2009 4 -/]; +my @bind = ( + [ { dbic_colname => "cdid", sqlt_datatype => "integer" } + => 5 ], + [ { dbic_colname => "single_track", sqlt_datatype => "integer" } + => 1 ], + [ { dbic_colname => "tracks.last_updated_on", sqlt_datatype => "datetime" } + => 2009 ], + [ { dbic_colname => "tracks.position", sqlt_datatype => "int" } + => 4 ], +); -$rs->all; -is_same_sql_bind ( - $sql, - \@bind, +$schema->is_executed_sql_bind( sub { $rs->all }, [[ ' SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me @@ -64,16 +58,12 @@ is_same_sql_bind ( AND tracks.last_updated_on < ? AND tracks.position = ? ', - $bind, - 'expected sql with casting off', -); + @bind, +]], 'expected sql with casting off' ); $schema->storage->auto_cast (1); -$rs->all; -is_same_sql_bind ( - $sql, - \@bind, +$schema->is_executed_sql_bind( sub { $rs->all }, [[ ' SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me @@ -85,11 +75,7 @@ is_same_sql_bind ( AND tracks.last_updated_on < CAST (? AS DateTime) AND tracks.position = ? 
', - $bind, - 'expected sql with casting on', -); - -$storage->debugobj ($orig_debugobj); -$storage->debug ($orig_debug); + @bind, +]], 'expected sql with casting on' ); done_testing; diff --git a/t/count/count_rs.t b/t/count/count_rs.t index 83b625755..5883daf8c 100644 --- a/t/count/count_rs.t +++ b/t/count/count_rs.t @@ -6,7 +6,6 @@ use lib qw(t/lib); use Test::More; use DBICTest; use DBIC::SqlMakerTest; -use DBIC::DebugObj; use DBIx::Class::SQLMaker::LimitDialects; my ($ROWS, $OFFSET) = ( @@ -23,27 +22,25 @@ my $schema = DBICTest->init_schema(); { position => [1,2] }, { prefetch => [qw/disc lyrics/], rows => 3, offset => 8 }, ); - is ($rs->all, 2, 'Correct number of objects'); - - - my ($sql, @bind); - $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); - $schema->storage->debug(1); + my @wherebind = ( + [ { sqlt_datatype => 'int', dbic_colname => 'position' } + => 1 ], + [ { sqlt_datatype => 'int', dbic_colname => 'position' } + => 2 ], + ); - is ($rs->count, 2, 'Correct count via count()'); + is ($rs->all, 2, 'Correct number of objects'); - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + is ($rs->count, 2, 'Correct count via count()'); + }, [[ 'SELECT COUNT( * ) FROM cd me JOIN track tracks ON tracks.cd = me.cdid JOIN cd disc ON disc.cdid = tracks.cd WHERE ( ( position = ? OR position = ? ) ) - ', - [ qw/'1' '2'/ ], - 'count softlimit applied', - ); + ', @wherebind + ]], 'count softlimit applied'); my $crs = $rs->count_rs; is ($crs->next, 2, 'Correct count via count_rs()'); @@ -60,14 +57,7 @@ my $schema = DBICTest->init_schema(); LIMIT ? OFFSET ? ) tracks )', - [ - [ { sqlt_datatype => 'int', dbic_colname => 'position' } - => 1 ], - [ { sqlt_datatype => 'int', dbic_colname => 'position' } - => 2 ], - [$ROWS => 3], - [$OFFSET => 8], - ], + [ @wherebind, [$ROWS => 3], [$OFFSET => 8] ], 'count_rs db-side limit applied', ); } @@ -79,17 +69,18 @@ my $schema = DBICTest->init_schema(); { 'tracks.position' => [1,2] }, { prefetch => [qw/tracks artist/], rows => 3, offset => 4 }, ); - is ($rs->all, 1, 'Correct number of objects'); - - my ($sql, @bind); - $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); - $schema->storage->debug(1); + my @wherebind = ( + [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' } + => 1 ], + [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' } + => 2 ], + ); - is ($rs->count, 1, 'Correct count via count()'); + is ($rs->all, 1, 'Correct number of objects'); - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + is ($rs->count, 1, 'Correct count via count()'); + }, [ [ 'SELECT COUNT( * ) FROM ( SELECT cds.cdid @@ -100,10 +91,8 @@ my $schema = DBICTest->init_schema(); WHERE tracks.position = ? OR tracks.position = ? GROUP BY cds.cdid ) cds - ', - [ qw/'1' '2'/ ], - 'count softlimit applied', - ); + ', @wherebind + ]], 'count softlimit applied' ); my $crs = $rs->count_rs; is ($crs->next, 1, 'Correct count via count_rs()'); @@ -122,14 +111,7 @@ my $schema = DBICTest->init_schema(); LIMIT ? OFFSET ? 
) cds )', - [ - [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' } - => 1 ], - [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' } - => 2 ], - [ $ROWS => 3], - [$OFFSET => 4], - ], + [ @wherebind, [$ROWS => 3], [$OFFSET => 4], ], 'count_rs db-side limit applied', ); } diff --git a/t/lib/DBIC/DebugObj.pm b/t/lib/DBIC/DebugObj.pm deleted file mode 100644 index c43bae9b7..000000000 --- a/t/lib/DBIC/DebugObj.pm +++ /dev/null @@ -1,50 +0,0 @@ -package DBIC::DebugObj; - -use strict; -use warnings; - -use Class::C3; - -use base qw/DBIx::Class::Storage::Statistics Exporter Class::Accessor::Fast/; - -__PACKAGE__->mk_accessors( qw/dbictest_sql_ref dbictest_bind_ref/ ); - - -=head2 new(PKG, SQL_REF, BIND_REF, ...) - -Creates a new instance that on subsequent queries will store -the generated SQL to the scalar pointed to by SQL_REF and bind -values to the array pointed to by BIND_REF. - -=cut - -sub new { - my $pkg = shift; - my $sql_ref = shift; - my $bind_ref = shift; - - my $self = $pkg->SUPER::new(@_); - - $self->debugfh(undef); - - $self->dbictest_sql_ref($sql_ref); - $self->dbictest_bind_ref($bind_ref || []); - - return $self; -} - -sub query_start { - my $self = shift; - - (${$self->dbictest_sql_ref}, @{$self->dbictest_bind_ref}) = @_; -} - -sub query_end { } - -sub txn_begin { } - -sub txn_commit { } - -sub txn_rollback { } - -1; diff --git a/t/lib/DBICTest/BaseSchema.pm b/t/lib/DBICTest/BaseSchema.pm index 0e1e5e232..ae8d74a69 100644 --- a/t/lib/DBICTest/BaseSchema.pm +++ b/t/lib/DBICTest/BaseSchema.pm @@ -3,7 +3,6 @@ package #hide from pause use strict; use warnings; - use base qw(DBICTest::Base DBIx::Class::Schema); use Fcntl qw(:DEFAULT :seek :flock); @@ -12,6 +11,92 @@ use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistr use DBICTest::Util 'local_umask'; use namespace::clean; +{ + package # moar hide + DBICTest::SQLTracerObj; + use base 'DBIx::Class::Storage::Statistics'; + + sub query_start { push @{$_[0]{sqlbinds}}, [ ($_[1] =~ /^\s*(\S+)/)[0], [ $_[1], @{ $_[2]||[] } ] ] } + + # who the hell came up with this API >:( + for my $txn (qw(begin rollback commit)) { + no strict 'refs'; + *{"txn_$txn"} = sub { push @{$_[0]{sqlbinds}}, [ uc $txn => [ uc $txn ] ] }; + } + + sub svp_begin { push @{$_[0]{sqlbinds}}, [ SAVEPOINT => [ "SAVEPOINT $_[1]" ] ] } + sub svp_release { push @{$_[0]{sqlbinds}}, [ RELEASE_SAVEPOINT => [ "RELEASE $_[1]" ] ] } + sub svp_rollback { push @{$_[0]{sqlbinds}}, [ ROLLBACK_TO_SAVEPOINT => [ "ROLLBACK TO $_[1]" ] ] } + +} + +sub capture_executed_sql_bind { + my ($self, $cref) = @_; + + $self->throw_exception("Expecting a coderef to run") unless ref $cref eq 'CODE'; + + # hack around stupid, stupid API + no warnings 'redefine'; + local *DBIx::Class::Storage::DBI::_format_for_trace = sub { $_[1] }; + Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO; + + local $self->storage->{debugcb}; + local $self->storage->{debugobj} = my $tracer_obj = DBICTest::SQLTracerObj->new; + local $self->storage->{debug} = 1; + + + $cref->(); + + return $tracer_obj->{sqlbinds} || []; +} + +sub is_executed_sql_bind { + my ($self, $cref, $sqlbinds, $msg) = @_; + + local $Test::Builder::Level = $Test::Builder::Level + 1; + + $self->throw_exception("Expecting an arrayref of SQL/Bind pairs") unless ref $sqlbinds eq 'ARRAY'; + + my @expected = @$sqlbinds; + + my @got = map { $_->[1] } @{ $self->capture_executed_sql_bind($cref) }; + + + return Test::Builder->new->ok(1, $msg || "No queries executed while running $cref") + if !@got and 
!@expected; + + require SQL::Abstract::Test; + my $ret = 1; + while (@expected or @got) { + my $left = shift @got; + my $right = shift @expected; + + # allow the right side to "simplify" the entire shebang + if ($left and $right) { + $left = [ @$left ]; + for my $i (1..$#$right) { + if ( + ! ref $right->[$i] + and + ref $left->[$i] eq 'ARRAY' + and + @{$left->[$i]} == 2 + ) { + $left->[$i] = $left->[$i][1] + } + } + } + + $ret &= SQL::Abstract::Test::is_same_sql_bind( + \( $left || [] ), + \( $right || [] ), + $msg, + ); + } + + return $ret; +} + our $locker; END { # we need the $locker to be referenced here for delayed destruction diff --git a/t/lib/DBICTest/Stats.pm b/t/lib/DBICTest/Stats.pm deleted file mode 100644 index 5a4544fe5..000000000 --- a/t/lib/DBICTest/Stats.pm +++ /dev/null @@ -1,63 +0,0 @@ -package DBICTest::Stats; -use strict; -use warnings; - -use base qw/DBIx::Class::Storage::Statistics/; - -sub txn_begin { - my $self = shift; - - $self->{'TXN_BEGIN'}++; - return $self->{'TXN_BEGIN'}; -} - -sub txn_rollback { - my $self = shift; - - $self->{'TXN_ROLLBACK'}++; - return $self->{'TXN_ROLLBACK'}; -} - -sub txn_commit { - my $self = shift; - - $self->{'TXN_COMMIT'}++; - return $self->{'TXN_COMMIT'}; -} - -sub svp_begin { - my ($self, $name) = @_; - - $self->{'SVP_BEGIN'}++; - return $self->{'SVP_BEGIN'}; -} - -sub svp_release { - my ($self, $name) = @_; - - $self->{'SVP_RELEASE'}++; - return $self->{'SVP_RELEASE'}; -} - -sub svp_rollback { - my ($self, $name) = @_; - - $self->{'SVP_ROLLBACK'}++; - return $self->{'SVP_ROLLBACK'}; -} - -sub query_start { - my ($self, $string, @bind) = @_; - - $self->{'QUERY_START'}++; - return $self->{'QUERY_START'}; -} - -sub query_end { - my ($self, $string) = @_; - - $self->{'QUERY_END'}++; - return $self->{'QUERY_START'}; -} - -1; diff --git a/t/resultset/update_delete.t b/t/resultset/update_delete.t index 917e12f6d..ee32717dd 100644 --- a/t/resultset/update_delete.t +++ b/t/resultset/update_delete.t @@ -12,16 +12,9 @@ BEGIN { } use DBICTest; -use DBIC::DebugObj; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema; -my ($sql, @bind); -my $debugobj = DBIC::DebugObj->new (\$sql, \@bind); -my $orig_debugobj = $schema->storage->debugobj; -my $orig_debug = $schema->storage->debug; - my $tkfks = $schema->resultset('FourKeys_to_TwoKeys'); my ($fa, $fb, $fc) = $tkfks->related_resultset ('fourkeys')->populate ([ @@ -64,23 +57,16 @@ my $fks = $schema->resultset ('FourKeys')->search ( ); is ($fks->count, 4, 'Joined FourKey count correct (2x2)'); - -$schema->storage->debugobj ($debugobj); -$schema->storage->debug (1); -$fks->update ({ read_count => \ 'read_count + 1' }); -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, +$schema->is_executed_sql_bind( sub { + $fks->update ({ read_count => \ 'read_count + 1' }) +}, [[ 'UPDATE fourkeys SET read_count = read_count + 1 WHERE ( ( ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? 
) ) ', - [ ("'1'", "'2'") x 4, "'c'" ], - 'Correct update-SQL with multijoin with pruning', -); + (1, 2) x 4, + 'c', +]], 'Correct update-SQL with multijoin with pruning' ); is ($fa->discard_changes->read_count, 11, 'Update ran only once on discard-join resultset'); is ($fb->discard_changes->read_count, 21, 'Update ran only once on discard-join resultset'); @@ -88,40 +74,44 @@ is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); # make the multi-join stick my $fks_multi = $fks->search({ 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } }); - -$schema->storage->debugobj ($debugobj); -$schema->storage->debug (1); -$fks_multi->update ({ read_count => \ 'read_count + 1' }); -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, - 'UPDATE fourkeys - SET read_count = read_count + 1 - WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )', - [ map { "'$_'" } ( (1) x 4, (2) x 4 ) ], - 'Correct update-SQL with multijoin without pruning', -); +$schema->is_executed_sql_bind( sub { + $fks_multi->update ({ read_count => \ 'read_count + 1' }) +}, [ + [ 'BEGIN' ], + [ + 'SELECT me.foo, me.bar, me.hello, me.goodbye + FROM fourkeys me + LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys + ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello + WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? + GROUP BY me.foo, me.bar, me.hello, me.goodbye + ', + (1, 2) x 2, + 666, + (1, 2) x 2, + 'c', + ], + [ + 'UPDATE fourkeys + SET read_count = read_count + 1 + WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? 
) + ', + ( (1) x 4, (2) x 4 ), + ], + [ 'COMMIT' ], +], 'Correct update-SQL with multijoin without pruning' ); is ($fa->discard_changes->read_count, 12, 'Update ran only once on joined resultset'); is ($fb->discard_changes->read_count, 22, 'Update ran only once on joined resultset'); is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); # try the same sql with forced multicolumn in -$schema->storage->_use_multicolumn_in (1); -$schema->storage->debugobj ($debugobj); -$schema->storage->debug (1); -throws_ok { $fks_multi->update ({ read_count => \ 'read_count + 1' }) } # this can't actually execute, we just need the "as_query" - qr/\QDBI Exception:/ or do { $sql = ''; @bind = () }; -$schema->storage->_use_multicolumn_in (undef); -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, +$schema->is_executed_sql_bind( sub { + local $schema->storage->{_use_multicolumn_in} = 1; + + # this can't actually execute on sqlite + eval { $fks_multi->update ({ read_count => \ 'read_count + 1' }) }; +}, [[ 'UPDATE fourkeys SET read_count = read_count + 1 WHERE ( @@ -137,39 +127,44 @@ is_same_sql_bind ( ) ) ', + ( 1, 2) x 2, + 666, + ( 1, 2) x 2, + 'c', +]], 'Correct update-SQL with multicolumn in support' ); + +$schema->is_executed_sql_bind( sub { + $fks->search({ 'twokeys.artist' => { '!=' => 666 } })->update({ read_count => \ 'read_count + 1' }); +}, [ + [ 'BEGIN' ], [ - ("'1'", "'2'") x 2, - "'666'", - ("'1'", "'2'") x 2, - "'c'", + 'SELECT me.foo, me.bar, me.hello, me.goodbye + FROM fourkeys me + LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys + ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello + LEFT JOIN twokeys twokeys + ON twokeys.artist = fourkeys_to_twokeys.t_artist AND twokeys.cd = fourkeys_to_twokeys.t_cd + WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? AND twokeys.artist != ? + GROUP BY me.foo, me.bar, me.hello, me.goodbye + ', + (1, 2) x 4, + 'c', + 666, ], - 'Correct update-SQL with multicolumn in support', -); - -# make a *premultiplied* join stick -my $fks_premulti = $fks->search({ 'twokeys.artist' => { '!=' => 666 } }); - -$schema->storage->debugobj ($debugobj); -$schema->storage->debug (1); -$fks_premulti->update ({ read_count => \ 'read_count + 1' }); -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, - 'UPDATE fourkeys - SET read_count = read_count + 1 - WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )', - [ map { "'$_'" } ( (1) x 4, (2) x 4 ) ], - 'Correct update-SQL with premultiplied restricting join without pruning', -); + [ + 'UPDATE fourkeys + SET read_count = read_count + 1 + WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? 
) + ', + ( (1) x 4, (2) x 4 ), + ], + [ 'COMMIT' ], +], 'Correct update-SQL with premultiplied restricting join without pruning' ); is ($fa->discard_changes->read_count, 13, 'Update ran only once on joined resultset'); is ($fb->discard_changes->read_count, 23, 'Update ran only once on joined resultset'); is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); - # # Make sure multicolumn in or the equivalent functions correctly # @@ -253,43 +248,34 @@ is ($tkfks->count, $tkfk_cnt -= 1, 'Only one row deleted'); # check with sql-equality, as sqlite will accept most bad sql just fine -$schema->storage->debugobj ($debugobj); -$schema->storage->debug (1); - { my $rs = $schema->resultset('CD')->search( { 'me.year' => { '!=' => 2010 } }, ); - $rs->search({}, { join => 'liner_notes' })->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $rs->search({}, { join => 'liner_notes' })->delete; + }, [[ 'DELETE FROM cd WHERE ( year != ? )', - ["'2010'"], - 'Non-restricting multijoins properly thrown out' - ); + 2010, + ]], 'Non-restricting multijoins properly thrown out' ); - $rs->search({}, { prefetch => 'liner_notes' })->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $rs->search({}, { prefetch => 'liner_notes' })->delete; + }, [[ 'DELETE FROM cd WHERE ( year != ? )', - ["'2010'"], - 'Non-restricting multiprefetch thrown out' - ); + 2010, + ]], 'Non-restricting multiprefetch thrown out' ); - $rs->search({}, { prefetch => 'artist' })->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $rs->search({}, { prefetch => 'artist' })->delete; + }, [[ 'DELETE FROM cd WHERE ( cdid IN ( SELECT me.cdid FROM cd me JOIN artist artist ON artist.artistid = me.artist WHERE ( me.year != ? ) ) )', - ["'2010'"], - 'Restricting prefetch left in, selector thrown out' - ); + 2010, + ]], 'Restricting prefetch left in, selector thrown out'); - # switch artist and cd to fully qualified table names - # make sure nothing is stripped out +### switch artist and cd to fully qualified table names +### make sure nothing is stripped out my $cd_rsrc = $schema->source('CD'); $cd_rsrc->name('main.cd'); $cd_rsrc->relationship_info($_)->{attrs}{cascade_delete} = 0 @@ -300,85 +286,80 @@ $schema->storage->debug (1); $art_rsrc->relationship_info($_)->{attrs}{cascade_delete} = 0 for $art_rsrc->relationships; - $rs->delete; - is_same_sql_bind ( - $sql, - \@bind, - 'DELETE FROM main.cd WHERE ( year != ? )', - ["'2010'"], - 'delete with fully qualified table name' - ); + $schema->is_executed_sql_bind( sub { + $rs->delete + }, [[ + 'DELETE FROM main.cd WHERE year != ?', + 2010, + ]], 'delete with fully qualified table name' ); $rs->create({ title => 'foo', artist => 1, year => 2000 }); - $rs->delete_all; - is_same_sql_bind ( - $sql, - \@bind, - 'DELETE FROM main.cd WHERE ( cdid = ? )', - ["'1'"], - 'delete_all with fully qualified table name' - ); - - $rs->create({ cdid => 42, title => 'foo', artist => 2, year => 2000 }); - $rs->find(42)->delete; - is_same_sql_bind ( - $sql, - \@bind, - 'DELETE FROM main.cd WHERE ( cdid = ? )', - ["'42'"], - 'delete of object from table with fully qualified name' - ); + $schema->is_executed_sql_bind( sub { + $rs->delete_all + }, [ + [ 'BEGIN' ], + [ + 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM main.cd me WHERE me.year != ?', + 2010, + ], + [ + 'DELETE FROM main.cd WHERE ( cdid = ? 
)', + 1, + ], + [ 'COMMIT' ], + ], 'delete_all with fully qualified table name' ); $rs->create({ cdid => 42, title => 'foo', artist => 2, year => 2000 }); - $rs->find(42)->related_resultset('artist')->delete; - is_same_sql_bind ( - $sql, - \@bind, + my $cd42 = $rs->find(42); + + $schema->is_executed_sql_bind( sub { + $cd42->delete + }, [[ + 'DELETE FROM main.cd WHERE cdid = ?', + 42, + ]], 'delete of object from table with fully qualified name' ); + + $schema->is_executed_sql_bind( sub { + $cd42->related_resultset('artist')->delete + }, [[ 'DELETE FROM main.artist WHERE ( artistid IN ( SELECT me.artistid FROM main.artist me WHERE ( me.artistid = ? ) ) )', - ["'2'"], - 'delete of related object from scalarref fully qualified named table', - ); + 2, + ]], 'delete of related object from scalarref fully qualified named table' ); + + my $art3 = $schema->resultset('Artist')->find(3); - $schema->resultset('Artist')->find(3)->related_resultset('cds')->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $art3->related_resultset('cds')->delete; + }, [[ 'DELETE FROM main.cd WHERE ( artist = ? )', - ["'3'"], - 'delete of related object from fully qualified named table', - ); + 3, + ]], 'delete of related object from fully qualified named table' ); - $schema->resultset('Artist')->find(3)->cds_unordered->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $art3->cds_unordered->delete; + }, [[ 'DELETE FROM main.cd WHERE ( artist = ? )', - ["'3'"], - 'delete of related object from fully qualified named table via relaccessor', - ); + 3, + ]], 'delete of related object from fully qualified named table via relaccessor' ); - $rs->search({}, { prefetch => 'artist' })->delete; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $rs->search({}, { prefetch => 'artist' })->delete; + }, [[ 'DELETE FROM main.cd WHERE ( cdid IN ( SELECT me.cdid FROM main.cd me JOIN main.artist artist ON artist.artistid = me.artist WHERE ( me.year != ? 
) ) )', - ["'2010'"], - 'delete with fully qualified table name and subquery correct' - ); + 2010, + ]], 'delete with fully qualified table name and subquery correct' ); # check that as_subselect_rs works ok # inner query is untouched, then a selector # and an IN condition - $schema->resultset('CD')->search({ - 'me.cdid' => 1, - 'artist.name' => 'partytimecity', - }, { - join => 'artist', - })->as_subselect_rs->delete; - - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + $schema->resultset('CD')->search({ + 'me.cdid' => 1, + 'artist.name' => 'partytimecity', + }, { + join => 'artist', + })->as_subselect_rs->delete; + }, [[ ' DELETE FROM main.cd WHERE ( @@ -393,12 +374,9 @@ $schema->storage->debug (1); ) ) ', - ["'partytimecity'", "'1'"], - 'Delete from as_subselect_rs works correctly' - ); + 'partytimecity', + 1, + ]], 'Delete from as_subselect_rs works correctly' ); } -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - done_testing; diff --git a/t/row/find_one_has_many.t b/t/row/find_one_has_many.t index 5e1e953af..ea7767f73 100644 --- a/t/row/find_one_has_many.t +++ b/t/row/find_one_has_many.t @@ -4,8 +4,6 @@ use warnings; use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); @@ -15,20 +13,20 @@ $schema->resultset('CD')->delete; my $artist = $schema->resultset("Artist")->create({ artistid => 21, name => 'Michael Jackson', rank => 20 }); my $cd = $artist->create_related('cds', { year => 1975, title => 'Compilation from 1975' }); -my ($sql, @bind); -local $schema->storage->{debug} = 1; -local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind); - -my $find_cd = $artist->find_related('cds',{title => 'Compilation from 1975'}); - -s/^'//, s/'\z// for @bind; # why does DBIC::DebugObj not do this? - -is_same_sql_bind ( - $sql, - \@bind, - 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( me.artist = ? AND me.title = ? ) ) ORDER BY year ASC', - [21, 'Compilation from 1975'], - 'find_related only uses foreign key condition once', -); +$schema->is_executed_sql_bind(sub { + my $find_cd = $artist->find_related('cds',{title => 'Compilation from 1975'}); +}, [ + [ + ' SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track + FROM cd me + WHERE me.artist = ? AND me.title = ? 
+ ORDER BY year ASC + ', + [ { dbic_colname => "me.artist", sqlt_datatype => "integer" } + => 21 ], + [ { dbic_colname => "me.title", sqlt_datatype => "varchar", sqlt_size => 100 } + => "Compilation from 1975" ], + ] +], 'find_related only uses foreign key condition once' ); done_testing; diff --git a/t/search/preserve_original_rs.t b/t/search/preserve_original_rs.t index cb9a30624..04dc9a870 100644 --- a/t/search/preserve_original_rs.t +++ b/t/search/preserve_original_rs.t @@ -7,9 +7,8 @@ use Test::Exception; use lib qw(t/lib); use DBICTest; use DBIC::SqlMakerTest; -use DBIC::DebugObj; -use Storable qw/dclone/; +use Storable 'dclone'; my $schema = DBICTest->init_schema(); diff --git a/t/sqlmaker/mysql.t b/t/sqlmaker/mysql.t index b5ce8a59a..5b3f33039 100644 --- a/t/sqlmaker/mysql.t +++ b/t/sqlmaker/mysql.t @@ -7,7 +7,6 @@ use lib qw(t/lib); use DBICTest; use DBICTest::Schema; use DBIC::SqlMakerTest; -use DBIC::DebugObj; my $schema = DBICTest::Schema->connect (DBICTest->_database, { quote_char => '`' }); # cheat @@ -17,54 +16,39 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); # check that double-subqueries are properly wrapped { - my ($sql, @bind); - my $debugobj = DBIC::DebugObj->new (\$sql, \@bind); - my $orig_debugobj = $schema->storage->debugobj; - my $orig_debug = $schema->storage->debug; - - $schema->storage->debugobj ($debugobj); - $schema->storage->debug (1); - # the expected SQL may seem wastefully nonsensical - this is due to # CD's tablename being \'cd', which triggers the "this can be anything" # mode, and forces a subquery. This in turn forces *another* subquery # because mysql is being mysql # Also we know it will fail - never deployed. All we care about is the - # SQL to compare - eval { $schema->resultset ('CD')->update({ genreid => undef }) }; - is_same_sql_bind ( - $sql, - \@bind, + # SQL to compare, hence the eval + $schema->is_executed_sql_bind( sub { + eval { $schema->resultset ('CD')->update({ genreid => undef }) } + },[[ 'UPDATE cd SET `genreid` = ? 
WHERE `cdid` IN ( SELECT * FROM ( SELECT `me`.`cdid` FROM cd `me` ) `_forced_double_subquery` )', - [ 'NULL' ], - 'Correct update-SQL with double-wrapped subquery', - ); + [ { dbic_colname => "genreid", sqlt_datatype => "integer" } => undef ], + ]], 'Correct update-SQL with double-wrapped subquery' ); # same comment as above - eval { $schema->resultset ('CD')->delete }; - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + eval { $schema->resultset ('CD')->delete } + }, [[ 'DELETE FROM cd WHERE `cdid` IN ( SELECT * FROM ( SELECT `me`.`cdid` FROM cd `me` ) `_forced_double_subquery` )', - [], - 'Correct delete-SQL with double-wrapped subquery', - ); + ]], 'Correct delete-SQL with double-wrapped subquery' ); # and a couple of really contrived examples (we test them live in t/71mysql.t) my $rs = $schema->resultset('Artist')->search({ name => { -like => 'baby_%' } }); my ($count_sql, @count_bind) = @${$rs->count_rs->as_query}; - eval { - $schema->resultset('Artist')->search( - { artistid => { - -in => $rs->get_column('artistid') - ->as_query - } }, - )->update({ name => \[ "CONCAT( `name`, '_bell_out_of_', $count_sql )", @count_bind ] }); - }; - - is_same_sql_bind ( - $sql, - \@bind, + $schema->is_executed_sql_bind( sub { + eval { + $schema->resultset('Artist')->search( + { artistid => { + -in => $rs->get_column('artistid') + ->as_query + } }, + )->update({ name => \[ "CONCAT( `name`, '_bell_out_of_', $count_sql )", @count_bind ] }); + } + }, [[ q( UPDATE `artist` SET `name` = CONCAT(`name`, '_bell_out_of_', ( @@ -84,18 +68,18 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); WHERE `name` LIKE ? ) `_forced_double_subquery` ) ), - [ ("'baby_%'") x 2 ], - ); - - eval { - $schema->resultset('CD')->search_related('artist', - { 'artist.name' => { -like => 'baby_with_%' } } - )->delete - }; - - is_same_sql_bind ( - $sql, - \@bind, + ( [ { dbic_colname => "name", sqlt_datatype => "varchar", sqlt_size => 100 } + => 'baby_%' ] + ) x 2 + ]]); + + $schema->is_executed_sql_bind( sub { + eval { + $schema->resultset('CD')->search_related('artist', + { 'artist.name' => { -like => 'baby_with_%' } } + )->delete + } + }, [[ q( DELETE FROM `artist` WHERE `artistid` IN ( @@ -109,11 +93,9 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' ); ) `_forced_double_subquery` ) ), - [ "'baby_with_%'" ], - ); - - $schema->storage->debugobj ($orig_debugobj); - $schema->storage->debug ($orig_debug); + [ { dbic_colname => "artist.name", sqlt_datatype => "varchar", sqlt_size => 100 } + => 'baby_with_%' ], + ]] ); } # Test support for straight joins diff --git a/t/sqlmaker/quotes.t b/t/sqlmaker/quotes.t index 84f2a3f97..d3a8c8f93 100644 --- a/t/sqlmaker/quotes.t +++ b/t/sqlmaker/quotes.t @@ -6,27 +6,29 @@ use Test::More; use lib qw(t/lib); use DBICTest; use DBIC::SqlMakerTest; -use DBIC::DebugObj; -my $schema = DBICTest->init_schema(); +my $schema = DBICTest->init_schema( no_deploy => 1 ); $schema->connection( @{ $schema->storage->_dbi_connect_info }, { AutoCommit => 1, quote_char => [qw/[ ]/] } ); -my ($sql, @bind); -$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); -$schema->storage->debug(1); +my $rs = $schema->resultset('CD')->search( + { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, + { join => 'artist' } +)->count_rs; + +my $expected_bind = [ + [ { dbic_colname => "artist.name", sqlt_datatype => "varchar", sqlt_size => 100 } + => 'Caterwauler McCrae' ], + [ { dbic_colname => "me.year", sqlt_datatype => "varchar", sqlt_size => 100 } + => 
2001 ], +]; -my $rs = $schema->resultset('CD')->search( - { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, - { join => 'artist' }); -my $expected_bind = ["'Caterwauler McCrae'", "'2001'"]; -eval { $rs->count }; is_same_sql_bind( - $sql, \@bind, - "SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? )", + $rs->as_query, + "(SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON [artist].[artistid] = [me].[artist] WHERE ( [artist].[name] = ? AND [me].[year] = ? ))", $expected_bind, 'got correct SQL for count query with bracket quoting' ); @@ -34,27 +36,32 @@ is_same_sql_bind( $schema->storage->sql_maker->quote_char('`'); $schema->storage->sql_maker->name_sep('.'); -eval { $rs->count }; -is_same_sql_bind( - $sql, \@bind, - "SELECT COUNT( * ) FROM cd `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )", +is_same_sql_bind ( + $rs->as_query, + "(SELECT COUNT( * ) FROM cd `me` JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? ))", $expected_bind, - 'got correct SQL for count query with quoting' + 'got correct SQL for count query with mysql quoting' ); -my $order = 'year DESC'; -$rs = $schema->resultset('CD')->search({}, - { 'order_by' => $order }); -eval { $rs->first }; -like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)'); +# !!! talk to ribasushi *explicitly* before modfying these tests !!! +{ + is_same_sql_bind( + $schema->resultset('CD')->search({}, { order_by => 'year DESC', columns => 'cdid' })->as_query, + '(SELECT `me`.`cdid` FROM cd `me` ORDER BY `year DESC`)', + [], + 'quoted ORDER BY with DESC (should use a scalarref anyway)' + ); -$rs = $schema->resultset('CD')->search({}, - { 'order_by' => \$order }); -eval { $rs->first }; -like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref'); + is_same_sql_bind( + $schema->resultset('CD')->search({}, { order_by => \'year DESC', columns => 'cdid' })->as_query, + '(SELECT `me`.`cdid` FROM cd `me` ORDER BY year DESC)', + [], + 'did not quote ORDER BY with scalarref', + ); +} -is( - $schema->storage->sql_maker->update('group', { name => 'Bill', order => 12 }), +is_same_sql( + scalar $schema->storage->sql_maker->update('group', { order => 12, name => 'Bill' }), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE' ); diff --git a/t/storage/debug.t b/t/storage/debug.t index 6d8e94cf8..514b43bd7 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -6,7 +6,6 @@ use Test::More; use Test::Exception; use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj; use DBIC::SqlMakerTest; use Path::Class qw/file/; @@ -19,6 +18,7 @@ unlink $lfn or die $! 
if -e $lfn; # make sure we are testing the vanilla debugger and not ::PrettyPrint +require DBIx::Class::Storage::Statistics; $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new); ok ( $schema->storage->debug(1), 'debug' ); @@ -61,25 +61,45 @@ dies_ok { open(STDERR, '>&STDERRCOPY'); -# test trace output correctness for bind params +# test debugcb and debugobj protocol { - my ($sql, @bind); - $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); - - my @cds = $schema->resultset('CD')->search( { artist => 1, cdid => { -between => [ 1, 3 ] }, } ); - is_same_sql_bind( - $sql, \@bind, - "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) )", - [qw/'1' '1' '3'/], - 'got correct SQL with all bind parameters (debugcb)' - ); - - @cds = $schema->resultset('CD')->search( { artist => 1, cdid => { -between => [ 1, 3 ] }, } ); - is_same_sql_bind( - $sql, \@bind, - "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) )", ["'1'", "'1'", "'3'"], - 'got correct SQL with all bind parameters (debugobj)' - ); + my $rs = $schema->resultset('CD')->search( { + artist => 1, + cdid => { -between => [ 1, 3 ] }, + title => { '!=' => \[ '?', undef ] } + }); + + my $sql_trace = 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( artist = ? AND ( cdid BETWEEN ? AND ? ) AND title != ? ) )'; + my @bind_trace = qw( '1' '1' '3' NULL ); # quotes are in fact part of the trace + + + my @args; + $schema->storage->debugcb(sub { push @args, @_ } ); + + $rs->all; + + is_deeply( \@args, [ + "SELECT", + sprintf( "%s: %s\n", $sql_trace, join ', ', @bind_trace ), + ]); + + { + package DBICTest::DebugObj; + our @ISA = 'DBIx::Class::Storage::Statistics'; + + sub query_start { + my $self = shift; + ( $self->{_traced_sql}, @{$self->{_traced_bind}} ) = @_; + } + } + + my $do = $schema->storage->debugobj(DBICTest::DebugObj->new); + + $rs->all; + + is( $do->{_traced_sql}, $sql_trace ); + + is_deeply ( $do->{_traced_bind}, \@bind_trace ); } done_testing; diff --git a/t/storage/nobindvars.t b/t/storage/nobindvars.t index d2dd8401f..b22975638 100644 --- a/t/storage/nobindvars.t +++ b/t/storage/nobindvars.t @@ -4,17 +4,14 @@ use warnings; use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::DebugObj; -use DBIC::SqlMakerTest; -use DBI::Const::GetInfoType; { # Fake storage driver for SQLite + no bind variables package DBICTest::SQLite::NoBindVars; - use Class::C3; - use base qw/ - DBIx::Class::Storage::DBI::NoBindVars - DBIx::Class::Storage::DBI::SQLite - /; + use base qw( + DBIx::Class::Storage::DBI::NoBindVars + DBIx::Class::Storage::DBI::SQLite + ); + use mro 'c3'; } my $schema = DBICTest->init_schema (storage_type => 'DBICTest::SQLite::NoBindVars', no_populate => 1); @@ -35,26 +32,13 @@ my $it = $schema->resultset('Artist')->search( {}, is( $it->count, 3, "LIMIT count ok" ); # ask for 3 rows out of 7 artists -my ($sql, @bind); -my $orig_debugobj = $schema->storage->debugobj; -my $orig_debug = $schema->storage->debug; -$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) ); -$schema->storage->debug (1); - -is( $it->next->name, "Artist 2", "iterator->next ok" ); -$it->next; -$it->next; -is( $it->next, undef, "next past end of resultset ok" ); - -$schema->storage->debugobj ($orig_debugobj); -$schema->storage->debug ($orig_debug); - -is_same_sql_bind ( - $sql, - \@bind, - 'SELECT me.artistid, me.name, 
me.rank, me.charfield FROM artist me ORDER BY artistid LIMIT 3 OFFSET 2', - [], - 'Correctly interpolated SQL' -); +$schema->is_executed_sql_bind( sub { + is( $it->next->name, "Artist 2", "iterator->next ok" ); + $it->next; + $it->next; + is( $it->next, undef, "next past end of resultset ok" ); +}, [ + [ 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me ORDER BY artistid LIMIT 3 OFFSET 2' ], +], 'Correctly interpolated SQL' ); done_testing; diff --git a/t/storage/savepoints.t b/t/storage/savepoints.t index fab7036cb..0c56afc90 100644 --- a/t/storage/savepoints.t +++ b/t/storage/savepoints.t @@ -3,17 +3,30 @@ use warnings; use Test::More; use Test::Exception; -use DBIx::Class::Optional::Dependencies (); + +use lib qw(t/lib); +use DBICTest; + +{ + package # moar hide + DBICTest::SVPTracerObj; + + use base 'DBIx::Class::Storage::Statistics'; + + sub query_start { 'do notning'} + sub callback { 'dummy '} + + for my $svpcall (map { "svp_$_" } qw(begin rollback release)) { + no strict 'refs'; + *$svpcall = sub { $_[0]{uc $svpcall}++ }; + } +} my $env2optdep = { DBICTEST_PG => 'test_rdbms_pg', DBICTEST_MYSQL => 'test_rdbms_mysql', }; -use lib qw(t/lib); -use DBICTest; -use DBICTest::Stats; - my $schema; for ('', keys %$env2optdep) { SKIP: { @@ -56,9 +69,8 @@ for ('', keys %$env2optdep) { SKIP: { note "Testing $prefix"; - my $stats = DBICTest::Stats->new; - $schema->storage->debugobj($stats); - $schema->storage->debug(1); + local $schema->storage->{debugobj} = my $stats = DBICTest::SVPTracerObj->new; + local $schema->storage->{debug} = 1; $schema->resultset('Artist')->create({ name => 'foo' }); From 49eeb48de3d8ff685926b595fa0f3f5e680eaee2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 28 May 2014 05:04:27 +0200 Subject: [PATCH 032/548] Switch most remaining debug-hooks to $dbictest_schema->is_executed_querycount() --- t/71mysql.t | 26 ++-- t/72pg.t | 2 +- t/73oracle_blob.t | 11 +- t/746mssql.t | 21 +-- t/747mssql_ado.t | 6 +- t/76joins.t | 4 +- t/83cache.t | 136 +++++++----------- t/84serialize.t | 20 +-- t/86might_have.t | 38 ++--- t/lib/DBICTest/BaseSchema.pm | 25 +++- t/prefetch/correlated.t | 34 ++--- t/prefetch/count.t | 30 ++-- t/prefetch/false_colvalues.t | 39 ++---- t/prefetch/grouped.t | 61 +++----- t/prefetch/manual.t | 18 +-- t/prefetch/multiple_hasmany.t | 81 +++++------ t/prefetch/one_to_many_to_one.t | 16 +-- t/prefetch/refined_search_on_relation.t | 12 +- t/prefetch/standard.t | 178 ++++++++++-------------- t/prefetch/via_search_related.t | 57 +++----- t/relationship/core.t | 16 +-- t/relationship/update_or_create_multi.t | 47 ++++--- t/resultset/as_subselect_rs.t | 3 - 23 files changed, 342 insertions(+), 539 deletions(-) diff --git a/t/71mysql.t b/t/71mysql.t index e1e68ee83..52d097dba 100644 --- a/t/71mysql.t +++ b/t/71mysql.t @@ -285,15 +285,9 @@ NULLINSEARCH: { is ($rs->count, 10, '10 artists present'); - my $orig_debug = $schema->storage->debug; - $schema->storage->debug(1); - my $query_count; - $schema->storage->debugcb(sub { $query_count++ }); - - $query_count = 0; - $complex_rs->delete; - - is ($query_count, 1, 'One delete query fired'); + $schema->is_executed_querycount( sub { + $complex_rs->delete; + }, 1, 'One delete query fired' ); is ($rs->count, 0, '10 Artists correctly deleted'); $rs->create({ @@ -302,15 +296,13 @@ NULLINSEARCH: { }); is ($rs->count, 1, 'Artist with cd created'); - $query_count = 0; - $schema->resultset('CD')->search_related('artist', - { 'artist.name' => { -like => 'baby_with_%' } } - )->delete; - is ($query_count, 1, 
'And one more delete query fired'); - is ($rs->count, 0, 'Artist with cd deleted'); - $schema->storage->debugcb(undef); - $schema->storage->debug($orig_debug); + $schema->is_executed_querycount( sub { + $schema->resultset('CD')->search_related('artist', + { 'artist.name' => { -like => 'baby_with_%' } } + )->delete; + }, 1, 'And one more delete query fired'); + is ($rs->count, 0, 'Artist with cd deleted'); } ZEROINSEARCH: { diff --git a/t/72pg.t b/t/72pg.t index 44b723cd1..6e1ca7dbf 100644 --- a/t/72pg.t +++ b/t/72pg.t @@ -248,7 +248,7 @@ for my $use_insert_returning ($test_server_supports_insert_returning lives_ok { is_deeply ( $arr_rs->search({ arrayfield => { '=' => { -value => [3,4] }} })->first->arrayfield, - [3,4],, + [3,4], 'Array value matches explicit equal' ); } 'searching by arrayref (explicit equal sign)'; diff --git a/t/73oracle_blob.t b/t/73oracle_blob.t index ae5a359ce..c2fa413e9 100644 --- a/t/73oracle_blob.t +++ b/t/73oracle_blob.t @@ -78,17 +78,12 @@ SKIP: { . ': https://rt.cpan.org/Ticket/Display.html?id=64206' if $q; - # so we can disable BLOB mega-output - my $orig_debug = $schema->storage->debug; - my $id; foreach my $size (qw( small large )) { $id++; - local $schema->storage->{debug} = $size eq 'large' - ? 0 - : $orig_debug - ; + local $schema->storage->{debug} = 0 + if $size eq 'large'; my $str = $binstr{$size}; lives_ok { @@ -154,8 +149,6 @@ SKIP: { @objs = $rs->search({ blob => "re-updated blob", clob => 're-updated clob' })->all; is @objs, 0, 'row deleted successfully'; } - - $schema->storage->debug ($orig_debug); } do_clean ($dbh); diff --git a/t/746mssql.t b/t/746mssql.t index 2cc028198..5e062f606 100644 --- a/t/746mssql.t +++ b/t/746mssql.t @@ -331,20 +331,13 @@ SQL is ($limited_rs->count, 6, "$test_type: Correct count of limited right-sorted joined resultset"); is ($limited_rs->count_rs->next, 6, "$test_type: Correct count_rs of limited right-sorted joined resultset"); - my $queries; - my $orig_debug = $schema->storage->debug; - $schema->storage->debugcb(sub { $queries++; }); - $schema->storage->debug(1); - - is_deeply ( - [map { $_->owner->name } ($limited_rs->all) ], - [@owner_names[2 .. 7]], - "$test_type: Prefetch-limited rows were properly ordered" - ); - is ($queries, 1, "$test_type: Only one query with prefetch"); - - $schema->storage->debugcb(undef); - $schema->storage->debug($orig_debug); + $schema->is_executed_querycount( sub { + is_deeply ( + [map { $_->owner->name } ($limited_rs->all) ], + [@owner_names[2 .. 
7]], + "$test_type: Prefetch-limited rows were properly ordered" + ); + }, 1, "$test_type: Only one query with prefetch" ); is_deeply ( [map { $_->name } ($limited_rs->search_related ('owner')->all) ], diff --git a/t/747mssql_ado.t b/t/747mssql_ado.t index 77a88dc4f..3fd7af68e 100644 --- a/t/747mssql_ado.t +++ b/t/747mssql_ado.t @@ -223,9 +223,7 @@ is $row->artistid, $current_artistid+1, my $rs = $schema->resultset('VaryingMAX'); foreach my $size (qw/small large/) { - my $orig_debug = $schema->storage->debug; - - $schema->storage->debug(0) if $size eq 'large'; + local $schema->storage->{debug} = 0 if $size eq 'large'; my $str = $binstr{$size}; my $row; @@ -242,8 +240,6 @@ foreach my $size (qw/small large/) { cmp_ok try { $row->varchar_max }, 'eq', $str, 'VARCHAR(MAX) matches'; cmp_ok try { $row->nvarchar_max }, 'eq', $str, 'NVARCHAR(MAX) matches'; cmp_ok try { $row->varbinary_max }, 'eq', $str, 'VARBINARY(MAX) matches'; - - $schema->storage->debug($orig_debug); } # test regular blobs diff --git a/t/76joins.t b/t/76joins.t index 0fd511fd4..934e387b0 100644 --- a/t/76joins.t +++ b/t/76joins.t @@ -8,10 +8,8 @@ use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); -my $orig_debug = $schema->storage->debug; - # test the abstract join => SQL generator -my $sa = new DBIx::Class::SQLMaker; +my $sa = DBIx::Class::SQLMaker->new; my @j = ( { child => 'person' }, diff --git a/t/83cache.t b/t/83cache.t index 9edfe7137..5812083c2 100644 --- a/t/83cache.t +++ b/t/83cache.t @@ -7,12 +7,6 @@ use DBICTest; my $schema = DBICTest->init_schema(); -my $queries; -my $debugcb = sub{ $queries++ }; -my $sdebug = $schema->storage->debug; - -plan tests => 23; - my $rs = $schema->resultset("Artist")->search( { artistid => 1 } ); @@ -43,18 +37,12 @@ my $cd = $schema->resultset('CD')->find(1); $rs->clear_cache; -$queries = 0; -$schema->storage->debug(1); -$schema->storage->debugcb ($debugcb); - -$rs = $schema->resultset('Artist')->search( undef, { cache => 1 } ); -while( $artist = $rs->next ) {} -$artist = $rs->first(); - -is( $queries, 1, 'revisiting a row does not issue a query when cache => 1' ); +$schema->is_executed_querycount( sub { -$schema->storage->debug($sdebug); -$schema->storage->debugcb (undef); + $rs = $schema->resultset('Artist')->search( undef, { cache => 1 } ); + while( $artist = $rs->next ) {} + $artist = $rs->first(); +}, 1, 'revisiting a row does not issue a query when cache => 1' ); my @a = $schema->resultset("Artist")->search( { }, @@ -74,33 +62,28 @@ $rs = $schema->resultset("Artist")->search( } ); -# start test for prefetch SELECT count -$queries = 0; -$schema->storage->debug(1); -$schema->storage->debugcb ($debugcb); - -$artist = $rs->first; -$rs->reset(); +# prefetch SELECT count +$schema->is_executed_querycount( sub { + $artist = $rs->first; + $rs->reset(); -# make sure artist contains a related resultset for cds -isa_ok( $artist->{related_resultsets}{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' ); + # make sure artist contains a related resultset for cds + isa_ok( $artist->{related_resultsets}{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' ); -# check if $artist->cds->get_cache is populated -is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records'); + # check if $artist->cds->get_cache is populated + is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records'); -# ensure that $artist->cds returns correct number of objects -is( scalar 
($artist->cds), 3, 'artist->cds returns correct number of objects' ); + # ensure that $artist->cds returns correct number of objects + is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' ); -# ensure that $artist->cds->count returns correct value -is( $artist->cds->count, 3, 'artist->cds->count returns correct value' ); + # ensure that $artist->cds->count returns correct value + is( $artist->cds->count, 3, 'artist->cds->count returns correct value' ); -# ensure that $artist->count_related('cds') returns correct value -is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' ); + # ensure that $artist->count_related('cds') returns correct value + is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' ); -is($queries, 1, 'only one SQL statement executed'); +}, 1, 'only one SQL statement executed'); -$schema->storage->debug($sdebug); -$schema->storage->debugcb (undef); # make sure related_resultset is deleted after object is updated $artist->set_column('name', 'New Name'); @@ -131,57 +114,44 @@ is($artist->cds, 0, 'No cds for this artist'); } # SELECT count for nested has_many prefetch -$queries = 0; -$schema->storage->debug(1); -$schema->storage->debugcb ($debugcb); - -$artist = ($rs->all)[0]; - -is($queries, 1, 'only one SQL statement executed'); - -$queries = 0; - -my @objs; -my $cds = $artist->cds; -my $tags = $cds->next->tags; -while( my $tag = $tags->next ) { - push @objs, $tag->tagid; #warn "tag:", $tag->ID, " => ", $tag->tag; -} - -is_deeply( \@objs, [ 3 ], 'first cd has correct tags' ); - -$tags = $cds->next->tags; -@objs = (); -while( my $tag = $tags->next ) { - push @objs, $tag->id; #warn "tag: ", $tag->ID; -} - -is_deeply( [ sort @objs] , [ 2, 5, 8 ], 'third cd has correct tags' ); - -$tags = $cds->next->tags; -@objs = (); -while( my $tag = $tags->next ) { - push @objs, $tag->id; #warn "tag: ", $tag->ID; -} - -is_deeply( \@objs, [ 1 ], 'second cd has correct tags' ); +$schema->is_executed_querycount( sub { + $artist = ($rs->all)[0]; +}, 1, 'only one SQL statement executed'); + +$schema->is_executed_querycount( sub { + my @objs; + my $cds = $artist->cds; + my $tags = $cds->next->tags; + while( my $tag = $tags->next ) { + push @objs, $tag->tagid; #warn "tag:", $tag->ID, " => ", $tag->tag; + } -is( $queries, 0, 'no additional SQL statements while checking nested data' ); + is_deeply( \@objs, [ 3 ], 'first cd has correct tags' ); -# start test for prefetch SELECT count -$queries = 0; + $tags = $cds->next->tags; + @objs = (); + while( my $tag = $tags->next ) { + push @objs, $tag->id; #warn "tag: ", $tag->ID; + } -$artist = $schema->resultset('Artist')->find(1, { prefetch => [qw/cds/] }); + is_deeply( [ sort @objs] , [ 2, 5, 8 ], 'third cd has correct tags' ); -is( $queries, 1, 'only one select statement on find with inline has_many prefetch' ); + $tags = $cds->next->tags; + @objs = (); + while( my $tag = $tags->next ) { + push @objs, $tag->id; #warn "tag: ", $tag->ID; + } -# start test for prefetch SELECT count -$queries = 0; + is_deeply( \@objs, [ 1 ], 'second cd has correct tags' ); +}, 0, 'no additional SQL statements while checking nested data' ); -$rs = $schema->resultset('Artist')->search(undef, { prefetch => [qw/cds/] }); -$artist = $rs->find(1); +$schema->is_executed_querycount( sub { + $artist = $schema->resultset('Artist')->find(1, { prefetch => [qw/cds/] }); +}, 1, 'only one select statement on find with inline has_many prefetch' ); -is( $queries, 1, 'only one select statement on find with 
has_many prefetch on resultset' ); +$schema->is_executed_querycount( sub { + $rs = $schema->resultset('Artist')->search(undef, { prefetch => [qw/cds/] }); + $artist = $rs->find(1); +}, 1, 'only one select statement on find with has_many prefetch on resultset' ); -$schema->storage->debug($sdebug); -$schema->storage->debugcb (undef); +done_testing; diff --git a/t/84serialize.t b/t/84serialize.t index ffe03687f..ffa63fa0f 100644 --- a/t/84serialize.t +++ b/t/84serialize.t @@ -138,17 +138,12 @@ for my $name (keys %stores) { # Test resultsource with cached rows - my $query_count; - $cd_rs = $cd_rs->search ({}, { cache => 1 }); + $schema->is_executed_querycount( sub { + $cd_rs = $cd_rs->search ({}, { cache => 1 }); - my $orig_debug = $schema->storage->debug; - $schema->storage->debug(1); - $schema->storage->debugcb(sub { $query_count++ } ); + # this will hit the database once and prime the cache + my @cds = $cd_rs->all; - # this will hit the database once and prime the cache - my @cds = $cd_rs->all; - - lives_ok { $copy = $store->($cd_rs); ref_ne($copy, $cd_rs, 'Cached resultset cloned'); is_deeply ( @@ -158,12 +153,7 @@ for my $name (keys %stores) { ); is ($copy->count, $cd_rs->count, 'Cached count identical'); - } "serialize cached resultset lives: $name"; - - is ($query_count, 1, 'Only one db query fired'); - - $schema->storage->debug($orig_debug); - $schema->storage->debugcb(undef); + }, 1, 'Only one db query fired'); } # test schema-less detached thaw diff --git a/t/86might_have.t b/t/86might_have.t index 0ca9a06f1..05ba5390d 100644 --- a/t/86might_have.t +++ b/t/86might_have.t @@ -8,36 +8,27 @@ use DBICTest; my $schema = DBICTest->init_schema(); -my $queries; -$schema->storage->debugcb( sub{ $queries++ } ); -my $sdebug = $schema->storage->debug; - my $cd = $schema->resultset("CD")->find(1); $cd->title('test'); -# SELECT count -$queries = 0; -$schema->storage->debug(1); - -$cd->update; - -is($queries, 1, 'liner_notes (might_have) not prefetched - do not load -liner_notes on update'); - -$schema->storage->debug($sdebug); - +$schema->is_executed_querycount( sub { + $cd->update; +}, { + BEGIN => 1, + UPDATE => 1, + COMMIT => 1, +}, 'liner_notes (might_have) not prefetched - do not load liner_notes on update' ); my $cd2 = $schema->resultset("CD")->find(2, {prefetch => 'liner_notes'}); $cd2->title('test2'); -# SELECT count -$queries = 0; -$schema->storage->debug(1); - -$cd2->update; - -is($queries, 1, 'liner_notes (might_have) prefetched - do not load -liner_notes on update'); +$schema->is_executed_querycount( sub { + $cd2->update; +}, { + BEGIN => 1, + UPDATE => 1, + COMMIT => 1, +}, 'liner_notes (might_have) prefetched - do not load liner_notes on update'); warning_like { local $ENV{DBIC_DONT_VALIDATE_RELS}; @@ -62,5 +53,4 @@ warning_like { 'Setting DBIC_DONT_VALIDATE_RELS suppresses nullable relation warnings'; } -$schema->storage->debug($sdebug); done_testing(); diff --git a/t/lib/DBICTest/BaseSchema.pm b/t/lib/DBICTest/BaseSchema.pm index ae8d74a69..5fb9022f3 100644 --- a/t/lib/DBICTest/BaseSchema.pm +++ b/t/lib/DBICTest/BaseSchema.pm @@ -44,12 +44,35 @@ sub capture_executed_sql_bind { local $self->storage->{debugobj} = my $tracer_obj = DBICTest::SQLTracerObj->new; local $self->storage->{debug} = 1; - + local $Test::Builder::Level = $Test::Builder::Level + 2; $cref->(); return $tracer_obj->{sqlbinds} || []; } +sub is_executed_querycount { + my ($self, $cref, $exp_counts, $msg) = @_; + + local $Test::Builder::Level = $Test::Builder::Level + 1; + + $self->throw_exception("Expecting an hashref 
of counts or an integer representing total query count") + unless ref $exp_counts eq 'HASH' or (defined $exp_counts and ! ref $exp_counts); + + my @got = map { $_->[0] } @{ $self->capture_executed_sql_bind($cref) }; + + return Test::More::is( @got, $exp_counts, $msg ) + unless ref $exp_counts; + + my $got_counts = { map { $_ => 0 } keys %$exp_counts }; + $got_counts->{$_}++ for @got; + + return Test::More::is_deeply( + $got_counts, + $exp_counts, + $msg, + ); +} + sub is_executed_sql_bind { my ($self, $cref, $sqlbinds, $msg) = @_; diff --git a/t/prefetch/correlated.t b/t/prefetch/correlated.t index df349fc79..ac809f3f9 100644 --- a/t/prefetch/correlated.t +++ b/t/prefetch/correlated.t @@ -8,7 +8,6 @@ use DBICTest; use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); -my $orig_debug = $schema->storage->debug; my $cdrs = $schema->resultset('CD')->search({ 'me.artist' => { '!=', 2 }}); @@ -63,26 +62,19 @@ is_same_sql_bind( 'Expected SQL on correlated realiased subquery' ); -my $queries = 0; -$schema->storage->debugcb(sub { $queries++; }); -$schema->storage->debug(1); - -cmp_deeply ( - { map - { $_->cdid => { - track_titles => [ map { $_->title } ($_->tracks->all) ], - siblings => $_->get_column ('sibling_count'), - } } - $c_rs->all - }, - $cd_data, - 'Proper information retrieved from correlated subquery' -); - -is ($queries, 1, 'Only 1 query fired to retrieve everything'); - -$schema->storage->debug($orig_debug); -$schema->storage->debugcb(undef); +$schema->is_executed_querycount( sub { + cmp_deeply ( + { map + { $_->cdid => { + track_titles => [ map { $_->title } ($_->tracks->all) ], + siblings => $_->get_column ('sibling_count'), + } } + $c_rs->all + }, + $cd_data, + 'Proper information retrieved from correlated subquery' + ); +}, 1, 'Only 1 query fired to retrieve everything'); # now add an unbalanced select/as pair $c_rs = $c_rs->search ({}, { diff --git a/t/prefetch/count.t b/t/prefetch/count.t index 4311e8080..8fb2442a5 100644 --- a/t/prefetch/count.t +++ b/t/prefetch/count.t @@ -6,8 +6,6 @@ use lib qw(t/lib); use DBICTest; use DBIC::SqlMakerTest; -plan tests => 23; - my $schema = DBICTest->init_schema(); my $cd_rs = $schema->resultset('CD')->search ( @@ -15,7 +13,6 @@ my $cd_rs = $schema->resultset('CD')->search ( { prefetch => ['tracks', 'artist'] }, ); - is($cd_rs->count, 5, 'CDs with tracks count'); is($cd_rs->search_related('tracks')->count, 15, 'Tracks associated with CDs count (before SELECT()ing)'); @@ -77,26 +74,23 @@ is_same_sql_bind ( => 4 ] ], ); - { local $TODO = "Chaining with prefetch is fundamentally broken"; + $schema->is_executed_querycount( sub { - my $queries; - $schema->storage->debugcb ( sub { $queries++ } ); - $schema->storage->debug (1); - - my $cds = $cd2->search_related ('artist', {}, { prefetch => { cds => 'tracks' }, join => 'twokeys' }) + my $cds = $cd2->search_related ('artist', {}, { prefetch => { cds => 'tracks' }, join => 'twokeys' }) ->search_related ('cds'); - my $tracks = $cds->search_related ('tracks'); - - is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds"); - is(scalar($tracks->all), 2, "2 Tracks prefetched on cd via artist via one of the cds"); - is($tracks->count, 2, "Cached 2 Tracks counted on cd via artist via one of the cds"); + my $tracks = $cds->search_related ('tracks'); - is($cds->count, 2, "2 CDs counted on artist via one of the cds"); - is(scalar($cds->all), 2, "2 CDs prefetched on artist via one of the cds"); - is($cds->count, 2, "Cached 2 CDs counted on artist via one of the cds"); + 
is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds"); + is(scalar($tracks->all), 2, "2 Tracks prefetched on cd via artist via one of the cds"); + is($tracks->count, 2, "Cached 2 Tracks counted on cd via artist via one of the cds"); - is ($queries, 3, '2 counts + 1 prefetch?'); + is($cds->count, 2, "2 CDs counted on artist via one of the cds"); + is(scalar($cds->all), 2, "2 CDs prefetched on artist via one of the cds"); + is($cds->count, 2, "Cached 2 CDs counted on artist via one of the cds"); + }, 3, '2 counts + 1 prefetch?' ); } + +done_testing; diff --git a/t/prefetch/false_colvalues.t b/t/prefetch/false_colvalues.t index 5213e73af..468a27a85 100644 --- a/t/prefetch/false_colvalues.t +++ b/t/prefetch/false_colvalues.t @@ -15,29 +15,20 @@ $schema->resultset('CD')->create({ }, }); -my $orig_debug = $schema->storage->debug; - -my $queries = 0; -$schema->storage->debugcb(sub { $queries++; }); -$schema->storage->debug(1); - -my $cd = $schema->resultset('CD')->search( {}, { prefetch => 'artist' })->next; - -cmp_deeply - { $cd->get_columns }, - { artist => 0, cdid => 0, genreid => 0, single_track => 0, title => '', year => 0 }, - 'Expected CD columns present', -; - -cmp_deeply - { $cd->artist->get_columns }, - { artistid => 0, charfield => 0, name => "", rank => 0 }, - 'Expected Artist columns present', -; - -is $queries, 1, 'Only one query fired - prefetch worked'; - -$schema->storage->debugcb(undef); -$schema->storage->debug($orig_debug); +$schema->is_executed_querycount( sub { + my $cd = $schema->resultset('CD')->search( {}, { prefetch => 'artist' })->next; + + cmp_deeply + { $cd->get_columns }, + { artist => 0, cdid => 0, genreid => 0, single_track => 0, title => '', year => 0 }, + 'Expected CD columns present', + ; + + cmp_deeply + { $cd->artist->get_columns }, + { artistid => 0, charfield => 0, name => "", rank => 0 }, + 'Expected Artist columns present', + ; +}, 1, 'Only one query fired - prefetch worked' ); done_testing; diff --git a/t/prefetch/grouped.t b/t/prefetch/grouped.t index 0eed0a7d8..c9a91c411 100644 --- a/t/prefetch/grouped.t +++ b/t/prefetch/grouped.t @@ -12,7 +12,6 @@ my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype; my $schema = DBICTest->init_schema(); -my $sdebug = $schema->storage->debug; my $cd_rs = $schema->resultset('CD')->search ( { 'tracks.cd' => { '!=', undef } }, @@ -49,21 +48,13 @@ for ($cd_rs->all) { is($track_rs->count, 5, 'Prefetched count with groupby'); is($track_rs->all, 5, 'Prefetched objects with groupby'); - { - my $query_cnt = 0; - $schema->storage->debugcb ( sub { $query_cnt++ } ); - $schema->storage->debug (1); - + $schema->is_executed_querycount( sub { while (my $collapsed_track = $track_rs->next) { my $cdid = $collapsed_track->get_column('cd'); is($collapsed_track->get_column('track_count'), 3, "Correct count of tracks for CD $cdid" ); ok($collapsed_track->cd->title, "Prefetched title for CD $cdid" ); } - - is ($query_cnt, 1, 'Single query on prefetched titles'); - $schema->storage->debugcb (undef); - $schema->storage->debug ($sdebug); - } + }, 1, 'Single query on prefetched titles'); # Test sql by hand, as the sqlite db will simply paper over # improper group/select combinations @@ -190,22 +181,16 @@ for ($cd_rs->all) { my ($top_cd) = $most_tracks_rs->all; is ($top_cd->id, 2, 'Correct cd fetched on top'); # 2 because of the slice(1,1) earlier - my $query_cnt = 0; - $schema->storage->debugcb ( sub { $query_cnt++ } ); - 
$schema->storage->debug (1); - - is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly'); - is ($top_cd->tracks->count, 4, 'Count of prefetched tracks rs still correct'); - is ($top_cd->tracks->all, 4, 'Number of prefetched track objects still correct'); - is ( - $top_cd->liner_notes->notes, - 'Buy Whiskey!', - 'Correct liner pre-fetched with top cd', - ); - - is ($query_cnt, 0, 'No queries executed during prefetched data access'); - $schema->storage->debugcb (undef); - $schema->storage->debug ($sdebug); + $schema->is_executed_querycount( sub { + is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly'); + is ($top_cd->tracks->count, 4, 'Count of prefetched tracks rs still correct'); + is ($top_cd->tracks->all, 4, 'Number of prefetched track objects still correct'); + is ( + $top_cd->liner_notes->notes, + 'Buy Whiskey!', + 'Correct liner pre-fetched with top cd', + ); + }, 0, 'No queries executed during prefetched data access'); } { @@ -256,20 +241,14 @@ for ($cd_rs->all) { my ($top_cd) = $most_tracks_rs->all; is ($top_cd->id, 2, 'Correct cd fetched on top'); # 2 because of the slice(1,1) earlier - my $query_cnt = 0; - $schema->storage->debugcb ( sub { $query_cnt++ } ); - $schema->storage->debug (1); - - is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly'); - is ( - $top_cd->liner_notes->notes, - 'Buy Whiskey!', - 'Correct liner pre-fetched with top cd', - ); - - is ($query_cnt, 0, 'No queries executed during prefetched data access'); - $schema->storage->debugcb (undef); - $schema->storage->debug ($sdebug); + $schema->is_executed_querycount( sub { + is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly'); + is ( + $top_cd->liner_notes->notes, + 'Buy Whiskey!', + 'Correct liner pre-fetched with top cd', + ); + }, 0, 'No queries executed during prefetched data access'); } diff --git a/t/prefetch/manual.t b/t/prefetch/manual.t index 22281429f..e051ce37c 100644 --- a/t/prefetch/manual.t +++ b/t/prefetch/manual.t @@ -257,24 +257,20 @@ if ($ENV{TEST_VERBOSE}) { for @lines; } -{ - my $queries = 0; - $schema->storage->debugcb(sub { $queries++ }); - my $orig_debug = $schema->storage->debug; - $schema->storage->debug (1); - +$schema->is_executed_querycount( sub { for my $use_next (0, 1) { my @random_cds; + my $rs_r = $rs_random; if ($use_next) { warnings_exist { - while (my $o = $rs_random->next) { + while (my $o = $rs_r->next) { push @random_cds, $o; } } qr/performed an eager cursor slurp underneath/, 'Warned on auto-eager cursor'; } else { - @random_cds = $rs_random->all; + @random_cds = $rs_r->all; } is (@random_cds, 6, 'object count matches'); @@ -306,11 +302,7 @@ if ($ENV{TEST_VERBOSE}) { } } } - - $schema->storage->debugcb(undef); - $schema->storage->debug($orig_debug); - is ($queries, 2, "Only two queries for two prefetch calls total"); -} +}, 2, "Only two queries for two prefetch calls total"); # can't cmp_deeply a random set - need *some* order my $ord_rs = $rs->search({}, { diff --git a/t/prefetch/multiple_hasmany.t b/t/prefetch/multiple_hasmany.t index 31b258581..665005b75 100644 --- a/t/prefetch/multiple_hasmany.t +++ b/t/prefetch/multiple_hasmany.t @@ -2,11 +2,11 @@ use strict; use warnings; use Test::More; +use Test::Warn; use lib qw(t/lib); use DBICTest; my $schema = DBICTest->init_schema(); -my $sdebug = $schema->storage->debug; #( 1 -> M + M ) my $cd_rs = $schema->resultset('CD')->search( { 'me.title' => 'Forkful of bees' } ); @@ -15,33 +15,24 @@ my $pr_cd_rs = $cd_rs->search( {}, { prefetch => 
[qw/tracks tags/], } ); my $tracks_rs = $cd_rs->first->tracks; my $tracks_count = $tracks_rs->count; -my ( $pr_tracks_rs, $pr_tracks_count ); +$schema->is_executed_querycount( sub { + my $pcr = $pr_cd_rs; + my $pr_tracks_rs; -my $queries = 0; -$schema->storage->debugcb( sub { $queries++ } ); -$schema->storage->debug(1); + warnings_exist { + $pr_tracks_rs = $pcr->first->tracks; + } [], 'no warning on attempt to prefetch several same level has_many\'s (1 -> M + M)' ; -my $o_mm_warn; -{ - local $SIG{__WARN__} = sub { $o_mm_warn = shift }; - $pr_tracks_rs = $pr_cd_rs->first->tracks; -}; -$pr_tracks_count = $pr_tracks_rs->count; + is( $pr_tracks_rs->count, $tracks_count, + 'equal count of prefetched relations over several same level has_many\'s (1 -> M + M)' + ); -ok( !$o_mm_warn, -'no warning on attempt to prefetch several same level has_many\'s (1 -> M + M)' -); + is( $pr_tracks_rs->all, $tracks_count, + 'equal amount of objects returned with and without prefetch over several same level has_many\'s (1 -> M + M)' + ); -is( $queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' ); -$schema->storage->debugcb(undef); -$schema->storage->debug($sdebug); +}, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' ); -is( $pr_tracks_count, $tracks_count, -'equal count of prefetched relations over several same level has_many\'s (1 -> M + M)' -); -is( $pr_tracks_rs->all, $tracks_rs->all, -'equal amount of objects returned with and without prefetch over several same level has_many\'s (1 -> M + M)' -); #( M -> 1 -> M + M ) my $note_rs = @@ -52,32 +43,22 @@ my $pr_note_rs = my $tags_rs = $note_rs->first->cd->tags; my $tags_count = $tags_rs->count; -my ( $pr_tags_rs, $pr_tags_count ); - -$queries = 0; -$schema->storage->debugcb( sub { $queries++ } ); -$schema->storage->debug(1); - -my $m_o_mm_warn; -{ - local $SIG{__WARN__} = sub { $m_o_mm_warn = shift }; - $pr_tags_rs = $pr_note_rs->first->cd->tags; -}; -$pr_tags_count = $pr_tags_rs->count; - -ok( !$m_o_mm_warn, -'no warning on attempt to prefetch several same level has_many\'s (M -> 1 -> M + M)' -); - -is( $queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' ); -$schema->storage->debugcb(undef); -$schema->storage->debug($sdebug); - -is( $pr_tags_count, $tags_count, -'equal count of prefetched relations over several same level has_many\'s (M -> 1 -> M + M)' -); -is( $pr_tags_rs->all, $tags_rs->all, -'equal amount of objects with and without prefetch over several same level has_many\'s (M -> 1 -> M + M)' -); +$schema->is_executed_querycount( sub { + my $pnr = $pr_note_rs; + my $pr_tags_rs; + + warnings_exist { + $pr_tags_rs = $pnr->first->cd->tags; + } [], 'no warning on attempt to prefetch several same level has_many\'s (M -> 1 -> M + M)'; + + is( $pr_tags_rs->count, $tags_count, + 'equal count of prefetched relations over several same level has_many\'s (M -> 1 -> M + M)' + ); + is( $pr_tags_rs->all, $tags_count, + 'equal amount of objects with and without prefetch over several same level has_many\'s (M -> 1 -> M + M)' + ); + +}, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' ); + done_testing; diff --git a/t/prefetch/one_to_many_to_one.t b/t/prefetch/one_to_many_to_one.t index 811942ee0..f79b38e2c 100644 --- a/t/prefetch/one_to_many_to_one.t +++ b/t/prefetch/one_to_many_to_one.t @@ -12,20 +12,14 @@ my $artist = $schema->resultset ('Artist')->find ({artistid => 1}); is ($artist->cds->count, 3, 'Correct number of CDs'); is ($artist->cds->search_related ('genre')->count, 1, 'Only one of the cds has a genre'); -my 
$queries = 0; -my $orig_cb = $schema->storage->debugcb; -$schema->storage->debugcb(sub { $queries++ }); -$schema->storage->debug(1); - -my $pref = $schema->resultset ('Artist') +$schema->is_executed_querycount( sub { + my $pref = $schema->resultset ('Artist') ->search ({ 'me.artistid' => $artist->id }, { prefetch => { cds => 'genre' } }) ->next; -is ($pref->cds->count, 3, 'Correct number of CDs prefetched'); -is ($pref->cds->search_related ('genre')->count, 1, 'Only one of the prefetched cds has a prefetched genre'); + is ($pref->cds->count, 3, 'Correct number of CDs prefetched'); + is ($pref->cds->search_related ('genre')->count, 1, 'Only one of the prefetched cds has a prefetched genre'); -is ($queries, 1, 'All happened within one query only'); -$schema->storage->debugcb($orig_cb); -$schema->storage->debug(0); +}, 1, 'All happened within one query only'); done_testing; diff --git a/t/prefetch/refined_search_on_relation.t b/t/prefetch/refined_search_on_relation.t index 8a7035c20..729dbdebd 100644 --- a/t/prefetch/refined_search_on_relation.t +++ b/t/prefetch/refined_search_on_relation.t @@ -25,11 +25,7 @@ is ( ); # this still should emit no queries: -{ - my $queries = 0; - my $orig_debug = $schema->storage->debug; - $schema->storage->debugcb(sub { $queries++; }); - $schema->storage->debug(1); +$schema->is_executed_querycount( sub { my $cds = $art->cds; is ( @@ -47,10 +43,6 @@ is ( ); } - $schema->storage->debug($orig_debug); - $schema->storage->debugcb(undef); - - is ($queries, 0, 'No queries on prefetched operations'); -} +}, 0, 'No queries on prefetched operations'); done_testing; diff --git a/t/prefetch/standard.t b/t/prefetch/standard.t index 26d335439..69574ab6a 100644 --- a/t/prefetch/standard.t +++ b/t/prefetch/standard.t @@ -7,33 +7,26 @@ use lib qw(t/lib); use DBICTest; my $schema = DBICTest->init_schema(); -my $orig_debug = $schema->storage->debug; -my $queries = 0; -$schema->storage->debugcb(sub { $queries++; }); -$schema->storage->debug(1); - -my $search = { 'artist.name' => 'Caterwauler McCrae' }; -my $attr = { prefetch => [ qw/artist liner_notes/ ], +my $rs; +$schema->is_executed_querycount( sub { + my $search = { 'artist.name' => 'Caterwauler McCrae' }; + my $attr = { prefetch => [ qw/artist liner_notes/ ], order_by => 'me.cdid' }; -my $rs = $schema->resultset("CD")->search($search, $attr); -my @cd = $rs->all; - -is($cd[0]->title, 'Spoonful of bees', 'First record returned ok'); + $rs = $schema->resultset("CD")->search($search, $attr); + my @cd = $rs->all; -ok(!defined $cd[0]->liner_notes, 'No prefetch for NULL LEFT join'); + is($cd[0]->title, 'Spoonful of bees', 'First record returned ok'); -is($cd[1]->{_relationship_data}{liner_notes}->notes, 'Buy Whiskey!', 'Prefetch for present LEFT JOIN'); + ok(!defined $cd[0]->liner_notes, 'No prefetch for NULL LEFT join'); -is(ref $cd[1]->liner_notes, 'DBICTest::LinerNotes', 'Prefetch returns correct class'); + is($cd[1]->{_relationship_data}{liner_notes}->notes, 'Buy Whiskey!', 'Prefetch for present LEFT JOIN'); -is($cd[2]->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'Prefetch on parent object ok'); + is(ref $cd[1]->liner_notes, 'DBICTest::LinerNotes', 'Prefetch returns correct class'); -is($queries, 1, 'prefetch ran only 1 select statement'); - -$schema->storage->debug($orig_debug); -$schema->storage->debugobj->callback(undef); + is($cd[2]->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'Prefetch on parent object ok'); +}, 1, 'prefetch ran only 1 select statement'); # test for partial prefetch via columns attr 
my $cd = $schema->resultset('CD')->find(1, @@ -42,66 +35,50 @@ my $cd = $schema->resultset('CD')->find(1, join => { 'artist' => {} } } ); -ok(eval { $cd->artist->name eq 'Caterwauler McCrae' }, 'single related column prefetched'); +is( $cd->artist->name, 'Caterwauler McCrae', 'single related column prefetched'); # start test for nested prefetch SELECT count -$queries = 0; -$schema->storage->debugcb(sub { $queries++ }); -$schema->storage->debug(1); - -$rs = $schema->resultset('Tag')->search( - { 'me.tagid' => 1 }, - { - prefetch => { cd => 'artist' } - } -); - -my $tag = $rs->first; +my $tag; +$schema->is_executed_querycount( sub { + $rs = $schema->resultset('Tag')->search( + { 'me.tagid' => 1 }, + { + prefetch => { cd => 'artist' } + } + ); -is( $tag->cd->title, 'Spoonful of bees', 'step 1 ok for nested prefetch' ); + $tag = $rs->first; -is( $tag->cd->artist->name, 'Caterwauler McCrae', 'step 2 ok for nested prefetch'); + is( $tag->cd->title, 'Spoonful of bees', 'step 1 ok for nested prefetch' ); -# count the SELECTs -#$selects++ if /SELECT(?!.*WHERE 1=0.*)/; -is($queries, 1, 'nested prefetch ran exactly 1 select statement (excluding column_info)'); + is( $tag->cd->artist->name, 'Caterwauler McCrae', 'step 2 ok for nested prefetch'); +}, 1, 'nested prefetch ran exactly 1 select statement'); -$queries = 0; -is($tag->search_related('cd')->search_related('artist')->first->name, +$schema->is_executed_querycount( sub { + is($tag->search_related('cd')->search_related('artist')->first->name, 'Caterwauler McCrae', 'chained belongs_to->belongs_to search_related ok'); +}, 0, 'chained search_related after belongs_to->belongs_to prefetch ran no queries'); -is($queries, 0, 'chained search_related after belontgs_to->belongs_to prefetch ran no queries'); - -$queries = 0; - -$cd = $schema->resultset('CD')->find(1, { prefetch => 'artist' }); -is($cd->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'artist prefetched correctly on find'); +$schema->is_executed_querycount( sub { + $cd = $schema->resultset('CD')->find(1, { prefetch => 'artist' }); -is($queries, 1, 'find with prefetch ran exactly 1 select statement (excluding column_info)'); + is($cd->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'artist prefetched correctly on find'); +}, 1, 'find with prefetch ran exactly 1 select statement (excluding column_info)'); -$queries = 0; +$schema->is_executed_querycount( sub { + $cd = $schema->resultset('CD')->find(1, { prefetch => { cd_to_producer => 'producer' }, order_by => 'producer.producerid' }); -$schema->storage->debugcb(sub { $queries++; }); + is($cd->producers->first->name, 'Matt S Trout', 'many_to_many accessor ok'); +}, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query'); -$cd = $schema->resultset('CD')->find(1, { prefetch => { cd_to_producer => 'producer' }, order_by => 'producer.producerid' }); +$schema->is_executed_querycount( sub { + my $producers = $cd->search_related('cd_to_producer')->search_related('producer'); -is($cd->producers->first->name, 'Matt S Trout', 'many_to_many accessor ok'); - -is($queries, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query'); - -$queries = 0; - -my $producers = $cd->search_related('cd_to_producer')->search_related('producer'); - -is($producers->first->name, 'Matt S Trout', 'chained many_to_many search_related ok'); - -is($queries, 0, 'chained search_related after many_to_many prefetch ran no queries'); - -$schema->storage->debug($orig_debug); -$schema->storage->debugobj->callback(undef); + 
is($producers->first->name, 'Matt S Trout', 'chained many_to_many search_related ok'); +}, 0, 'chained search_related after many_to_many prefetch ran no queries'); $rs = $schema->resultset('Tag')->search( {}, @@ -180,27 +157,22 @@ my $left_join = $schema->resultset('CD')->search( cmp_ok($left_join, '==', 1, 'prefetch with no join record present'); -$queries = 0; -$schema->storage->debugcb(sub { $queries++ }); -$schema->storage->debug(1); - -my $tree_like = - $schema->resultset('TreeLike')->find(5, - { join => { parent => { parent => 'parent' } }, +my $tree_like; +$schema->is_executed_querycount( sub { + $tree_like = + $schema->resultset('TreeLike')->find(5, + { join => { parent => { parent => 'parent' } }, prefetch => { parent => { parent => 'parent' } } }); -is($tree_like->name, 'quux', 'Bottom of tree ok'); -$tree_like = $tree_like->parent; -is($tree_like->name, 'baz', 'First level up ok'); -$tree_like = $tree_like->parent; -is($tree_like->name, 'bar', 'Second level up ok'); -$tree_like = $tree_like->parent; -is($tree_like->name, 'foo', 'Third level up ok'); + is($tree_like->name, 'quux', 'Bottom of tree ok'); + $tree_like = $tree_like->parent; + is($tree_like->name, 'baz', 'First level up ok'); + $tree_like = $tree_like->parent; + is($tree_like->name, 'bar', 'Second level up ok'); + $tree_like = $tree_like->parent; + is($tree_like->name, 'foo', 'Third level up ok'); -$schema->storage->debug($orig_debug); -$schema->storage->debugobj->callback(undef); - -cmp_ok($queries, '==', 1, 'Only one query run'); +}, 1, 'Only one query run'); $tree_like = $schema->resultset('TreeLike')->search({'me.id' => 2}); $tree_like = $tree_like->search_related('children')->search_related('children')->search_related('children')->first; @@ -210,15 +182,15 @@ $tree_like = $schema->resultset('TreeLike')->search_related('children', { 'children.id' => 3, 'children_2.id' => 4 }, { prefetch => { children => 'children' } } )->first; -is(eval { $tree_like->children->first->children->first->name }, 'quux', +is( $tree_like->children->first->children->first->name, 'quux', 'Tree search_related with prefetch ok'); -$tree_like = eval { $schema->resultset('TreeLike')->search( +$tree_like = $schema->resultset('TreeLike')->search( { 'children.id' => 3, 'children_2.id' => 6 }, { join => [qw/children children children/] } )->search_related('children', { 'children_4.id' => 7 }, { prefetch => 'children' } - )->first->children->first; }; -is(eval { $tree_like->name }, 'fong', 'Tree with multiple has_many joins ok'); + )->first->children->first; +is( $tree_like->name, 'fong', 'Tree with multiple has_many joins ok'); $rs = $schema->resultset('Artist'); $rs->create({ artistid => 4, name => 'Unknown singer-songwriter' }); @@ -274,32 +246,24 @@ sub make_hash_struc { return $struc; } -$queries = 0; -$schema->storage->debugcb(sub { $queries++ }); -$schema->storage->debug(1); - -my $prefetch_result = make_hash_struc($art_rs_pr); -is($queries, 1, 'nested prefetch across has_many->has_many ran exactly 1 query'); - -my $nonpre_result = make_hash_struc($art_rs); +my $prefetch_result; +$schema->is_executed_querycount( sub { + $prefetch_result = make_hash_struc($art_rs_pr); +}, 1, 'nested prefetch across has_many->has_many ran exactly 1 query'); +my $nonpre_result = make_hash_struc($art_rs); is_deeply( $prefetch_result, $nonpre_result, 'Compare 2 level prefetch result to non-prefetch result' ); -$queries = 0; - -is_deeply( - [ sort map { $_->title } $art_rs_pr->search_related('cds')->search_related('tracks')->all ], - [ 'Apiary', 'Beehind You', 
'Boring Name', 'Boring Song', 'Fowlin', 'Howlin', - 'No More Ideas', 'Sad', 'Sticky Honey', 'Stripy', 'Stung with Success', - 'Suicidal', 'The Bees Knees', 'Under The Weather', 'Yowlin' ], - 'chained has_many->has_many search_related ok' -); - -is($queries, 0, 'chained search_related after has_many->has_many prefetch ran no queries'); - -$schema->storage->debug($orig_debug); -$schema->storage->debugobj->callback(undef); +$schema->is_executed_querycount( sub { + is_deeply( + [ sort map { $_->title } $art_rs_pr->search_related('cds')->search_related('tracks')->all ], + [ 'Apiary', 'Beehind You', 'Boring Name', 'Boring Song', 'Fowlin', 'Howlin', + 'No More Ideas', 'Sad', 'Sticky Honey', 'Stripy', 'Stung with Success', + 'Suicidal', 'The Bees Knees', 'Under The Weather', 'Yowlin' ], + 'chained has_many->has_many search_related ok' + ); +}, 0, 'chained search_related after has_many->has_many prefetch ran no queries'); done_testing; diff --git a/t/prefetch/via_search_related.t b/t/prefetch/via_search_related.t index 588b12505..f1aa3d000 100644 --- a/t/prefetch/via_search_related.t +++ b/t/prefetch/via_search_related.t @@ -9,10 +9,6 @@ use DBICTest; my $schema = DBICTest->init_schema(); -my $queries; -my $debugcb = sub { $queries++; }; -my $orig_debug = $schema->storage->debug; - lives_ok ( sub { my $no_prefetch = $schema->resultset('Track')->search_related(cd => { @@ -73,16 +69,12 @@ lives_ok ( sub { { my $cd = $schema->resultset('CD')->search({}, { prefetch => 'cd_to_producer' })->find(1); - $queries = 0; - $schema->storage->debugcb ($debugcb); - $schema->storage->debug (1); - - is( $cd->cd_to_producer->count, 3 ,'Count of prefetched m2m links via accessor' ); - is( scalar $cd->cd_to_producer->all, 3, 'Amount of prefetched m2m link objects via accessor' ); - is( $cd->search_related('cd_to_producer')->count, 3, 'Count of prefetched m2m links via search_related' ); - is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Amount of prefetched m2m links via search_related' ); - - is($queries, 0, 'No queries ran so far'); + $schema->is_executed_querycount( sub { + is( $cd->cd_to_producer->count, 3 ,'Count of prefetched m2m links via accessor' ); + is( scalar $cd->cd_to_producer->all, 3, 'Amount of prefetched m2m link objects via accessor' ); + is( $cd->search_related('cd_to_producer')->count, 3, 'Count of prefetched m2m links via search_related' ); + is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Amount of prefetched m2m links via search_related' ); + }, 0, 'No queries ran so far'); is( scalar $cd->cd_to_producer->search_related('producer')->all, 3, 'Amount of objects via search_related off prefetched linker' ); @@ -97,16 +89,12 @@ lives_ok ( sub { is( $cd->producers->count, 3, 'Count via m2m accessor' ); - $queries = 0; - - is( $cd->cd_to_producer->count, 3 ,'Review count of prefetched m2m links via accessor' ); - is( scalar $cd->cd_to_producer->all, 3, 'Review amount of prefetched m2m link objects via accessor' ); - is( $cd->search_related('cd_to_producer')->count, 3, 'Review count of prefetched m2m links via search_related' ); - is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Rreview amount of prefetched m2m links via search_related' ); - - is($queries, 0, 'Still no queries on prefetched linker'); - $schema->storage->debugcb (undef); - $schema->storage->debug ($orig_debug); + $schema->is_executed_querycount( sub { + is( $cd->cd_to_producer->count, 3 ,'Review count of prefetched m2m links via accessor' ); + is( scalar $cd->cd_to_producer->all, 3, 'Review amount of 
prefetched m2m link objects via accessor' ); + is( $cd->search_related('cd_to_producer')->count, 3, 'Review count of prefetched m2m links via search_related' ); + is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Rreview amount of prefetched m2m links via search_related' ); + }, 0, 'Still no queries on prefetched linker'); } # tests with distinct => 1 @@ -169,21 +157,18 @@ lives_ok (sub { is($rs->all, 1, 'distinct with prefetch (objects)'); is($rs->count, 1, 'distinct with prefetch (count)'); - $queries = 0; - $schema->storage->debugcb ($debugcb); - $schema->storage->debug (1); + local $TODO = "This makes another 2 trips to the database, it can't be right"; + $schema->is_executed_querycount( sub { - # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2 - is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)'); - is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)'); + # the is() calls are not todoified + local $TODO; - { - local $TODO = "This makes another 2 trips to the database, it can't be right"; - is ($queries, 0, 'No extra queries fired (prefetch survives search_related)'); - } + # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2 + is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)'); + is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)'); + + }, 0, 'No extra queries fired (prefetch survives search_related)'); - $schema->storage->debugcb (undef); - $schema->storage->debug ($orig_debug); }, 'distinct generally works with prefetch on deep search_related chains'); # pathological "user knows what they're doing" case diff --git a/t/relationship/core.t b/t/relationship/core.t index e86dfc6d7..504993da6 100644 --- a/t/relationship/core.t +++ b/t/relationship/core.t @@ -8,7 +8,6 @@ use DBICTest; use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); -my $sdebug = $schema->storage->debug; # has_a test my $cd = $schema->resultset("CD")->find(4); @@ -33,17 +32,14 @@ $artist->create_related( 'cds', { my $big_flop_cd = ($artist->search_related('cds'))[3]; is( $big_flop_cd->title, 'Big Flop', 'create_related ok' ); -{ # make sure we are not making pointless select queries when a FK IS NULL - my $queries = 0; - $schema->storage->debugcb(sub { $queries++; }); - $schema->storage->debug(1); +# make sure we are not making pointless select queries when a FK IS NULL +$schema->is_executed_querycount( sub { $big_flop_cd->genre; #should not trigger a select query - is($queries, 0, 'No SELECT made for belongs_to if key IS NULL'); +}, 0, 'No SELECT made for belongs_to if key IS NULL'); + +$schema->is_executed_querycount( sub { $big_flop_cd->genre_inefficient; #should trigger a select query - is($queries, 1, 'SELECT made for belongs_to if key IS NULL when undef_on_null_fk disabled'); - $schema->storage->debug($sdebug); - $schema->storage->debugcb(undef); -} +}, 1, 'SELECT made for belongs_to if key IS NULL when undef_on_null_fk disabled'); my( $rs_from_list ) = $artist->search_related_rs('cds'); isa_ok( $rs_from_list, 'DBIx::Class::ResultSet', 'search_related_rs in list context returns rs' ); diff --git a/t/relationship/update_or_create_multi.t b/t/relationship/update_or_create_multi.t index dd022a169..040baf711 100644 --- a/t/relationship/update_or_create_multi.t +++ b/t/relationship/update_or_create_multi.t @@ -9,7 +9,6 @@ use DBICTest; use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); -my $sdebug = 
$schema->storage->debug; my $artist = $schema->resultset ('Artist')->find(1); @@ -79,28 +78,30 @@ throws_ok { # expect a create, after a failed search using *only* the # *current* relationship and the unique column constraints # (so no year) -my @sql; -$schema->storage->debugcb(sub { push @sql, $_[1] }); -$schema->storage->debug (1); - -$genre->update_or_create_related ('cds', { - title => 'the best thing since vertical toasters', - artist => $artist, - year => 2012, -}); - -$schema->storage->debugcb(undef); -$schema->storage->debug ($sdebug); - -my ($search_sql) = $sql[0] =~ /^(SELECT .+?)\:/; -is_same_sql ( - $search_sql, - 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track - FROM cd me - WHERE ( me.artist = ? AND me.genreid = ? AND me.title = ? ) - ', - 'expected select issued', -); +$schema->is_executed_sql_bind( sub { + $genre->update_or_create_related ('cds', { + title => 'the best thing since vertical toasters', + artist => $artist, + year => 2012, + }); +}, [ + [ + 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track + FROM cd me + WHERE ( me.artist = ? AND me.genreid = ? AND me.title = ? ) + ', + 1, + 2, + "the best thing since vertical toasters", + ], + [ + 'INSERT INTO cd ( artist, genreid, title, year) VALUES ( ?, ?, ?, ? )', + 1, + 2, + "the best thing since vertical toasters", + 2012, + ], +], 'expected select issued' ); # a has_many search without a unique constraint makes no sense # but I am not sure what to test for - leaving open diff --git a/t/resultset/as_subselect_rs.t b/t/resultset/as_subselect_rs.t index c0f811051..250785cd4 100644 --- a/t/resultset/as_subselect_rs.t +++ b/t/resultset/as_subselect_rs.t @@ -40,8 +40,6 @@ is_same_sql_bind ( 'Resultset-class attributes do not seep outside of the subselect', ); -$schema->storage->debug(1); - is_same_sql_bind( $schema->resultset('CD')->search ({}, { rows => 2, @@ -70,5 +68,4 @@ is_same_sql_bind( [ [{ sqlt_datatype => 'integer' } => 2 ] ], ); - done_testing; From a5a7bb733a940db710b7408508374833683a2e79 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 26 May 2014 13:47:52 +0200 Subject: [PATCH 033/548] Retire DBIC/SqlMakerTest.pm now that SQLA::Test provides the same function --- t/60core.t | 3 +- t/71mysql.t | 1 - t/73oracle.t | 1 - t/73oracle_blob.t | 1 - t/76joins.t | 3 +- t/76select.t | 3 +- t/88result_set_column.t | 3 +- t/90join_torture.t | 3 +- t/count/count_rs.t | 3 +- t/count/distinct.t | 3 +- t/count/in_subquery.t | 1 - t/count/prefetch.t | 3 +- t/lib/DBIC/SqlMakerTest.pm | 165 ---------------------- t/lib/DBICTest.pm | 51 +++++-- t/prefetch/correlated.t | 3 +- t/prefetch/count.t | 3 +- t/prefetch/double_prefetch.t | 3 +- t/prefetch/grouped.t | 3 +- t/prefetch/incomplete.t | 3 +- t/prefetch/join_type.t | 3 +- t/prefetch/o2m_o2m_order_by_with_limit.t | 3 +- t/prefetch/with_limit.t | 3 +- t/relationship/core.t | 3 +- t/relationship/custom.t | 3 +- t/relationship/update_or_create_multi.t | 1 - t/resultset/as_query.t | 3 +- t/resultset/as_subselect_rs.t | 3 +- t/resultset/bind_attr.t | 3 +- t/search/distinct.t | 3 +- t/search/preserve_original_rs.t | 3 +- t/search/related_has_many.t | 1 - t/search/related_strip_prefetch.t | 3 +- t/search/select_chains.t | 3 +- t/search/select_chains_unbalanced.t | 3 +- t/search/subquery.t | 3 +- t/sqlmaker/bind_transport.t | 3 +- t/sqlmaker/core.t | 3 +- t/sqlmaker/core_quoted.t | 3 +- t/sqlmaker/dbihacks_internals.t | 3 +- t/sqlmaker/hierarchical/oracle.t | 4 +- t/sqlmaker/limit_dialects/custom.t | 4 +- 
t/sqlmaker/limit_dialects/fetch_first.t | 3 +- t/sqlmaker/limit_dialects/first_skip.t | 3 +- t/sqlmaker/limit_dialects/generic_subq.t | 3 +- t/sqlmaker/limit_dialects/mssql_torture.t | 3 +- t/sqlmaker/limit_dialects/rno.t | 3 +- t/sqlmaker/limit_dialects/rownum.t | 3 +- t/sqlmaker/limit_dialects/skip_first.t | 3 +- t/sqlmaker/limit_dialects/toplimit.t | 3 +- t/sqlmaker/limit_dialects/torture.t | 3 +- t/sqlmaker/msaccess.t | 3 +- t/sqlmaker/mysql.t | 4 +- t/sqlmaker/nest_deprec.t | 3 +- t/sqlmaker/oracle.t | 3 +- t/sqlmaker/oraclejoin.t | 3 +- t/sqlmaker/order_by_bindtransport.t | 3 +- t/sqlmaker/order_by_func.t | 3 +- t/sqlmaker/quotes.t | 3 +- t/sqlmaker/sqlite.t | 3 +- t/storage/debug.t | 1 - t/storage/ping_count.t | 1 - 61 files changed, 89 insertions(+), 291 deletions(-) delete mode 100644 t/lib/DBIC/SqlMakerTest.pm diff --git a/t/60core.t b/t/60core.t index f2e363abd..62299c322 100644 --- a/t/60core.t +++ b/t/60core.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use Test::Warn; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/71mysql.t b/t/71mysql.t index 52d097dba..bebbc4b2f 100644 --- a/t/71mysql.t +++ b/t/71mysql.t @@ -11,7 +11,6 @@ use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql') unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql'); diff --git a/t/73oracle.t b/t/73oracle.t index 40dcaac01..bdcc732fb 100644 --- a/t/73oracle.t +++ b/t/73oracle.t @@ -9,7 +9,6 @@ use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/}; diff --git a/t/73oracle_blob.t b/t/73oracle_blob.t index c2fa413e9..2a78d36fd 100644 --- a/t/73oracle_blob.t +++ b/t/73oracle_blob.t @@ -9,7 +9,6 @@ use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/}; diff --git a/t/76joins.t b/t/76joins.t index 934e387b0..66e9fb7d9 100644 --- a/t/76joins.t +++ b/t/76joins.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/76select.t b/t/76select.t index dca565460..81ecf0d94 100644 --- a/t/76select.t +++ b/t/76select.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/88result_set_column.t b/t/88result_set_column.t index 226c20965..9a04bd16d 100644 --- a/t/88result_set_column.t +++ b/t/88result_set_column.t @@ -5,8 +5,7 @@ use Test::More; use Test::Warn; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/90join_torture.t b/t/90join_torture.t index 2bc86b288..27111e447 100644 --- a/t/90join_torture.t +++ b/t/90join_torture.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); lives_ok (sub { diff --git a/t/count/count_rs.t b/t/count/count_rs.t index 5883daf8c..174f6307f 100644 --- a/t/count/count_rs.t 
+++ b/t/count/count_rs.t @@ -4,8 +4,7 @@ use warnings; use lib qw(t/lib); use Test::More; -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($ROWS, $OFFSET) = ( diff --git a/t/count/distinct.t b/t/count/distinct.t index 1b44b9a5f..e916ab941 100644 --- a/t/count/distinct.t +++ b/t/count/distinct.t @@ -6,8 +6,7 @@ use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/count/in_subquery.t b/t/count/in_subquery.t index 8f56d8389..85f48d083 100644 --- a/t/count/in_subquery.t +++ b/t/count/in_subquery.t @@ -5,7 +5,6 @@ use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); diff --git a/t/count/prefetch.t b/t/count/prefetch.t index 25ae8569f..eb18236d8 100644 --- a/t/count/prefetch.t +++ b/t/count/prefetch.t @@ -4,8 +4,7 @@ use warnings; use lib qw(t/lib); use Test::More; -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/lib/DBIC/SqlMakerTest.pm b/t/lib/DBIC/SqlMakerTest.pm deleted file mode 100644 index 8fd047c36..000000000 --- a/t/lib/DBIC/SqlMakerTest.pm +++ /dev/null @@ -1,165 +0,0 @@ -package DBIC::SqlMakerTest; - -use strict; -use warnings; - -use base qw/Exporter/; - -use Carp; -use SQL::Abstract::Test; - -our @EXPORT = qw/ - is_same_sql_bind - is_same_sql - is_same_bind -/; -our @EXPORT_OK = qw/ - eq_sql - eq_bind - eq_sql_bind -/; - -sub is_same_sql_bind { - # unroll possible as_query arrayrefrefs - my @args; - - for (1,2) { - my $chunk = shift @_; - - if ( ref $chunk eq 'REF' and ref $$chunk eq 'ARRAY' ) { - my ($sql, @bind) = @$$chunk; - push @args, ($sql, \@bind); - } - else { - push @args, $chunk, shift @_; - } - - } - - push @args, shift @_; - - croak "Unexpected argument(s) supplied to is_same_sql_bind: " . join ('; ', @_) - if @_; - - @_ = @args; - goto &SQL::Abstract::Test::is_same_sql_bind; -} - -*is_same_sql = \&SQL::Abstract::Test::is_same_sql; -*is_same_bind = \&SQL::Abstract::Test::is_same_bind; -*eq_sql = \&SQL::Abstract::Test::eq_sql; -*eq_bind = \&SQL::Abstract::Test::eq_bind; -*eq_sql_bind = \&SQL::Abstract::Test::eq_sql_bind; - -1; - -__END__ - - -=head1 NAME - -DBIC::SqlMakerTest - Helper package for testing sql_maker component of DBIC - -=head1 SYNOPSIS - - use Test::More; - use DBIC::SqlMakerTest; - - my ($sql, @bind) = $schema->storage->sql_maker->select(%args); - is_same_sql_bind( - $sql, \@bind, - $expected_sql, \@expected_bind, - 'foo bar works' - ); - -=head1 DESCRIPTION - -Exports functions that can be used to compare generated SQL and bind values. - -This is a thin wrapper around L, which makes it easier -to compare as_query sql/bind arrayrefrefs directly. - -=head1 FUNCTIONS - -=head2 is_same_sql_bind - - is_same_sql_bind( - $given_sql, \@given_bind, - $expected_sql, \@expected_bind, - $test_msg - ); - - is_same_sql_bind( - $rs->as_query - $expected_sql, \@expected_bind, - $test_msg - ); - - is_same_sql_bind( - \[$given_sql, @given_bind], - $expected_sql, \@expected_bind, - $test_msg - ); - -Compares given and expected pairs of C<($sql, \@bind)>, and calls -L on the result, with C<$test_msg> as message. - -=head2 is_same_sql - - is_same_sql( - $given_sql, - $expected_sql, - $test_msg - ); - -Compares given and expected SQL statement, and calls L on the -result, with C<$test_msg> as message. 
- -=head2 is_same_bind - - is_same_bind( - \@given_bind, - \@expected_bind, - $test_msg - ); - -Compares given and expected bind value lists, and calls L on -the result, with C<$test_msg> as message. - -=head2 eq_sql - - my $is_same = eq_sql($given_sql, $expected_sql); - -Compares the two SQL statements. Returns true IFF they are equivalent. - -=head2 eq_bind - - my $is_same = eq_sql(\@given_bind, \@expected_bind); - -Compares two lists of bind values. Returns true IFF their values are the same. - -=head2 eq_sql_bind - - my $is_same = eq_sql_bind( - $given_sql, \@given_bind, - $expected_sql, \@expected_bind - ); - -Compares the two SQL statements and the two lists of bind values. Returns true -IFF they are equivalent and the bind values are the same. - - -=head1 SEE ALSO - -L, L, L. - -=head1 AUTHOR - -Norbert Buchmuller, - -=head1 COPYRIGHT AND LICENSE - -Copyright 2008 by Norbert Buchmuller. - -This library is free software; you can redistribute it and/or modify -it under the same terms as Perl itself. diff --git a/t/lib/DBICTest.pm b/t/lib/DBICTest.pm index 4a9307d93..432eb8714 100644 --- a/t/lib/DBICTest.pm +++ b/t/lib/DBICTest.pm @@ -20,7 +20,7 @@ BEGIN { use Module::Runtime 'module_notional_filename'; BEGIN { - for my $mod (qw( DBIC::SqlMakerTest SQL::Abstract )) { + for my $mod (qw( SQL::Abstract::Test SQL::Abstract )) { if ( $INC{ module_notional_filename($mod) } ) { # FIXME this does not seem to work in BEGIN - why?! #require Carp; @@ -48,7 +48,7 @@ use Config; =head1 NAME -DBICTest - Library to be used by DBIx::Class test scripts. +DBICTest - Library to be used by DBIx::Class test scripts =head1 SYNOPSIS @@ -63,6 +63,26 @@ DBICTest - Library to be used by DBIx::Class test scripts. This module provides the basic utilities to write tests against DBIx::Class. +=head1 EXPORTS + +The module does not export anything by default, nor provides individual +function exports in the conventional sense. Instead the following tags are +recognized: + +=head2 :DiffSQL + +Same as C +L +L)> + +=head2 :GlobalLock + +Some tests are very time sensitive and need to run on their own, without +being disturbed by anything else grabbing CPU or disk IO. Hence why everything +using C grabs a shared lock, and the few tests that request a +C<:GlobalLock> will ask for an exclusive one and block until they can get it. + =head1 METHODS =head2 init_schema @@ -79,18 +99,15 @@ DBIx::Class. This method removes the test SQLite database in t/var/DBIxClass.db and then creates a new, empty database. -This method will call deploy_schema() by default, unless the -no_deploy flag is set. +This method will call L by default, unless the +C flag is set. -Also, by default, this method will call populate_schema() by -default, unless the no_deploy or no_populate flags are set. +Also, by default, this method will call L +by default, unless the C or C flags are set. =cut -# some tests are very time sensitive and need to run on their own, without -# being disturbed by anything else grabbing CPU or disk IO. 
Hence why everything -# using DBICTest grabs a shared lock, and the few tests that request a :GlobalLock -# will ask for an exclusive one and block until they can get it +# see L our ($global_lock_fh, $global_exclusive_lock); sub import { my $self = shift; @@ -103,13 +120,21 @@ sub import { or die "Unable to open $lockpath: $!"; } - for (@_) { - if ($_ eq ':GlobalLock') { + for my $exp (@_) { + if ($exp eq ':GlobalLock') { flock ($global_lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!"; $global_exclusive_lock = 1; } + elsif ($exp eq ':DiffSQL') { + require SQL::Abstract::Test; + my $into = caller(0); + for (qw(is_same_sql_bind is_same_sql is_same_bind)) { + no strict 'refs'; + *{"${into}::$_"} = \&{"SQL::Abstract::Test::$_"}; + } + } else { - croak "Unknown export $_ requested from $self"; + croak "Unknown export $exp requested from $self"; } } diff --git a/t/prefetch/correlated.t b/t/prefetch/correlated.t index ac809f3f9..6c2e0b769 100644 --- a/t/prefetch/correlated.t +++ b/t/prefetch/correlated.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use Test::Deep; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/prefetch/count.t b/t/prefetch/count.t index 8fb2442a5..f973575f1 100644 --- a/t/prefetch/count.t +++ b/t/prefetch/count.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/prefetch/double_prefetch.t b/t/prefetch/double_prefetch.t index d4b50aa24..fa0b79f51 100644 --- a/t/prefetch/double_prefetch.t +++ b/t/prefetch/double_prefetch.t @@ -3,8 +3,7 @@ use strict; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/prefetch/grouped.t b/t/prefetch/grouped.t index c9a91c411..a1b986057 100644 --- a/t/prefetch/grouped.t +++ b/t/prefetch/grouped.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; diff --git a/t/prefetch/incomplete.t b/t/prefetch/incomplete.t index 09df99c63..63e431aa9 100644 --- a/t/prefetch/incomplete.t +++ b/t/prefetch/incomplete.t @@ -5,8 +5,7 @@ use Test::More; use Test::Deep; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/prefetch/join_type.t b/t/prefetch/join_type.t index ac839f09e..f2980e788 100644 --- a/t/prefetch/join_type.t +++ b/t/prefetch/join_type.t @@ -3,8 +3,7 @@ use strict; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/prefetch/o2m_o2m_order_by_with_limit.t b/t/prefetch/o2m_o2m_order_by_with_limit.t index b8a447707..65a2c3986 100644 --- a/t/prefetch/o2m_o2m_order_by_with_limit.t +++ b/t/prefetch/o2m_o2m_order_by_with_limit.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($ROWS, $OFFSET) = ( diff --git a/t/prefetch/with_limit.t b/t/prefetch/with_limit.t index 4d08cd06a..28b3b8a89 100644 --- a/t/prefetch/with_limit.t +++ b/t/prefetch/with_limit.t @@ -6,8 +6,7 @@ use warnings; use Test::More; use Test::Exception; 
use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; diff --git a/t/relationship/core.t b/t/relationship/core.t index 504993da6..46e655c3e 100644 --- a/t/relationship/core.t +++ b/t/relationship/core.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 98b8b450e..3d47fb17d 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/relationship/update_or_create_multi.t b/t/relationship/update_or_create_multi.t index 040baf711..5dde83db1 100644 --- a/t/relationship/update_or_create_multi.t +++ b/t/relationship/update_or_create_multi.t @@ -6,7 +6,6 @@ use Test::Exception; use Test::Warn; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); diff --git a/t/resultset/as_query.t b/t/resultset/as_query.t index 39bf88ce7..3b43e9c2f 100644 --- a/t/resultset/as_query.t +++ b/t/resultset/as_query.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); my $art_rs = $schema->resultset('Artist'); diff --git a/t/resultset/as_subselect_rs.t b/t/resultset/as_subselect_rs.t index 250785cd4..6d7597756 100644 --- a/t/resultset/as_subselect_rs.t +++ b/t/resultset/as_subselect_rs.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/resultset/bind_attr.t b/t/resultset/bind_attr.t index 71d1b9772..7f25d99b1 100644 --- a/t/resultset/bind_attr.t +++ b/t/resultset/bind_attr.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema; diff --git a/t/search/distinct.t b/t/search/distinct.t index 07ac2095d..4a8026769 100644 --- a/t/search/distinct.t +++ b/t/search/distinct.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/search/preserve_original_rs.t b/t/search/preserve_original_rs.t index 04dc9a870..abb654404 100644 --- a/t/search/preserve_original_rs.t +++ b/t/search/preserve_original_rs.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use Storable 'dclone'; diff --git a/t/search/related_has_many.t b/t/search/related_has_many.t index 1834d5317..91b1fb7da 100644 --- a/t/search/related_has_many.t +++ b/t/search/related_has_many.t @@ -5,7 +5,6 @@ use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my $schema = DBICTest->init_schema(); diff --git a/t/search/related_strip_prefetch.t b/t/search/related_strip_prefetch.t index 76336b1db..5e34fe980 100644 --- a/t/search/related_strip_prefetch.t +++ b/t/search/related_strip_prefetch.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use 
DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; diff --git a/t/search/select_chains.t b/t/search/select_chains.t index 257b0b36b..ed8f23b45 100644 --- a/t/search/select_chains.t +++ b/t/search/select_chains.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/search/select_chains_unbalanced.t b/t/search/select_chains_unbalanced.t index d742fd6a7..63de73cf9 100644 --- a/t/search/select_chains_unbalanced.t +++ b/t/search/select_chains_unbalanced.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/search/subquery.t b/t/search/subquery.t index a281fe95a..87195fdc0 100644 --- a/t/search/subquery.t +++ b/t/search/subquery.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; diff --git a/t/sqlmaker/bind_transport.t b/t/sqlmaker/bind_transport.t index bafe8e99e..2d33352fe 100644 --- a/t/sqlmaker/bind_transport.t +++ b/t/sqlmaker/bind_transport.t @@ -6,8 +6,7 @@ use Test::Exception; use Math::BigInt; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($ROWS, $OFFSET) = ( diff --git a/t/sqlmaker/core.t b/t/sqlmaker/core.t index 57cf48093..1c2a1c35c 100644 --- a/t/sqlmaker/core.t +++ b/t/sqlmaker/core.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(no_deploy => 1); diff --git a/t/sqlmaker/core_quoted.t b/t/sqlmaker/core_quoted.t index 53ce03bdb..e90befeb8 100644 --- a/t/sqlmaker/core_quoted.t +++ b/t/sqlmaker/core_quoted.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index f3d240f82..de5e49e81 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -4,9 +4,8 @@ use Test::More; use Test::Warn; use lib qw(t/lib); -use DBICTest; +use DBICTest ':DiffSQL'; -use DBIC::SqlMakerTest; use Data::Dumper; my $schema = DBICTest->init_schema( no_deploy => 1); diff --git a/t/sqlmaker/hierarchical/oracle.t b/t/sqlmaker/hierarchical/oracle.t index 128314080..33cfeee06 100644 --- a/t/sqlmaker/hierarchical/oracle.t +++ b/t/sqlmaker/hierarchical/oracle.t @@ -23,9 +23,7 @@ BEGIN { ); } -use DBICTest; -use DBICTest::Schema; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; diff --git a/t/sqlmaker/limit_dialects/custom.t b/t/sqlmaker/limit_dialects/custom.t index c5e61c670..89c4788bc 100644 --- a/t/sqlmaker/limit_dialects/custom.t +++ b/t/sqlmaker/limit_dialects/custom.t @@ -5,9 +5,7 @@ use Test::More; use Test::Warn; use lib qw(t/lib); -use DBICTest; -use DBICTest::Schema; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; # This is legacy stuff from SQL::Absract::Limit # Keep it around just in case someone is using it diff 
--git a/t/sqlmaker/limit_dialects/fetch_first.t b/t/sqlmaker/limit_dialects/fetch_first.t index c521b528a..ab3e17034 100644 --- a/t/sqlmaker/limit_dialects/fetch_first.t +++ b/t/sqlmaker/limit_dialects/fetch_first.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema; diff --git a/t/sqlmaker/limit_dialects/first_skip.t b/t/sqlmaker/limit_dialects/first_skip.t index 539855ca5..acaf770ef 100644 --- a/t/sqlmaker/limit_dialects/first_skip.t +++ b/t/sqlmaker/limit_dialects/first_skip.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($LIMIT, $OFFSET) = ( diff --git a/t/sqlmaker/limit_dialects/generic_subq.t b/t/sqlmaker/limit_dialects/generic_subq.t index ef899ff95..2d4bedad2 100644 --- a/t/sqlmaker/limit_dialects/generic_subq.t +++ b/t/sqlmaker/limit_dialects/generic_subq.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); use List::Util 'min'; -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($ROWS, $TOTAL, $OFFSET) = ( DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, diff --git a/t/sqlmaker/limit_dialects/mssql_torture.t b/t/sqlmaker/limit_dialects/mssql_torture.t index 7806dfbc3..e45295344 100644 --- a/t/sqlmaker/limit_dialects/mssql_torture.t +++ b/t/sqlmaker/limit_dialects/mssql_torture.t @@ -2,8 +2,7 @@ use strict; use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype; my $TOTAL = DBIx::Class::SQLMaker::LimitDialects->__total_bindtype; diff --git a/t/sqlmaker/limit_dialects/rno.t b/t/sqlmaker/limit_dialects/rno.t index 32f67c560..b3177926e 100644 --- a/t/sqlmaker/limit_dialects/rno.t +++ b/t/sqlmaker/limit_dialects/rno.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($TOTAL, $OFFSET) = ( diff --git a/t/sqlmaker/limit_dialects/rownum.t b/t/sqlmaker/limit_dialects/rownum.t index 6985c23ff..806bba493 100644 --- a/t/sqlmaker/limit_dialects/rownum.t +++ b/t/sqlmaker/limit_dialects/rownum.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($TOTAL, $OFFSET, $ROWS) = ( diff --git a/t/sqlmaker/limit_dialects/skip_first.t b/t/sqlmaker/limit_dialects/skip_first.t index ba2d8cfa7..a87b95e43 100644 --- a/t/sqlmaker/limit_dialects/skip_first.t +++ b/t/sqlmaker/limit_dialects/skip_first.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; my ($LIMIT, $OFFSET) = ( diff --git a/t/sqlmaker/limit_dialects/toplimit.t b/t/sqlmaker/limit_dialects/toplimit.t index 88c99a63f..3fb03d9eb 100644 --- a/t/sqlmaker/limit_dialects/toplimit.t +++ b/t/sqlmaker/limit_dialects/toplimit.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema; diff --git a/t/sqlmaker/limit_dialects/torture.t b/t/sqlmaker/limit_dialects/torture.t index 74e60a2f9..c14ea60cf 
100644 --- a/t/sqlmaker/limit_dialects/torture.t +++ b/t/sqlmaker/limit_dialects/torture.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use Storable 'dclone'; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema; my $native_limit_dialect = $schema->storage->sql_maker->{limit_dialect}; diff --git a/t/sqlmaker/msaccess.t b/t/sqlmaker/msaccess.t index 2805d0309..0333cb28c 100644 --- a/t/sqlmaker/msaccess.t +++ b/t/sqlmaker/msaccess.t @@ -2,8 +2,7 @@ use strict; use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; # the entire point of the subclass is that parenthesis have to be # just right for ACCESS to be happy diff --git a/t/sqlmaker/mysql.t b/t/sqlmaker/mysql.t index 5b3f33039..6fb1526f8 100644 --- a/t/sqlmaker/mysql.t +++ b/t/sqlmaker/mysql.t @@ -4,9 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBICTest::Schema; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest::Schema->connect (DBICTest->_database, { quote_char => '`' }); # cheat diff --git a/t/sqlmaker/nest_deprec.t b/t/sqlmaker/nest_deprec.t index f00443ad1..a6edeeec3 100644 --- a/t/sqlmaker/nest_deprec.t +++ b/t/sqlmaker/nest_deprec.t @@ -5,8 +5,7 @@ use Test::More; use Test::Warn; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/sqlmaker/oracle.t b/t/sqlmaker/oracle.t index 69234f94a..b47460022 100644 --- a/t/sqlmaker/oracle.t +++ b/t/sqlmaker/oracle.t @@ -11,8 +11,7 @@ BEGIN { use Test::Exception; use Data::Dumper::Concise; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::Oracle; # diff --git a/t/sqlmaker/oraclejoin.t b/t/sqlmaker/oraclejoin.t index 3ba82abcc..c1725e053 100644 --- a/t/sqlmaker/oraclejoin.t +++ b/t/sqlmaker/oraclejoin.t @@ -10,9 +10,8 @@ BEGIN { } use lib qw(t/lib); -use DBICTest; +use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::OracleJoins; -use DBIC::SqlMakerTest; my $sa = DBIx::Class::SQLMaker::OracleJoins->new; diff --git a/t/sqlmaker/order_by_bindtransport.t b/t/sqlmaker/order_by_bindtransport.t index b612375f4..24da80ed9 100644 --- a/t/sqlmaker/order_by_bindtransport.t +++ b/t/sqlmaker/order_by_bindtransport.t @@ -5,8 +5,7 @@ use Test::More; use Test::Exception; use Data::Dumper::Concise; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; sub test_order { my $rs = shift; diff --git a/t/sqlmaker/order_by_func.t b/t/sqlmaker/order_by_func.t index 51968ed09..96092195b 100644 --- a/t/sqlmaker/order_by_func.t +++ b/t/sqlmaker/order_by_func.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); diff --git a/t/sqlmaker/quotes.t b/t/sqlmaker/quotes.t index d3a8c8f93..4a5357b7d 100644 --- a/t/sqlmaker/quotes.t +++ b/t/sqlmaker/quotes.t @@ -4,8 +4,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema( no_deploy => 1 ); diff --git a/t/sqlmaker/sqlite.t b/t/sqlmaker/sqlite.t index 86fcc82d9..9c0b904d4 100644 --- a/t/sqlmaker/sqlite.t +++ b/t/sqlmaker/sqlite.t @@ -3,8 +3,7 @@ use warnings; use Test::More; use lib qw(t/lib); -use DBICTest; -use DBIC::SqlMakerTest; +use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema; diff --git 
a/t/storage/debug.t b/t/storage/debug.t index 514b43bd7..d16e1292e 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -6,7 +6,6 @@ use Test::More; use Test::Exception; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; use Path::Class qw/file/; BEGIN { delete @ENV{qw(DBIC_TRACE DBIC_TRACE_PROFILE DBICTEST_SQLITE_USE_FILE)} } diff --git a/t/storage/ping_count.t b/t/storage/ping_count.t index a17c3823b..28af647fb 100644 --- a/t/storage/ping_count.t +++ b/t/storage/ping_count.t @@ -4,7 +4,6 @@ use warnings; use Test::More; use lib qw(t/lib); use DBICTest; -use DBIC::SqlMakerTest; my $ping_count = 0; From 12270263042d2650cca1b9693cdab83be7099f9a Mon Sep 17 00:00:00 2001 From: Karen Etheridge Date: Tue, 27 May 2014 16:09:20 -0700 Subject: [PATCH 034/548] spelling fix --- lib/DBIx/Class/ResultSource.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 8cc409ded..8e8da7f75 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -639,7 +639,7 @@ sub _pri_cols_or_die { } # same as above but mandating single-column PK (used by relationship condition -# inferrence) +# inference) sub _single_pri_col_or_die { my $self = shift; my ($pri, @too_many) = $self->_pri_cols_or_die; From b8a270548277cf47dbe171d66e9f1352e5d1dc0e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 28 May 2014 01:21:13 +0200 Subject: [PATCH 035/548] Use the correct $PROGRAM_NAME for the initial dbicadmin POD generation --- maint/Makefile.PL.inc/53_autogen_pod.pl | 1 + 1 file changed, 1 insertion(+) diff --git a/maint/Makefile.PL.inc/53_autogen_pod.pl b/maint/Makefile.PL.inc/53_autogen_pod.pl index ec6c1a182..fb222eea0 100644 --- a/maint/Makefile.PL.inc/53_autogen_pod.pl +++ b/maint/Makefile.PL.inc/53_autogen_pod.pl @@ -56,6 +56,7 @@ my $great_success; { local @ARGV = ('--documentation-as-pod', $pod_fn); + local $0 = 'dbicadmin'; local *CORE::GLOBAL::exit = sub { $great_success++; die; }; do 'script/dbicadmin'; } From 3705e3b2801ea6a8f770b6f0c528b119bea92fe9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 4 Jun 2014 05:30:26 +0200 Subject: [PATCH 036/548] Consolidate handling of "is this a literal" and "is this a value" In the process fix inability of IC to deal with \[], and simplify the overall bind value passing codepath Aside from the bugfixes there should be no functional changes Work inspired by a report and preliminary patch from dim0xff++ --- Changes | 6 +++ lib/DBIx/Class.pm | 2 + lib/DBIx/Class/InflateColumn.pm | 46 ++++++++++++++------- lib/DBIx/Class/ResultSet.pm | 16 +++----- lib/DBIx/Class/ResultSource.pm | 5 +-- lib/DBIx/Class/Row.pm | 8 ++++ lib/DBIx/Class/SQLMaker.pm | 1 - lib/DBIx/Class/Storage/DBI.pm | 38 ++++-------------- lib/DBIx/Class/Storage/DBI/SQLite.pm | 4 +- lib/DBIx/Class/Storage/DBIHacks.pm | 13 ++---- lib/DBIx/Class/_Util.pm | 45 ++++++++++++++++++++- t/100populate.t | 2 +- t/inflate/datetime.t | 48 ++++++++++++++++++++++ t/internals/is_plain_value.t | 60 ++++++++++++++++++++++++++++ 14 files changed, 222 insertions(+), 72 deletions(-) create mode 100644 t/internals/is_plain_value.t diff --git a/Changes b/Changes index bbd5815bf..0b971bd81 100644 --- a/Changes +++ b/Changes @@ -11,9 +11,15 @@ Revision history for DBIx::Class up by create() and populate() - Ensure definitive condition extractor handles bizarre corner cases without bombing out (RT#93244) + - Fix set_inflated_column incorrectly handling \[] literals (GH#44) + - Ensure that setting a column
to a literal invariably marks it dirty - Fix inability to handle multiple consecutive transactions with savepoints on DBD::SQLite < 1.39 + * Misc + - Stop explicitly stringifying objects before passing them to DBI, + instead assume that the DBI/DBD combo will take care of things + 0.08270 2014-01-30 21:54 (PST) * Fixes - Fix 0.08260 regression in DBD::SQLite bound int handling. Inserted diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index ac9f58129..85450ac43 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -356,6 +356,8 @@ debolaz: Anders Nor Berle dew: Dan Thomas +dim0xff: Dmitry Latin + dkubb: Dan Kubb dnm: Justin Wheeler diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index 9214582a4..e9cc41707 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -3,7 +3,9 @@ package DBIx::Class::InflateColumn; use strict; use warnings; -use base qw/DBIx::Class::Row/; +use base 'DBIx::Class::Row'; +use DBIx::Class::_Util 'is_literal_value'; +use namespace::clean; =head1 NAME @@ -104,24 +106,36 @@ sub inflate_column { sub _inflated_column { my ($self, $col, $value) = @_; return $value unless defined $value; # NULL is NULL is NULL + my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); + return $value unless exists $info->{_inflate_info}; + my $inflate = $info->{_inflate_info}{inflate}; $self->throw_exception("No inflator for $col") unless defined $inflate; + return $inflate->($value, $self); } sub _deflated_column { my ($self, $col, $value) = @_; -# return $value unless ref $value && blessed($value); # If it's not an object, don't touch it - ## Leave scalar refs (ala SQL::Abstract literal SQL), untouched, deflate all other refs - return $value unless (ref $value && ref($value) ne 'SCALAR'); + + ## Deflate any refs except for literals, pass through plain values + return $value if ( + ! length ref $value + or + is_literal_value($value) + ); + my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); + return $value unless exists $info->{_inflate_info}; + my $deflate = $info->{_inflate_info}{deflate}; $self->throw_exception("No deflator for $col") unless defined $deflate; + return $deflate->($value, $self); } @@ -144,7 +158,8 @@ sub get_inflated_column { if exists $self->{_inflated_column}{$col}; my $val = $self->get_column($col); - return $val if ref $val eq 'SCALAR'; #that would be a not-yet-reloaded sclarref update + + return $val if is_literal_value($val); #that would be a not-yet-reloaded literal update return $self->{_inflated_column}{$col} = $self->_inflated_column($col, $val); } @@ -161,8 +176,8 @@ analogous to L. sub set_inflated_column { my ($self, $col, $inflated) = @_; $self->set_column($col, $self->_deflated_column($col, $inflated)); -# if (blessed $inflated) { - if (ref $inflated && ref($inflated) ne 'SCALAR') { + + if (length ref $inflated and ! is_literal_value($inflated) ) { $self->{_inflated_column}{$col} = $inflated; } else { delete $self->{_inflated_column}{$col}; @@ -181,14 +196,17 @@ as dirty. This is directly analogous to L. 
sub store_inflated_column { my ($self, $col, $inflated) = @_; -# unless (blessed $inflated) { - unless (ref $inflated && ref($inflated) ne 'SCALAR') { - delete $self->{_inflated_column}{$col}; - $self->store_column($col => $inflated); - return $inflated; + + if (is_literal_value($inflated)) { + delete $self->{_inflated_column}{$col}; + $self->store_column($col => $inflated); } - delete $self->{_column_data}{$col}; - return $self->{_inflated_column}{$col} = $inflated; + else { + delete $self->{_column_data}{$col}; + $self->{_inflated_column}{$col} = $inflated; + } + + return $inflated; } =head1 SEE ALSO diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 292dbc366..e2c87ddd6 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -6,7 +6,9 @@ use base qw/DBIx::Class/; use DBIx::Class::Carp; use DBIx::Class::ResultSetColumn; use Scalar::Util qw/blessed weaken reftype/; -use DBIx::Class::_Util 'fail_on_internal_wantarray'; +use DBIx::Class::_Util qw( + fail_on_internal_wantarray is_plain_value is_literal_value +); use Try::Tiny; use Data::Compare (); # no imports!!! guard against insane architecture @@ -2446,19 +2448,11 @@ sub _merge_with_rscond { for my $c (keys %$implied) { my $v = $implied->{$c}; - if ( - ! ref $v - or - overload::Method($v, '""') - ) { + if ( ! length ref $v or is_plain_value($v) ) { $new_data{$c} = $v; } elsif ( - ref $v eq 'HASH' and keys %$v == 1 and exists $v->{'='} and ( - ref $v->{'='} eq 'SCALAR' - or - ( ref $v->{'='} eq 'REF' and ref ${$v->{'='}} eq 'ARRAY' ) - ) + ref $v eq 'HASH' and keys %$v == 1 and exists $v->{'='} and is_literal_value($v->{'='}) ) { $new_data{$c} = $v->{'='}; } diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 8e8da7f75..e1ebbf723 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -9,6 +9,7 @@ use DBIx::Class::ResultSet; use DBIx::Class::ResultSourceHandle; use DBIx::Class::Carp; +use DBIx::Class::_Util 'is_literal_value'; use Devel::GlobalDestruction; use Try::Tiny; use List::Util 'first'; @@ -1741,9 +1742,7 @@ sub _resolve_condition { if ( ref $joinfree_cond->{$c} and - ref $joinfree_cond->{$c} ne 'SCALAR' - and - ref $joinfree_cond->{$c} ne 'REF' + ! is_literal_value( $joinfree_cond->{$c} ) ) { push @$cond_cols, $colname; next; diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index ce08fbd9b..05d19a5b6 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -9,6 +9,7 @@ use Scalar::Util 'blessed'; use List::Util 'first'; use Try::Tiny; use DBIx::Class::Carp; +use DBIx::Class::_Util 'is_literal_value'; ### ### Internal method @@ -985,6 +986,13 @@ sub _eq_column_values { elsif (not defined $old) { # both undef return 1; } + elsif ( + is_literal_value $old + or + is_literal_value $new + ) { + return 0; + } elsif ($old eq $new) { return 1; } diff --git a/lib/DBIx/Class/SQLMaker.pm b/lib/DBIx/Class/SQLMaker.pm index 319d3fb35..5b5181f18 100644 --- a/lib/DBIx/Class/SQLMaker.pm +++ b/lib/DBIx/Class/SQLMaker.pm @@ -270,7 +270,6 @@ sub _recurse_fields { return ($select, @rhs_bind); } - # Is the second check absolutely necessary? 
elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) { return @{$$fields}; } diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 75c843486..a213c9588 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -13,8 +13,8 @@ use List::Util qw/first/; use Sub::Name 'subname'; use Context::Preserve 'preserve_context'; use Try::Tiny; -use overload (); use Data::Compare (); # no imports!!! guard against insane architecture +use DBIx::Class::_Util qw(is_plain_value is_literal_value); use namespace::clean; # default cursor class, overridable in connect_info attributes @@ -1741,7 +1741,7 @@ sub _resolve_bindattrs { and length ref $resolved->[1] and - ! overload::Method($resolved->[1], '""') + ! is_plain_value $resolved->[1] ) { require Data::Dumper; local $Data::Dumper::Maxdepth = 1; @@ -1895,15 +1895,9 @@ sub _bind_sth_params { ); } else { - # FIXME SUBOPTIMAL - most likely this is not necessary at all - # confirm with dbi-dev whether explicit stringification is needed - my $v = ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') ) - ? "$bind->[$i][1]" - : $bind->[$i][1] - ; $sth->bind_param( $i + 1, - $v, + $bind->[$i][1], $bind_attrs->[$i], ); } @@ -1923,9 +1917,7 @@ sub _prefetch_autovalues { ( ! exists $to_insert->{$col} or - ref $to_insert->{$col} eq 'SCALAR' - or - (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY') + is_literal_value($to_insert->{$col}) ) ) { $values{$col} = $self->_sequence_fetch( @@ -1962,11 +1954,9 @@ sub insert { } # nothing to retrieve when explicit values are supplied - next if (defined $to_insert->{$col} and ! ( - ref $to_insert->{$col} eq 'SCALAR' - or - (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY') - )); + next if ( + defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col}) + ); # the 'scalar keys' is a trick to preserve the ->columns declaration order $retrieve_cols{$col} = scalar keys %retrieve_cols if ( @@ -2046,18 +2036,6 @@ sub insert_bulk { my @col_range = (0..$#$cols); - # FIXME SUBOPTIMAL - most likely this is not necessary at all - # confirm with dbi-dev whether explicit stringification is needed - # - # forcibly stringify whatever is stringifiable - # ResultSet::populate() hands us a copy - safe to mangle - for my $r (0 .. $#$data) { - for my $c (0 .. $#{$data->[$r]}) { - $data->[$r][$c] = "$data->[$r][$c]" - if ( length ref $data->[$r][$c] and overload::Method($data->[$r][$c], '""') ); - } - } - my $colinfos = $source->columns_info($cols); local $self->{_autoinc_supplied_for_op} = @@ -2184,7 +2162,7 @@ sub insert_bulk { } } elsif (! defined $value_type_by_col_idx->{$col_idx} ) { # regular non-literal value - if (ref $val eq 'SCALAR' or (ref $val eq 'REF' and ref $$val eq 'ARRAY') ) { + if (is_literal_value($val)) { $bad_slice_report_cref->("Literal SQL found where a plain bind value is expected", $row_idx, $col_idx); } } diff --git a/lib/DBIx/Class/Storage/DBI/SQLite.pm b/lib/DBIx/Class/Storage/DBI/SQLite.pm index 2778dbdfe..3024e890d 100644 --- a/lib/DBIx/Class/Storage/DBI/SQLite.pm +++ b/lib/DBIx/Class/Storage/DBI/SQLite.pm @@ -6,7 +6,7 @@ use warnings; use base qw/DBIx::Class::Storage::DBI/; use mro 'c3'; -use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer); +use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer is_plain_value); use DBIx::Class::Carp; use Try::Tiny; use namespace::clean; @@ -326,7 +326,7 @@ sub _dbi_attrs_for_bind { for my $i (0.. 
$#$bindattrs) { - $stringifiable++ if ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') ); + $stringifiable++ if ( length ref $bind->[$i][1] and is_plain_value($bind->[$i][1]) ); if ( defined $bindattrs->[$i] diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index aa1128615..7a3587d89 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -16,6 +16,7 @@ use mro 'c3'; use List::Util 'first'; use Scalar::Util 'blessed'; use Sub::Name 'subname'; +use DBIx::Class::_Util qw(is_plain_value is_literal_value); use namespace::clean; # @@ -1142,7 +1143,7 @@ sub _collapse_cond_unroll_pairs { my ($l, $r) = %$p; - push @conds, ( ! ref $r or overload::Method($r, '""' ) ) + push @conds, ( ! length ref $r or is_plain_value($r) ) ? { $l => $r } : { $l => { '=' => $r } } ; @@ -1204,16 +1205,10 @@ sub _extract_fixed_condition_columns { for my $c (keys %$where_hash) { if (defined (my $v = $where_hash->{$c}) ) { if ( - ! ref $v + ! length ref $v or (ref $v eq 'HASH' and keys %$v == 1 and defined $v->{'='} and ( - ! ref $v->{'='} - or - ref $v->{'='} eq 'SCALAR' - or - ( ref $v->{'='} eq 'REF' and ref ${$v->{'='}} eq 'ARRAY' ) - or - overload::Method($v->{'='}, '""') + is_literal_value($v->{'='}) or is_plain_value( $v->{'='}) )) ) { $res->{$c} = 1; diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 518457cd3..1407ddcbd 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -55,9 +55,15 @@ use DBIx::Class::Carp '^DBIx::Class|^DBICTest'; use Carp 'croak'; use Scalar::Util qw(weaken blessed reftype); +use List::Util qw(first); +use overload (); use base 'Exporter'; -our @EXPORT_OK = qw(sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray refcount hrefaddr is_exception); +our @EXPORT_OK = qw( + sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray + refcount hrefaddr is_exception + is_plain_value is_literal_value +); sub sigwarn_silencer ($) { my $pattern = shift; @@ -153,6 +159,43 @@ sub modver_gt_or_eq ($$) { eval { $mod->VERSION($ver) } ? 1 : 0; } +sub is_literal_value ($) { + ( + ref $_[0] eq 'SCALAR' + or + ( ref $_[0] eq 'REF' and ref ${$_[0]} eq 'ARRAY' ) + ) ? 1 : 0; +} + +# FIXME XSify - this can be done so much more efficiently +sub is_plain_value ($) { + no strict 'refs'; + ( + # plain scalar + (! length ref $_[0]) + or + ( + blessed $_[0] + and + # deliberately not using Devel::OverloadInfo - the checks we are + # intersted in are much more limited than the fullblown thing, and + # this is a relatively hot piece of code + ( + # either has stringification which DBI prefers out of the box + #first { *{$_ . '::(""'}{CODE} } @{ mro::get_linear_isa( ref $_[0] ) } + overload::Method($_[0], '""') + or + # has nummification and fallback is *not* disabled + ( + $_[1] = first { *{"${_}::(0+"}{CODE} } @{ mro::get_linear_isa( ref $_[0] ) } + and + ( ! defined ${"$_[1]::()"} or ${"$_[1]::()"} ) + ) + ) + ) + ) ? 1 : 0; +} + { my $list_ctx_ok_stack_marker; diff --git a/t/100populate.t b/t/100populate.t index 4a3f0ac7a..27eb3effc 100644 --- a/t/100populate.t +++ b/t/100populate.t @@ -416,7 +416,7 @@ warnings_like { ) ? 
() # one unique for populate() and create() each - : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 2 + : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 3 ], 'Data integrity warnings as planned'; lives_ok { diff --git a/t/inflate/datetime.t b/t/inflate/datetime.t index 7062563bb..f0c610299 100644 --- a/t/inflate/datetime.t +++ b/t/inflate/datetime.t @@ -98,4 +98,52 @@ is("$varchar_datetime", '2006-05-22T19:05:07', 'Correct date/time'); my $skip_inflation = $event->skip_inflation; is ("$skip_inflation", '2006-04-21 18:04:06', 'Correct date/time'); +# create and update with literals +{ + my $d = { + created_on => \ '2001-09-11', + starts_at => \[ '?' => '2001-10-26' ], + }; + + my $ev = $schema->resultset('Event')->create($d); + + for my $col (qw(created_on starts_at)) { + ok (ref $ev->$col, "literal untouched in $col"); + is_deeply( $ev->$col, $d->{$col}); + is_deeply( $ev->get_inflated_column($col), $d->{$col}); + is_deeply( $ev->get_column($col), $d->{$col}); + } + + $ev->discard_changes; + + is_deeply( + { $ev->get_dirty_columns }, + {} + ); + + for my $col (qw(created_on starts_at)) { + isa_ok ($ev->$col, "DateTime", "$col properly inflated on retrieve"); + } + + for my $meth (qw(set_inflated_columns set_columns)) { + + $ev->$meth({%$d}); + + is_deeply( + { $ev->get_dirty_columns }, + $d, + "Expected dirty cols after setting literals via $meth", + ); + + $ev->update; + + for my $col (qw(created_on starts_at)) { + ok (ref $ev->$col, "literal untouched in $col updated via $meth"); + is_deeply( $ev->$col, $d->{$col}); + is_deeply( $ev->get_inflated_column($col), $d->{$col}); + is_deeply( $ev->get_column($col), $d->{$col}); + } + } +} + done_testing; diff --git a/t/internals/is_plain_value.t b/t/internals/is_plain_value.t new file mode 100644 index 000000000..81fe902bd --- /dev/null +++ b/t/internals/is_plain_value.t @@ -0,0 +1,60 @@ +use warnings; +use strict; + +use Test::More; +use Test::Warn; + +use lib qw(t/lib); +use DBICTest; + +use DBIx::Class::_Util 'is_plain_value'; + +{ + package # hideee + DBICTest::SillyInt; + + use overload + # *DELIBERATELY* unspecified + #fallback => 1, + '0+' => sub { ${$_[0]} }, + ; + + + package # hideee + DBICTest::SillyInt::Subclass; + + our @ISA = 'DBICTest::SillyInt'; + + + package # hideee + DBICTest::CrazyInt; + + use overload + '0+' => sub { 666 }, + '""' => sub { 999 }, + fallback => 1, + ; +} + +# check DBI behavior when fed a stringifiable/nummifiable value +{ + my $crazynum = bless {}, 'DBICTest::CrazyInt'; + cmp_ok( $crazynum, '==', 666 ); + cmp_ok( $crazynum, 'eq', 999 ); + + my $schema = DBICTest->init_schema( no_populate => 1 ); + $schema->storage->dbh_do(sub { + $_[1]->do('INSERT INTO artist (name) VALUES (?)', {}, $crazynum ); + }); + + is( $schema->resultset('Artist')->next->name, 999, 'DBI preferred stringified version' ); +} + +# make sure we recognize overloaded stuff properly +{ + my $num = bless( \do { my $foo = 69 }, 'DBICTest::SillyInt::Subclass' ); + ok( is_plain_value $num, 'parent-fallback-provided stringification detected' ); + is("$num", 69, 'test overloaded object stringifies, without specified fallback'); +} + +done_testing; From 7638636bdb0c040452faa52d290ea65979f321d2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 4 Jun 2014 02:17:20 +0200 Subject: [PATCH 037/548] Fix incorrect handling of stringifiable objects in cond collapser Thinko introduced in the recent 8d005ad9 --- 
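For reference, the distinction the collapser relies on here is the one drawn by the is_plain_value / is_literal_value helpers added in the previous patch. A rough sketch only -- it assumes a source checkout where DBIx::Class::_Util already provides both helpers, and the My::Stringifiable package below is invented purely for the illustration:

  use strict;
  use warnings;
  use DBIx::Class::_Util qw( is_plain_value is_literal_value );

  # something that stringifies via overload, the way DateTime or
  # Path::Class objects do when handed to a resultset condition
  { package My::Stringifiable; use overload '""' => sub { 'foo' }, fallback => 1; }
  my $obj = bless {}, 'My::Stringifiable';

  # bindable "plain" values: unblessed scalars and stringifiable objects
  print is_plain_value('foo'), "\n";        # 1
  print is_plain_value($obj), "\n";         # 1 - must be treated as a fixed value
  print is_plain_value(\'NOW()'), "\n";     # 0

  # literal SQL: \'...' and \[ ... ] references
  print is_literal_value(\'NOW()'), "\n";   # 1
  print is_literal_value(\['?', 42]), "\n"; # 1
  print is_literal_value($obj), "\n";       # 0
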
lib/DBIx/Class/Storage/DBIHacks.pm | 14 +++++++++++--- t/sqlmaker/dbihacks_internals.t | 26 ++++++++++++++++++++------ 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 7a3587d89..3164e5a4f 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1207,9 +1207,17 @@ sub _extract_fixed_condition_columns { if ( ! length ref $v or - (ref $v eq 'HASH' and keys %$v == 1 and defined $v->{'='} and ( - is_literal_value($v->{'='}) or is_plain_value( $v->{'='}) - )) + is_plain_value ($v) + or + ( + ref $v eq 'HASH' + and + keys %$v == 1 + and + ref $v->{'='} + and + is_literal_value($v->{'='}) + ) ) { $res->{$c} = 1; } diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index de5e49e81..81deb892e 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -11,6 +11,20 @@ use Data::Dumper; my $schema = DBICTest->init_schema( no_deploy => 1); my $sm = $schema->storage->sql_maker; +{ + package # hideee + DBICTest::SillyInt; + + use overload + fallback => 1, + '0+' => sub { ${$_[0]} }, + ; +} +my $num = bless( \do { my $foo = 69 }, 'DBICTest::SillyInt' ); + +is($num, 69, 'test overloaded object is "sane"'); +is("$num", 69, 'test overloaded object is "sane"'); + for my $t ( { where => { artistid => 1, charfield => undef }, @@ -49,14 +63,14 @@ for my $t ( efcc_result => [], }, { - where => { -and => [ \'foo=bar', [ { artistid => { '=', 3 } } ], { name => 'Caterwauler McCrae'} ] }, - cc_result => { '' => \'foo=bar', name => 'Caterwauler McCrae', artistid => 3 }, + where => { -and => [ \'foo=bar', [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'} ] }, + cc_result => { '' => \'foo=bar', name => 'Caterwauler McCrae', artistid => $num }, sql => 'WHERE foo=bar AND artistid = ? AND name = ?', efcc_result => [qw( artistid name )], }, { - where => { artistid => [ 1 ], rank => [ 13, 2, 3 ], charfield => [ undef ] }, - cc_result => { artistid => 1, charfield => undef, rank => [13, 2, 3] }, + where => { artistid => [ $num ], rank => [ 13, 2, 3 ], charfield => [ undef ] }, + cc_result => { artistid => $num, charfield => undef, rank => [13, 2, 3] }, sql => 'WHERE artistid = ? AND charfield IS NULL AND ( rank = ? OR rank = ? OR rank = ? )', efcc_result => [qw( artistid )], }, @@ -67,8 +81,8 @@ for my $t ( efcc_result => [qw( artistid )], }, { - where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ 1, 2 ] } }, - cc_result => { artistid => 1, charfield => [-and => { '=' => \'1' }, { '=' => \['?',2] } ], rank => { '=' => [1, 2] } }, + where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ $num, $num ] } }, + cc_result => { artistid => 1, charfield => [-and => { '=' => \'1' }, { '=' => \['?',2] } ], rank => { '=' => [$num, $num] } }, sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? 
)', efcc_result => [qw( artistid charfield )], }, From cc8ffd7bfc8a375545f35c8be3c224cf365ee7e7 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 31 May 2014 11:34:29 +0200 Subject: [PATCH 038/548] Call mro reinit in tests only on 5.8 --- t/85utf8.t | 2 +- t/cdbi/68-inflate_has_a.t | 2 +- t/inflate/serialize.t | 2 +- t/row/filter_column.t | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/t/85utf8.t b/t/85utf8.t index 64b49941e..e1f2caef8 100644 --- a/t/85utf8.t +++ b/t/85utf8.t @@ -87,7 +87,7 @@ warnings_like ( my $schema = DBICTest->init_schema(); DBICTest::Schema::CD->load_components('UTF8Columns'); DBICTest::Schema::CD->utf8_columns('title'); -Class::C3->reinitialize(); +Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; # as per http://search.cpan.org/dist/Test-Simple/lib/Test/More.pm#utf8 binmode (Test::More->builder->$_, ':utf8') for qw/output failure_output todo_output/; diff --git a/t/cdbi/68-inflate_has_a.t b/t/cdbi/68-inflate_has_a.t index 04abccb4c..246a31954 100644 --- a/t/cdbi/68-inflate_has_a.t +++ b/t/cdbi/68-inflate_has_a.t @@ -18,7 +18,7 @@ DBICTest::Schema::CD->has_a( 'year', 'DateTime', inflate => sub { DateTime->new( year => shift ) }, deflate => sub { shift->year } ); -Class::C3->reinitialize; +Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO; # inflation test my $cd = $schema->resultset("CD")->find(3); diff --git a/t/inflate/serialize.t b/t/inflate/serialize.t index 30d63ec37..5b1a437bc 100644 --- a/t/inflate/serialize.t +++ b/t/inflate/serialize.t @@ -35,7 +35,7 @@ DBICTest::Schema::Serialized->inflate_column( 'serialized', deflate => $selected->{deflater}, }, ); -Class::C3->reinitialize; +Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO; my $struct_hash = { a => 1, diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 4720575e9..4aa2b2829 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -14,7 +14,7 @@ DBICTest::Schema::Artist->filter_column(rank => { filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 }, filter_to_storage => sub { $to_storage_ran++; $_[1] / 2 }, }); -Class::C3->reinitialize(); +Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; my $artist = $schema->resultset('Artist')->create( { rank => 20 } ); @@ -143,7 +143,7 @@ IC_DIE: { DBICTest::Schema::Artist->filter_column(rank => { filter_to_storage => sub { $to_storage_ran++; $_[1] }, }); -Class::C3->reinitialize(); +Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; ASYMMETRIC_TO_TEST: { # initialise value @@ -169,7 +169,7 @@ ASYMMETRIC_TO_TEST: { DBICTest::Schema::Artist->filter_column(rank => { filter_from_storage => sub { $from_storage_ran++; $_[1] }, }); -Class::C3->reinitialize(); +Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; ASYMMETRIC_FROM_TEST: { # initialise value From 85aee309cf0565da729d43bb0f5677da548ffe34 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 2 Jun 2014 12:20:45 +0200 Subject: [PATCH 039/548] Clarify FC/IC conflict exception --- lib/DBIx/Class/FilterColumn.pm | 5 ++--- lib/DBIx/Class/InflateColumn.pm | 5 ++--- t/row/filter_column.t | 8 ++++---- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index cee647e1e..687c278ad 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -9,9 +9,8 @@ sub filter_column { my $colinfo = $self->column_info($col); - $self->throw_exception('FilterColumn does not work with InflateColumn') - if 
$self->isa('DBIx::Class::InflateColumn') && - defined $colinfo->{_inflate_info}; + $self->throw_exception("FilterColumn can not be used on a column with a declared InflateColumn inflator") + if defined $colinfo->{_inflate_info} and $self->isa('DBIx::Class::InflateColumn'); $self->throw_exception("No such column $col to filter") unless $self->has_column($col); diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index e9cc41707..23bc4c93e 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -89,9 +89,8 @@ sub inflate_column { my $colinfo = $self->column_info($col); - $self->throw_exception("InflateColumn does not work with FilterColumn") - if $self->isa('DBIx::Class::FilterColumn') && - defined $colinfo->{_filter_info}; + $self->throw_exception("InflateColumn can not be used on a column with a declared FilterColumn filter") + if defined $colinfo->{_filter_info} and $self->isa('DBIx::Class::FilterColumn'); $self->throw_exception("No such column $col to inflate") unless $self->has_column($col); diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 4aa2b2829..0ed8edf49 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -121,22 +121,22 @@ CACHE_TEST: { } IC_DIE: { - dies_ok { + throws_ok { DBICTest::Schema::Artist->inflate_column(rank => { inflate => sub {}, deflate => sub {} } ); - } q(Can't inflate column after filter column); + } qr/InflateColumn can not be used on a column with a declared FilterColumn filter/, q(Can't inflate column after filter column); DBICTest::Schema::Artist->inflate_column(name => { inflate => sub {}, deflate => sub {} } ); - dies_ok { + throws_ok { DBICTest::Schema::Artist->filter_column(name => { filter_to_storage => sub {}, filter_from_storage => sub {} }); - } q(Can't filter column after inflate column); + } qr/FilterColumn can not be used on a column with a declared InflateColumn inflator/, q(Can't filter column after inflate column); } # test when we do not set both filter_from_storage/filter_to_storage From 5ae153d7a25b664e5dc33824a7efd690ac9786b5 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 2 Jun 2014 14:05:45 +0200 Subject: [PATCH 040/548] Rename some variables and reformat the FC/IC codepaths for clarity Zero functional changes --- lib/DBIx/Class/FilterColumn.pm | 42 +++++++++++++++++++-------------- lib/DBIx/Class/InflateColumn.pm | 42 +++++++++++++++++++-------------- lib/DBIx/Class/Row.pm | 33 ++++++++++++++++++-------- t/relationship/core.t | 2 +- 4 files changed, 72 insertions(+), 47 deletions(-) diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index 687c278ad..c48b6c1aa 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -66,13 +66,17 @@ sub get_filtered_column { my $val = $self->get_column($col); - return $self->{_filtered_column}{$col} = $self->_column_from_storage($col, $val); + return $self->{_filtered_column}{$col} = $self->_column_from_storage( + $col, $val + ); } sub get_column { my ($self, $col) = @_; if (exists $self->{_filtered_column}{$col}) { - return $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col}); + return $self->{_column_data}{$col} ||= $self->_column_to_storage ( + $col, $self->{_filtered_column}{$col} + ); } return $self->next::method ($col); @@ -83,8 +87,9 @@ sub get_columns { my $self = shift; foreach my $col (keys %{$self->{_filtered_column}||{}}) { - $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, 
$self->{_filtered_column}{$col}) - if exists $self->{_filtered_column}{$col}; + $self->{_column_data}{$col} ||= $self->_column_to_storage ( + $col, $self->{_filtered_column}{$col} + ) if exists $self->{_filtered_column}{$col}; } $self->next::method (@_); @@ -117,36 +122,37 @@ sub set_filtered_column { } sub update { - my ($self, $attrs, @rest) = @_; + my ($self, $data, @rest) = @_; - foreach my $key (keys %{$attrs||{}}) { + foreach my $col (keys %{$data||{}}) { if ( - $self->has_column($key) + $self->has_column($col) && - exists $self->column_info($key)->{_filter_info} + exists $self->column_info($col)->{_filter_info} ) { - $self->set_filtered_column($key, delete $attrs->{$key}); + $self->set_filtered_column($col, delete $data->{$col}); # FIXME update() reaches directly into the object-hash # and we may *not* have a filtered value there - thus # the void-ctx filter-trigger - $self->get_column($key) unless exists $self->{_column_data}{$key}; + $self->get_column($col) unless exists $self->{_column_data}{$col}; } } - return $self->next::method($attrs, @rest); + return $self->next::method($data, @rest); } sub new { - my ($class, $attrs, @rest) = @_; - my $source = $attrs->{-result_source} + my ($class, $data, @rest) = @_; + my $source = $data->{-result_source} or $class->throw_exception('Sourceless rows are not supported with DBIx::Class::FilterColumn'); - my $obj = $class->next::method($attrs, @rest); - foreach my $key (keys %{$attrs||{}}) { - if ($obj->has_column($key) && - exists $obj->column_info($key)->{_filter_info} ) { - $obj->set_filtered_column($key, $attrs->{$key}); + my $obj = $class->next::method($data, @rest); + + foreach my $col (keys %{$data||{}}) { + if ($obj->has_column($col) && + exists $obj->column_info($col)->{_filter_info} ) { + $obj->set_filtered_column($col, $data->{$col}); } } diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index 23bc4c93e..d84af86b1 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -111,10 +111,11 @@ sub _inflated_column { return $value unless exists $info->{_inflate_info}; - my $inflate = $info->{_inflate_info}{inflate}; - $self->throw_exception("No inflator for $col") unless defined $inflate; - - return $inflate->($value, $self); + return ( + $info->{_inflate_info}{inflate} + || + $self->throw_exception("No inflator found for '$col'") + )->($value, $self); } sub _deflated_column { @@ -132,10 +133,11 @@ sub _deflated_column { return $value unless exists $info->{_inflate_info}; - my $deflate = $info->{_inflate_info}{deflate}; - $self->throw_exception("No deflator for $col") unless defined $deflate; - - return $deflate->($value, $self); + return ( + $info->{_inflate_info}{deflate} + || + $self->throw_exception("No deflator found for '$col'") + )->($value, $self); } =head2 get_inflated_column @@ -151,8 +153,11 @@ Throws an exception if the column requested is not an inflated column. sub get_inflated_column { my ($self, $col) = @_; + $self->throw_exception("$col is not an inflated column") unless exists $self->column_info($col)->{_inflate_info}; + + # we take care of keeping things in sync return $self->{_inflated_column}{$col} if exists $self->{_inflated_column}{$col}; @@ -173,15 +178,16 @@ analogous to L. =cut sub set_inflated_column { - my ($self, $col, $inflated) = @_; - $self->set_column($col, $self->_deflated_column($col, $inflated)); + my ($self, $col, $value) = @_; + + $self->set_column($col, $self->_deflated_column($col, $value)); - if (length ref $inflated and ! 
is_literal_value($inflated) ) { - $self->{_inflated_column}{$col} = $inflated; + if (length ref $value and ! is_literal_value($value) ) { + $self->{_inflated_column}{$col} = $value; } else { delete $self->{_inflated_column}{$col}; } - return $inflated; + return $value; } =head2 store_inflated_column @@ -194,18 +200,18 @@ as dirty. This is directly analogous to L. =cut sub store_inflated_column { - my ($self, $col, $inflated) = @_; + my ($self, $col, $value) = @_; - if (is_literal_value($inflated)) { + if (is_literal_value($value)) { delete $self->{_inflated_column}{$col}; - $self->store_column($col => $inflated); + $self->store_column($col => $value); } else { delete $self->{_column_data}{$col}; - $self->{_inflated_column}{$col} = $inflated; + $self->{_inflated_column}{$col} = $value; } - return $inflated; + return $value; } =head1 SEE ALSO diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 05d19a5b6..9127c942e 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -662,12 +662,20 @@ To retrieve all loaded column values as a hash, use L. sub get_column { my ($self, $column) = @_; $self->throw_exception( "Can't fetch data as class method" ) unless ref $self; - return $self->{_column_data}{$column} if exists $self->{_column_data}{$column}; + + return $self->{_column_data}{$column} + if exists $self->{_column_data}{$column}; + if (exists $self->{_inflated_column}{$column}) { - return $self->store_column($column, - $self->_deflated_column($column, $self->{_inflated_column}{$column})); + # deflate+return cycle + return $self->store_column($column, $self->_deflated_column( + $column, $self->{_inflated_column}{$column} + )); } - $self->throw_exception( "No such column '${column}'" ) unless $self->has_column($column); + + $self->throw_exception( "No such column '${column}'" ) + unless $self->has_column($column); + return undef; } @@ -693,8 +701,12 @@ database (or set locally). sub has_column_loaded { my ($self, $column) = @_; $self->throw_exception( "Can't call has_column data as class method" ) unless ref $self; - return 1 if exists $self->{_inflated_column}{$column}; - return exists $self->{_column_data}{$column}; + + return ( + exists $self->{_inflated_column}{$column} + or + exists $self->{_column_data}{$column} + ) ? 1 : 0; } =head2 get_columns @@ -719,6 +731,7 @@ See L to get the inflated values. sub get_columns { my $self = shift; if (exists $self->{_inflated_column}) { + # deflate cycle for each inflation, including filter rels foreach my $col (keys %{$self->{_inflated_column}}) { unless (exists $self->{_column_data}{$col}) { @@ -920,15 +933,14 @@ sub set_column { my ($self, $column, $new_value) = @_; my $had_value = $self->has_column_loaded($column); - my ($old_value, $in_storage) = ($self->get_column($column), $self->in_storage) - if $had_value; + my $old_value = $self->get_column($column); $new_value = $self->store_column($column, $new_value); my $dirty = $self->{_dirty_columns}{$column} || - $in_storage # no point tracking dirtyness on uninserted data + $self->in_storage # no point tracking dirtyness on uninserted data ? ! $self->_eq_column_values ($column, $old_value, $new_value) : 1 ; @@ -963,7 +975,7 @@ sub set_column { $had_value and # no storage - no storage-value - $in_storage + $self->in_storage and # no value already stored (multiple changes before commit to storage) ! 
exists $self->{_column_data_in_storage}{$column} @@ -1074,6 +1086,7 @@ sub set_inflated_columns { if (ref $upd->{$key}) { my $info = $self->relationship_info($key); my $acc_type = $info->{attrs}{accessor} || ''; + if ($acc_type eq 'single') { my $rel_obj = delete $upd->{$key}; $self->set_from_related($key => $rel_obj); diff --git a/t/relationship/core.t b/t/relationship/core.t index 46e655c3e..c611142ec 100644 --- a/t/relationship/core.t +++ b/t/relationship/core.t @@ -227,7 +227,7 @@ is( $twokey->fourkeys_to_twokeys->count, 0, my $undef_artist_cd = $schema->resultset("CD")->new_result({ 'title' => 'badgers', 'year' => 2007 }); -is($undef_artist_cd->has_column_loaded('artist'), '', 'FK not loaded'); +ok(! $undef_artist_cd->has_column_loaded('artist'), 'FK not loaded'); is($undef_artist_cd->search_related('artist')->count, 0, '0=1 search when FK does not exist and object not yet in db'); lives_ok { $undef_artist_cd->related_resultset('artist')->new({name => 'foo'}); From b342451e92ea65394993accbf0a79cc53e2b4dc6 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 4 Jun 2014 13:28:42 +0200 Subject: [PATCH 041/548] Reorganize IC column setters Should again result in no functional changes --- lib/DBIx/Class/InflateColumn.pm | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index d84af86b1..3236b1cb3 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -104,7 +104,12 @@ sub inflate_column { sub _inflated_column { my ($self, $col, $value) = @_; - return $value unless defined $value; # NULL is NULL is NULL + + return $value if ( + ! defined $value # NULL is NULL is NULL + or + is_literal_value($value) #that would be a not-yet-reloaded literal update + ); my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); @@ -163,8 +168,6 @@ sub get_inflated_column { my $val = $self->get_column($col); - return $val if is_literal_value($val); #that would be a not-yet-reloaded literal update - return $self->{_inflated_column}{$col} = $self->_inflated_column($col, $val); } @@ -180,13 +183,19 @@ analogous to L. sub set_inflated_column { my ($self, $col, $value) = @_; - $self->set_column($col, $self->_deflated_column($col, $value)); - - if (length ref $value and ! is_literal_value($value) ) { - $self->{_inflated_column}{$col} = $value; - } else { + # pass through deflated stuff + if (! length ref $value or is_literal_value($value)) { + $self->set_column($col, $value); delete $self->{_inflated_column}{$col}; } + # need to call set_column with the deflate cycle so that + # relationship caches are nuked if any + # also does the compare-for-dirtyness and change tracking dance + else { + $self->set_column($col, $self->_deflated_column($col, $value)); + $self->{_inflated_column}{$col} = $value; + } + return $value; } @@ -202,7 +211,7 @@ as dirty. This is directly analogous to L. sub store_inflated_column { my ($self, $col, $value) = @_; - if (is_literal_value($value)) { + if (! 
length ref $value or is_literal_value($value)) { delete $self->{_inflated_column}{$col}; $self->store_column($col => $value); } From dc6dadae6312ce97e6d85c343805ce940671d6e9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 4 Jun 2014 13:55:30 +0200 Subject: [PATCH 042/548] A number of equivalent-logic ::FC refactors Stop invoking a comparison on the unfiltered values - delegate it properly to set_column() --- lib/DBIx/Class/FilterColumn.pm | 40 ++++++++++++++++++++++------------ t/row/filter_column.t | 22 ++++++++++++++++++- xt/podcoverage.t | 1 + 3 files changed, 48 insertions(+), 15 deletions(-) diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index c48b6c1aa..6d7e48c97 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -73,6 +73,7 @@ sub get_filtered_column { sub get_column { my ($self, $col) = @_; + if (exists $self->{_filtered_column}{$col}) { return $self->{_column_data}{$col} ||= $self->_column_to_storage ( $col, $self->{_filtered_column}{$col} @@ -86,11 +87,12 @@ sub get_column { sub get_columns { my $self = shift; - foreach my $col (keys %{$self->{_filtered_column}||{}}) { - $self->{_column_data}{$col} ||= $self->_column_to_storage ( - $col, $self->{_filtered_column}{$col} - ) if exists $self->{_filtered_column}{$col}; - } + $self->{_column_data}{$_} = $self->_column_to_storage ( + $_, $self->{_filtered_column}{$_} + ) for grep + { ! exists $self->{_column_data}{$_} } + keys %{$self->{_filtered_column}||{}} + ; $self->next::method (@_); } @@ -104,19 +106,29 @@ sub store_column { $self->next::method(@_); } +sub has_column_loaded { + my ($self, $col) = @_; + return 1 if exists $self->{_filtered_column}{$col}; + return $self->next::method($col); +} + sub set_filtered_column { my ($self, $col, $filtered) = @_; - # do not blow up the cache via set_column unless necessary - # (filtering may be expensive!) - if (exists $self->{_filtered_column}{$col}) { - return $filtered - if ($self->_eq_column_values ($col, $filtered, $self->{_filtered_column}{$col} ) ); - - $self->make_column_dirty ($col); # so the comparison won't run again + # unlike IC, FC does not need to deal with the 'filter' abomination + # thus we can short-curcuit filtering entirely and never call set_column + # in case this is already a dirty change OR the row never touched storage + if ( + ! $self->in_storage + or + $self->is_column_changed($col) + ) { + $self->make_column_dirty($col); + delete $self->{_column_data}{$col}; } - - $self->set_column($col, $self->_column_to_storage($col, $filtered)); + else { + $self->set_column($col, $self->_column_to_storage($col, $filtered)); + }; return $self->{_filtered_column}{$col} = $filtered; } diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 0ed8edf49..fb8dd00e1 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -111,7 +111,7 @@ CACHE_TEST: { ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same accessor-set value'); is ($artist->rank, '6', 'Column set properly'); is $from_storage_ran, $expected_from, 'from did not run'; - is $to_storage_ran, $expected_to, 'to did not run'; + is $to_storage_ran, ++$expected_to, 'to did run once (call in to set_column)'; $artist->store_column(rank => 4); ok (! 
$artist->is_column_changed ('rank'), 'Column not marked as dirty on differing store_column value'); @@ -120,6 +120,26 @@ CACHE_TEST: { is $to_storage_ran, $expected_to, 'to did not run'; } +# test in-memory operations +for my $artist_maker ( + sub { $schema->resultset('Artist')->new({ rank => 42 }) }, + sub { my $art = $schema->resultset('Artist')->new({}); $art->rank(42); $art }, +) { + + my $expected_from = $from_storage_ran; + my $expected_to = $to_storage_ran; + + my $artist = $artist_maker->(); + + is $from_storage_ran, $expected_from, 'from has not run yet'; + is $to_storage_ran, $expected_to, 'to has not run yet'; + + ok( ! $artist->has_column_loaded('artistid'), 'pk not loaded' ); + ok( $artist->has_column_loaded('rank'), 'Filtered column marked as loaded under new' ); + is( $artist->rank, 42, 'Proper unfiltered value' ); + is( $artist->get_column('rank'), 21, 'Proper filtered value' ); +} + IC_DIE: { throws_ok { DBICTest::Schema::Artist->inflate_column(rank => diff --git a/xt/podcoverage.t b/xt/podcoverage.t index da4858047..a16a3653a 100644 --- a/xt/podcoverage.t +++ b/xt/podcoverage.t @@ -57,6 +57,7 @@ my $exceptions = { store_column get_column get_columns + has_column_loaded /], }, 'DBIx::Class::ResultSource' => { From a524980e87f8d0063f051a4f949e0a4a20cd4a8f Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 4 Jun 2014 14:53:48 +0200 Subject: [PATCH 043/548] Teach FC about literals --- Changes | 4 ++++ lib/DBIx/Class/FilterColumn.pm | 12 ++++++++++-- t/row/filter_column.t | 27 +++++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/Changes b/Changes index 0b971bd81..d1f8cdd8c 100644 --- a/Changes +++ b/Changes @@ -1,5 +1,9 @@ Revision history for DBIx::Class + * Notable Changes and Deprecations + - DBIC::FilterColumn now properly bypasses \'' and \[] literals, just + like the rest of DBIC + * Fixes - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index 6d7e48c97..6fdf1adee 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -2,7 +2,9 @@ package DBIx::Class::FilterColumn; use strict; use warnings; -use base qw/DBIx::Class::Row/; +use base 'DBIx::Class::Row'; +use DBIx::Class::_Util 'is_literal_value'; +use namespace::clean; sub filter_column { my ($self, $col, $attrs) = @_; @@ -30,7 +32,11 @@ sub filter_column { sub _column_from_storage { my ($self, $col, $value) = @_; - return $value unless defined $value; + return $value if ( + ! 
defined $value + or + is_literal_value($value) + ); my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); @@ -45,6 +51,8 @@ sub _column_from_storage { sub _column_to_storage { my ($self, $col, $value) = @_; + return $value if is_literal_value($value); + my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); diff --git a/t/row/filter_column.t b/t/row/filter_column.t index fb8dd00e1..785206d6f 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -140,6 +140,33 @@ for my $artist_maker ( is( $artist->get_column('rank'), 21, 'Proper filtered value' ); } +# test literals +for my $v ( \ '16', \[ '?', '16' ] ) { + my $art = $schema->resultset('Artist')->new({ rank => 10 }); + $art->rank($v); + + is_deeply( $art->rank, $v); + is_deeply( $art->get_filtered_column("rank"), $v); + is_deeply( $art->get_column("rank"), $v); + + $art->insert; + $art->discard_changes; + + is ($art->get_column("rank"), 16, "Literal inserted into database properly"); + is ($art->rank, 32, "filtering still works"); + + $art->update({ rank => $v }); + + is_deeply( $art->rank, $v); + is_deeply( $art->get_filtered_column("rank"), $v); + is_deeply( $art->get_column("rank"), $v); + + $art->discard_changes; + + is ($art->get_column("rank"), 16, "Literal inserted into database properly"); + is ($art->rank, 32, "filtering still works"); +} + IC_DIE: { throws_ok { DBICTest::Schema::Artist->inflate_column(rank => From 1bc006cfc0c25ea943851c7c0686eb6a37ef912d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 5 Jun 2014 01:05:31 +0200 Subject: [PATCH 044/548] Use a nullable column for the ::FC test This is just a retarget, no test was actually modified --- t/row/filter_column.t | 135 +++++++++++++++++++++--------------------- 1 file changed, 69 insertions(+), 66 deletions(-) diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 785206d6f..066fbfa80 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -8,65 +8,65 @@ use DBICTest; my $from_storage_ran = 0; my $to_storage_ran = 0; -my $schema = DBICTest->init_schema(); +my $schema = DBICTest->init_schema( no_populate => 1 ); DBICTest::Schema::Artist->load_components(qw(FilterColumn InflateColumn)); -DBICTest::Schema::Artist->filter_column(rank => { +DBICTest::Schema::Artist->filter_column(charfield => { filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 }, filter_to_storage => sub { $to_storage_ran++; $_[1] / 2 }, }); Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; -my $artist = $schema->resultset('Artist')->create( { rank => 20 } ); +my $artist = $schema->resultset('Artist')->create( { charfield => 20 } ); # this should be using the cursor directly, no inflation/processing of any sort -my ($raw_db_rank) = $schema->resultset('Artist') +my ($raw_db_charfield) = $schema->resultset('Artist') ->search ($artist->ident_condition) - ->get_column('rank') + ->get_column('charfield') ->_resultset ->cursor ->next; -is ($raw_db_rank, 10, 'INSERT: correctly unfiltered on insertion'); +is ($raw_db_charfield, 10, 'INSERT: correctly unfiltered on insertion'); for my $reloaded (0, 1) { my $test = $reloaded ? 
'reloaded' : 'stored'; $artist->discard_changes if $reloaded; - is( $artist->rank , 20, "got $test filtered rank" ); + is( $artist->charfield , 20, "got $test filtered charfield" ); } $artist->update; $artist->discard_changes; -is( $artist->rank , 20, "got filtered rank" ); +is( $artist->charfield , 20, "got filtered charfield" ); -$artist->update ({ rank => 40 }); -($raw_db_rank) = $schema->resultset('Artist') +$artist->update ({ charfield => 40 }); +($raw_db_charfield) = $schema->resultset('Artist') ->search ($artist->ident_condition) - ->get_column('rank') + ->get_column('charfield') ->_resultset ->cursor ->next; -is ($raw_db_rank, 20, 'UPDATE: correctly unflitered on update'); +is ($raw_db_charfield, 20, 'UPDATE: correctly unflitered on update'); $artist->discard_changes; -$artist->rank(40); -ok( !$artist->is_column_changed('rank'), 'column is not dirty after setting the same value' ); +$artist->charfield(40); +ok( !$artist->is_column_changed('charfield'), 'column is not dirty after setting the same value' ); MC: { my $cd = $schema->resultset('CD')->create({ - artist => { rank => 20 }, + artist => { charfield => 20 }, title => 'fun time city!', year => 'forevertime', }); - ($raw_db_rank) = $schema->resultset('Artist') + ($raw_db_charfield) = $schema->resultset('Artist') ->search ($cd->artist->ident_condition) - ->get_column('rank') + ->get_column('charfield') ->_resultset ->cursor ->next; - is $raw_db_rank, 10, 'artist rank gets correctly unfiltered w/ MC'; - is $cd->artist->rank, 20, 'artist rank gets correctly filtered w/ MC'; + is $raw_db_charfield, 10, 'artist charfield gets correctly unfiltered w/ MC'; + is $cd->artist->charfield, 20, 'artist charfield gets correctly filtered w/ MC'; } CACHE_TEST: { @@ -79,51 +79,51 @@ CACHE_TEST: { is $from_storage_ran, $expected_from, 'from has not run yet'; is $to_storage_ran, $expected_to, 'to has not run yet'; - $artist->rank; + $artist->charfield; cmp_ok ( - $artist->get_filtered_column('rank'), + $artist->get_filtered_column('charfield'), '!=', - $artist->get_column('rank'), + $artist->get_column('charfield'), 'filter/unfilter differ' ); is $from_storage_ran, ++$expected_from, 'from ran once, therefor caches'; is $to_storage_ran, $expected_to, 'to did not run'; - $artist->rank(6); + $artist->charfield(6); is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, ++$expected_to, 'to ran once'; - ok ($artist->is_column_changed ('rank'), 'Column marked as dirty'); + ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty'); - $artist->rank; + $artist->charfield; is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, $expected_to, 'to did not run'; $artist->update; - $artist->set_column(rank => 3); - ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same set_column value'); - is ($artist->rank, '6', 'Column set properly (cache blown)'); + $artist->set_column(charfield => 3); + ok (! $artist->is_column_changed ('charfield'), 'Column not marked as dirty on same set_column value'); + is ($artist->charfield, '6', 'Column set properly (cache blown)'); is $from_storage_ran, ++$expected_from, 'from ran once (set_column blew cache)'; is $to_storage_ran, $expected_to, 'to did not run'; - $artist->rank(6); - ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same accessor-set value'); - is ($artist->rank, '6', 'Column set properly'); + $artist->charfield(6); + ok (! 
$artist->is_column_changed ('charfield'), 'Column not marked as dirty on same accessor-set value'); + is ($artist->charfield, '6', 'Column set properly'); is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, ++$expected_to, 'to did run once (call in to set_column)'; - $artist->store_column(rank => 4); - ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on differing store_column value'); - is ($artist->rank, '8', 'Cache properly blown'); + $artist->store_column(charfield => 4); + ok (! $artist->is_column_changed ('charfield'), 'Column not marked as dirty on differing store_column value'); + is ($artist->charfield, '8', 'Cache properly blown'); is $from_storage_ran, ++$expected_from, 'from did not run'; is $to_storage_ran, $expected_to, 'to did not run'; } # test in-memory operations for my $artist_maker ( - sub { $schema->resultset('Artist')->new({ rank => 42 }) }, - sub { my $art = $schema->resultset('Artist')->new({}); $art->rank(42); $art }, + sub { $schema->resultset('Artist')->new({ charfield => 42 }) }, + sub { my $art = $schema->resultset('Artist')->new({}); $art->charfield(42); $art }, ) { my $expected_from = $from_storage_ran; @@ -135,41 +135,44 @@ for my $artist_maker ( is $to_storage_ran, $expected_to, 'to has not run yet'; ok( ! $artist->has_column_loaded('artistid'), 'pk not loaded' ); - ok( $artist->has_column_loaded('rank'), 'Filtered column marked as loaded under new' ); - is( $artist->rank, 42, 'Proper unfiltered value' ); - is( $artist->get_column('rank'), 21, 'Proper filtered value' ); + ok( $artist->has_column_loaded('charfield'), 'Filtered column marked as loaded under new' ); + is( $artist->charfield, 42, 'Proper unfiltered value' ); + is( $artist->get_column('charfield'), 21, 'Proper filtered value' ); } # test literals for my $v ( \ '16', \[ '?', '16' ] ) { - my $art = $schema->resultset('Artist')->new({ rank => 10 }); - $art->rank($v); + my $rs = $schema->resultset('Artist'); + $rs->delete; - is_deeply( $art->rank, $v); - is_deeply( $art->get_filtered_column("rank"), $v); - is_deeply( $art->get_column("rank"), $v); + my $art = $rs->new({ charfield => 10 }); + $art->charfield($v); + + is_deeply( $art->charfield, $v); + is_deeply( $art->get_filtered_column("charfield"), $v); + is_deeply( $art->get_column("charfield"), $v); $art->insert; $art->discard_changes; - is ($art->get_column("rank"), 16, "Literal inserted into database properly"); - is ($art->rank, 32, "filtering still works"); + is ($art->get_column("charfield"), 16, "Literal inserted into database properly"); + is ($art->charfield, 32, "filtering still works"); - $art->update({ rank => $v }); + $art->update({ charfield => $v }); - is_deeply( $art->rank, $v); - is_deeply( $art->get_filtered_column("rank"), $v); - is_deeply( $art->get_column("rank"), $v); + is_deeply( $art->charfield, $v); + is_deeply( $art->get_filtered_column("charfield"), $v); + is_deeply( $art->get_column("charfield"), $v); $art->discard_changes; - is ($art->get_column("rank"), 16, "Literal inserted into database properly"); - is ($art->rank, 32, "filtering still works"); + is ($art->get_column("charfield"), 16, "Literal inserted into database properly"); + is ($art->charfield, 32, "filtering still works"); } IC_DIE: { throws_ok { - DBICTest::Schema::Artist->inflate_column(rank => + DBICTest::Schema::Artist->inflate_column(charfield => { inflate => sub {}, deflate => sub {} } ); } qr/InflateColumn can not be used on a column with a declared FilterColumn filter/, q(Can't inflate column after 
filter column); @@ -187,59 +190,59 @@ IC_DIE: { } # test when we do not set both filter_from_storage/filter_to_storage -DBICTest::Schema::Artist->filter_column(rank => { +DBICTest::Schema::Artist->filter_column(charfield => { filter_to_storage => sub { $to_storage_ran++; $_[1] }, }); Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; ASYMMETRIC_TO_TEST: { # initialise value - $artist->rank(20); + $artist->charfield(20); $artist->update; my $expected_from = $from_storage_ran; my $expected_to = $to_storage_ran; - $artist->rank(10); - ok ($artist->is_column_changed ('rank'), 'Column marked as dirty on accessor-set value'); - is ($artist->rank, '10', 'Column set properly'); + $artist->charfield(10); + ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty on accessor-set value'); + is ($artist->charfield, '10', 'Column set properly'); is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, ++$expected_to, 'to did run'; $artist->discard_changes; - is ($artist->rank, '20', 'Column set properly'); + is ($artist->charfield, '20', 'Column set properly'); is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, $expected_to, 'to did not run'; } -DBICTest::Schema::Artist->filter_column(rank => { +DBICTest::Schema::Artist->filter_column(charfield => { filter_from_storage => sub { $from_storage_ran++; $_[1] }, }); Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; ASYMMETRIC_FROM_TEST: { # initialise value - $artist->rank(23); + $artist->charfield(23); $artist->update; my $expected_from = $from_storage_ran; my $expected_to = $to_storage_ran; - $artist->rank(13); - ok ($artist->is_column_changed ('rank'), 'Column marked as dirty on accessor-set value'); - is ($artist->rank, '13', 'Column set properly'); + $artist->charfield(13); + ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty on accessor-set value'); + is ($artist->charfield, '13', 'Column set properly'); is $from_storage_ran, $expected_from, 'from did not run'; is $to_storage_ran, $expected_to, 'to did not run'; $artist->discard_changes; - is ($artist->rank, '23', 'Column set properly'); + is ($artist->charfield, '23', 'Column set properly'); is $from_storage_ran, ++$expected_from, 'from did run'; is $to_storage_ran, $expected_to, 'to did not run'; } -throws_ok { DBICTest::Schema::Artist->filter_column( rank => {} ) } +throws_ok { DBICTest::Schema::Artist->filter_column( charfield => {} ) } qr/\QAn invocation of filter_column() must specify either a filter_from_storage or filter_to_storage/, 'Correctly throws exception for empty attributes' ; From cfa1ab03f5bdd0f14f4eaca99cd002be0020d001 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 5 Jun 2014 01:10:50 +0200 Subject: [PATCH 045/548] Do not skip running from_storage filter when a NULL is returned --- Changes | 2 ++ lib/DBIx/Class/FilterColumn.pm | 6 +----- t/row/filter_column.t | 16 ++++++++++++++-- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/Changes b/Changes index d1f8cdd8c..2932f7128 100644 --- a/Changes +++ b/Changes @@ -3,6 +3,8 @@ Revision history for DBIx::Class * Notable Changes and Deprecations - DBIC::FilterColumn now properly bypasses \'' and \[] literals, just like the rest of DBIC + - DBIC::FilterColumn "from_storage" handler is now invoked on NULLs + returned from storage * Fixes - Fix on_connect_* not always firing in some cases - a race condition diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index 6fdf1adee..eb4666409 100644 --- 
a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -32,11 +32,7 @@ sub filter_column { sub _column_from_storage { my ($self, $col, $value) = @_; - return $value if ( - ! defined $value - or - is_literal_value($value) - ); + return $value if is_literal_value($value); my $info = $self->column_info($col) or $self->throw_exception("No column info for $col"); diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 066fbfa80..26631b44f 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -11,8 +11,8 @@ my $to_storage_ran = 0; my $schema = DBICTest->init_schema( no_populate => 1 ); DBICTest::Schema::Artist->load_components(qw(FilterColumn InflateColumn)); DBICTest::Schema::Artist->filter_column(charfield => { - filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 }, - filter_to_storage => sub { $to_storage_ran++; $_[1] / 2 }, + filter_from_storage => sub { $from_storage_ran++; defined $_[1] ? $_[1] * 2 : undef }, + filter_to_storage => sub { $to_storage_ran++; defined $_[1] ? $_[1] / 2 : undef }, }); Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; @@ -118,6 +118,18 @@ CACHE_TEST: { is ($artist->charfield, '8', 'Cache properly blown'); is $from_storage_ran, ++$expected_from, 'from did not run'; is $to_storage_ran, $expected_to, 'to did not run'; + + $artist->update({ charfield => undef }); + is $from_storage_ran, $expected_from, 'from did not run'; + is $to_storage_ran, ++$expected_to, 'to did run'; + + $artist->discard_changes; + is ( $artist->get_column('charfield'), undef, 'Got back null' ); + is ( $artist->charfield, undef, 'Got back null through filter' ); + + is $from_storage_ran, ++$expected_from, 'from did run'; + is $to_storage_ran, $expected_to, 'to did not run'; + } # test in-memory operations From 31160673f390e178ee347e7ebee1f56b3f54ba7a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 5 Jun 2014 01:44:37 +0200 Subject: [PATCH 046/548] Fix grave resultset-level delete/update bug In case of a read-modify cycle (usually due to multicolumn PK, without multicolumn IN support) DBIC would end up with a null condition and nuke the entire underlying table from orbit I have no words towards my younger self... --- Changes | 2 ++ lib/DBIx/Class/ResultSet.pm | 5 ++--- t/resultset/update_delete.t | 23 +++++++++++++++++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/Changes b/Changes index 2932f7128..c82b3556b 100644 --- a/Changes +++ b/Changes @@ -7,6 +7,8 @@ Revision history for DBIx::Class returned from storage * Fixes + - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases + of empty (due to conditions) resultsets with multi-column keys - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver routines, triggering a connection before the set-cycle is finished diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index e2c87ddd6..ca8a23e00 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -1944,7 +1944,6 @@ sub _rs_update_delete { $guard = $storage->txn_scope_guard; - $cond = []; for my $row ($subrs->cursor->all) { push @$cond, { map { $idcols->[$_] => $row->[$_] } @@ -1954,11 +1953,11 @@ sub _rs_update_delete { } } - my $res = $storage->$op ( + my $res = $cond ? $storage->$op ( $rsrc, $op eq 'update' ? 
$values : (), $cond, - ); + ) : '0E0'; $guard->commit if $guard; diff --git a/t/resultset/update_delete.t b/t/resultset/update_delete.t index ee32717dd..c3e8c2f58 100644 --- a/t/resultset/update_delete.t +++ b/t/resultset/update_delete.t @@ -105,6 +105,29 @@ is ($fa->discard_changes->read_count, 12, 'Update ran only once on joined result is ($fb->discard_changes->read_count, 22, 'Update ran only once on joined resultset'); is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); +$schema->is_executed_sql_bind( sub { + my $res = $fks_multi->search (\' "blah" = "bleh" ')->delete; + ok ($res, 'operation is true'); + cmp_ok ($res, '==', 0, 'zero rows affected'); +}, [ + [ 'BEGIN' ], + [ + 'SELECT me.foo, me.bar, me.hello, me.goodbye + FROM fourkeys me + LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys + ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello + WHERE "blah" = "bleh" AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? + GROUP BY me.foo, me.bar, me.hello, me.goodbye + ', + (1, 2) x 2, + 666, + (1, 2) x 2, + 'c', + ], + [ 'COMMIT' ], +], 'Correct null-delete-SQL with multijoin without pruning' ); + + # try the same sql with forced multicolumn in $schema->is_executed_sql_bind( sub { local $schema->storage->{_use_multicolumn_in} = 1; From 32250d01415849731328c292c4f77b25a88444e6 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 6 Jun 2014 18:38:52 +0200 Subject: [PATCH 047/548] Revert heading change from 06752a03d, centralize the URL spec There were several broken links referring to the old way of doing things, and help/support sounds better anyway. Reverting with added comments and whatnot --- lib/DBIx/Class.pm | 18 ++++++++++++------ lib/DBIx/Class/ResultSourceHandle.pm | 2 +- lib/DBIx/Class/Storage/DBI.pm | 2 +- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 85450ac43..4c7e298b7 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -59,8 +59,6 @@ sub _attr_cache { 1; -__END__ - =encoding UTF-8 =head1 NAME @@ -74,13 +72,21 @@ To get the most out of DBIx::Class with the least confusion it is strongly recommended to read (at the very least) the L in the order presented there. -=head1 HOW TO GET HELP +=cut + +# *DO NOT* change this heading - it is linked throughout the ecosystem + +sub DBIx::Class::_ENV_::HELP_URL () { + 'http://p3rl.org/DBIx::Class#GETTING_HELP/SUPPORT' +} + +=head1 GETTING HELP/SUPPORT -Due to the complexity of its problem domain, DBIx::Class is a relatively +Due to the sheer size of its problem domain, DBIx::Class is a relatively complex framework. After you start using DBIx::Class questions will inevitably arise. If you are stuck with a problem or have doubts about a particular -approach do not hesitate to contact the community with your questions. 
The -list below is sorted by "fastest response time": +approach do not hesitate to contact us via any of the following options (the +list is sorted by "fastest response time"): =over diff --git a/lib/DBIx/Class/ResultSourceHandle.pm b/lib/DBIx/Class/ResultSourceHandle.pm index 733db837d..abbac4c7b 100644 --- a/lib/DBIx/Class/ResultSourceHandle.pm +++ b/lib/DBIx/Class/ResultSourceHandle.pm @@ -68,7 +68,7 @@ sub resolve { # vague error message as this is never supposed to happen "Unable to resolve moniker '%s' - please contact the dev team at %s", $_[0]->source_moniker, - 'http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT', + DBIx::Class::_ENV_::HELP_URL, ), 'full_stacktrace'); } diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index a213c9588..0bf32949f 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1310,7 +1310,7 @@ sub _determine_driver { "Your storage subclass @{[ ref $self ]} provides (or inherits) the method " . 'source_bind_attributes() for which support has been removed as of Jan 2013. ' . 'If you are not sure how to proceed please contact the development team via ' - . 'http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT' + . DBIx::Class::_ENV_::HELP_URL ); } From f8193780f1e03bc6f1316204a03bf604faf9267b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 10 Jun 2014 13:54:32 +0200 Subject: [PATCH 048/548] Fix incorrect handling of custom relationship conditions containing literals --- Changes | 2 ++ lib/DBIx/Class/Relationship/Base.pm | 22 ++++++------ lib/DBIx/Class/ResultSource.pm | 55 ++++++++++------------------- lib/DBIx/Class/Row.pm | 2 +- t/lib/DBICTest/Schema/Artist.pm | 4 +-- t/relationship/custom.t | 2 +- 6 files changed, 37 insertions(+), 50 deletions(-) diff --git a/Changes b/Changes index c82b3556b..1ae385ce3 100644 --- a/Changes +++ b/Changes @@ -12,6 +12,8 @@ Revision history for DBIx::Class - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver routines, triggering a connection before the set-cycle is finished + - Fix incorrect handling of custom relationship conditions returning + SQLA literal expressions - Fix multi-value literal populate not working with simplified bind specifications - Massively improve the implied resultset condition parsing - now all diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index a41d6b4df..16d213dc3 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -483,11 +483,8 @@ sub related_resultset { $rsrc->_resolve_condition( $rel_info->{cond}, $rel, $self, $rel ) } catch { - if ($self->in_storage) { - $self->throw_exception ($_); - } - - $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION; # RV + $self->throw_exception ($_) if $self->in_storage; + $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION; # RV, no return() }; # keep in mind that the following if() block is part of a do{} - no return()s!!! 
@@ -642,14 +639,18 @@ sub new_related { my $rsrc = $self->result_source; my $rel_info = $rsrc->relationship_info($rel) or $self->throw_exception( "No such relationship '$rel'" ); - my (undef, $crosstable, $cond_targets) = $rsrc->_resolve_condition ( + my (undef, $crosstable, $nonequality_foreign_columns) = $rsrc->_resolve_condition ( $rel_info->{cond}, $rel, $self, $rel ); $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment") if $crosstable; - if (my @unspecified_rel_condition_chunks = grep { ! exists $values->{$_} } @{$cond_targets||[]} ) { + if ( + $nonequality_foreign_columns + and + my @unspecified_rel_condition_chunks = grep { ! exists $values->{$_} } @$nonequality_foreign_columns + ) { $self->throw_exception(sprintf ( "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s", $rel, @@ -818,16 +819,17 @@ sub set_from_related { # # sanity check - currently throw when a complex coderef rel is encountered # FIXME - should THROW MOAR! - my ($cond, $crosstable, $cond_targets) = $rsrc->_resolve_condition ( + my ($cond, $crosstable, $nonequality_foreign_columns) = $rsrc->_resolve_condition ( $rel_info->{cond}, $f_obj, $rel, $rel ); $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment") if $crosstable; + $self->throw_exception(sprintf ( "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s", $rel, - map { "'$_'" } @$cond_targets - )) if $cond_targets; + map { "'$_'" } @$nonequality_foreign_columns + )) if $nonequality_foreign_columns; $self->set_columns($cond); diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index e1ebbf723..00d257f80 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1697,7 +1697,7 @@ sub _resolve_condition { self_rowobj => $obj_rel ? $for : undef }); - my $cond_cols; + my @nonvalue_cols; if ($joinfree_cond) { # FIXME sanity check until things stabilize, remove at some point @@ -1705,51 +1705,34 @@ sub _resolve_condition { "A join-free condition returned for relationship '$rel_name' without a row-object to chain from" ) unless $obj_rel; + my $foreign_src_col_list = { map { ( "$relalias.$_" => 1 ) } $self->related_source($rel_name)->columns }; # FIXME another sanity check if ( ref $joinfree_cond ne 'HASH' or - first { $_ !~ /^\Q$relalias.\E.+/ } keys %$joinfree_cond + grep { ! $foreign_src_col_list } keys %$joinfree_cond ) { $self->throw_exception ( "The join-free condition returned for relationship '$rel_name' must be a hash " - .'reference with all keys being valid columns on the related result source' + .'reference with all keys being fully qualified column names of the foreign source' ); } - # normalize - for (values %$joinfree_cond) { - $_ = $_->{'='} if ( - ref $_ eq 'HASH' - and - keys %$_ == 1 - and - exists $_->{'='} - ); - } - - # see which parts of the joinfree cond are conditionals - my $relcol_list = { map { $_ => 1 } $self->related_source($rel_name)->columns }; - - for my $c (keys %$joinfree_cond) { - my ($colname) = $c =~ /^ (?: \Q$relalias.\E )? (.+)/x; - - unless ($relcol_list->{$colname}) { - push @$cond_cols, $colname; - next; - } - - if ( - ref $joinfree_cond->{$c} - and - ! is_literal_value( $joinfree_cond->{$c} ) - ) { - push @$cond_cols, $colname; - next; - } - } - - return wantarray ? 
($joinfree_cond, 0, $cond_cols) : $joinfree_cond; + # see which parts of the joinfree cond are *NOT* foreign-source-column equalities + my $joinfree_cond_equality_columns = { map + {( $_ => 1 )} + @{ $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond) } + }; + @nonvalue_cols = map + { $_ =~ /^\Q$relalias.\E(.+)/ } + grep + { ! $joinfree_cond_equality_columns->{$_} } + keys %$joinfree_cond; + + return wantarray + ? ($joinfree_cond, 0, (@nonvalue_cols ? \@nonvalue_cols : undef)) + : $joinfree_cond + ; } else { return wantarray ? ($crosstable_cond, 1) : $crosstable_cond; diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 9127c942e..cb902d23f 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -200,7 +200,7 @@ sub new { my ($related,$inflated); foreach my $key (keys %$attrs) { - if (ref $attrs->{$key}) { + if (ref $attrs->{$key} and ! is_literal_value($attrs->{$key}) ) { ## Can we extract this lot to use with update(_or .. ) ? $new->throw_exception("Can't do multi-create without result source") unless $rsrc; diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index a99eb7ee1..2bc7c4bd3 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -66,11 +66,11 @@ __PACKAGE__->has_many( if @missing_args; return ( - { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} }, + { "$args->{foreign_alias}.artist" => { '=' => \ "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, }, $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + "$args->{foreign_alias}.artist" => { '=' => \[ '?', $args->{self_rowobj}->artistid ] }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, } ); diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 3d47fb17d..61f709cc6 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -43,7 +43,7 @@ is_same_sql_bind( )', [ [ - { sqlt_datatype => 'integer', dbic_colname => 'me.artist' } + {} => 21 ], [ From 03f6d1f7b65051799423237e9401689c1b43ad95 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 12:05:13 +0200 Subject: [PATCH 049/548] First step to add some sanity to _resolve_condition The original problem starts here: oh, yeah, I just used to check ref and randomly swap things it worked for enough years :) I will be forwarding you my psychiatrist bill READ DIFF AT YOUR OWN RISK Over the years _resolve_condition has accumulated 3 (or 4, or 5, depends how you look at it) distinct call-modes. None having anything to do with another. Also it is a hot method, holding crucial functionality, which of course means that currently at least 3 projects on CPAN are using it, despite the private attribute. Which in turn means couple more orders of magnitude of users on the DarkPAN. Thus just killing this method outright *without a replacement* is not an option. A from-scratch replacement in the face of only one person currently *barely* understanding this codepath is a scary proposition. Instead create an elaborate (and scarily complete) shim to proxy to a new method holding all the logic (with the idea of making it an official API in the coming commits). There are no changes to any other codepaths, as this is how we ensure that the shim is sane, and works. 
Next step is to erradicate all cases of the old call in the current codebase (while leaving the sub/shim intact)/ Now let's see if we can fix CampusExplorer's bugs first... --- Changes | 2 + lib/DBIx/Class/Relationship/Base.pm | 5 +- lib/DBIx/Class/ResultSource.pm | 261 ++++++++++++++++++++-------- 3 files changed, 190 insertions(+), 78 deletions(-) diff --git a/Changes b/Changes index 1ae385ce3..88ed8255d 100644 --- a/Changes +++ b/Changes @@ -29,6 +29,8 @@ Revision history for DBIx::Class * Misc - Stop explicitly stringifying objects before passing them to DBI, instead assume that the DBI/DBD combo will take care of things + - Remove ::ResultSource::resolve_condition - the underlying machinery + is in flux, and the method has had a deprecation warning for 5 years 0.08270 2014-01-30 21:54 (PST) * Fixes diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 16d213dc3..cf8267776 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -488,10 +488,7 @@ sub related_resultset { }; # keep in mind that the following if() block is part of a do{} - no return()s!!! - if ($is_crosstable) { - $self->throw_exception ( - "A cross-table relationship condition returned for statically declared '$rel'" - ) unless ref $rel_info->{cond} eq 'CODE'; + if ($is_crosstable and ref $rel_info->{cond} eq 'CODE') { # A WHOREIFFIC hack to reinvoke the entire condition resolution # with the correct alias. Another way of doing this involves a diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 00d257f80..6ef169e06 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1669,10 +1669,76 @@ sub _pk_depends_on { return 1; } -sub resolve_condition { - carp 'resolve_condition is a private method, stop calling it'; - my $self = shift; - $self->_resolve_condition (@_); +sub _resolve_condition { +# carp_unique sprintf +# '_resolve_condition is a private method, and moreover is about to go ' +# . 'away. Please contact the development team at %s if you believe you ' +# . 'have a genuine use for this method, in order to discuss alternatives.', +# DBIx::Class::_ENV_::HELP_URL, +# ; + +####################### +### API Design? What's that...? (a backwards compatible shim, kill me now) + + my ($self, $cond, @res_args, $rel_name); + + # we *SIMPLY DON'T KNOW YET* which arg is which, yay + ($self, $cond, $res_args[0], $res_args[1], $rel_name) = @_; + + # assume that an undef is an object-like unset (set_from_related(undef)) + my @is_objlike = map { ! defined $_ or length ref $_ } (@res_args); + + # turn objlike into proper objects for saner code further down + for (0,1) { + next unless $is_objlike[$_]; + + if ( defined blessed $res_args[$_] ) { + + # but wait - there is more!!! WHAT THE FUCK?!?!?!?! 
+ if ($res_args[$_]->isa('DBIx::Class::ResultSet')) { + carp('Passing a resultset for relationship resolution makes no sense - invoking __gremlins__'); + $is_objlike[$_] = 0; + $res_args[$_] = '__gremlins__'; + } + } + else { + $res_args[$_] ||= {}; + + $self->throw_exception("Unsupported object-like structure encountered: $res_args[$_]") + unless ref $res_args[$_] eq 'HASH'; + + # hate everywhere + $res_args[$_] = $self->relationship_info($rel_name)->{source}->result_class->new($res_args[$_]); + } + } + + $self->throw_exception('No practical way to resolve a relationship between two structures') + if $is_objlike[0] and $is_objlike[1]; + + my $args = { + condition => $cond, + rel_name => $rel_name, + $is_objlike[1] ? ( self_alias => $res_args[0], foreign_alias => 'me', self_resultobj => $res_args[1] ) + : $is_objlike[0] ? ( self_alias => 'me', foreign_alias => $res_args[1], foreign_resultobj => $res_args[0] ) + : ( self_alias => $res_args[1], foreign_alias => $res_args[0] ) + }; +####################### + + # now it's fucking easy isn't it?! + my @res = $self->_resolve_relationship_condition( $args ); + + # FIXME - this is also insane, but just be consistent for now + # _resolve_relationship_condition always returns qualified cols + # even in the case of objects, but nothing downstream expects this + if (ref $res[0] eq 'HASH' and ($is_objlike[0] or $is_objlike[1]) ) { + $res[0] = { map + { ($_ =~ /\.(.+)/) => $res[0]{$_} } + keys %{$res[0]} + }; + } + + # more legacy + return wantarray ? @res : $res[0]; } our $UNRESOLVABLE_CONDITION = \ '1 = 0'; @@ -1681,20 +1747,36 @@ our $UNRESOLVABLE_CONDITION = \ '1 = 0'; # indicating whether this is a cross-table condition. Also an optional # list of non-trivial values (normally conditions) returned as a part # of a joinfree condition hash -sub _resolve_condition { - my ($self, $cond, $as, $for, $rel_name) = @_; +sub _resolve_relationship_condition { + my $self = shift; + + # self-explanatory API, modeled on the custom cond coderef: + # condition + # rel_name + # foreign_alias + # foreign_resultobj + # self_alias + # self_resultobj + my $args = { ref $_[0] eq 'HASH' ? %{ $_[0] } : @_ }; + + for ( qw( rel_name self_alias foreign_alias ) ) { + $self->throw_exception("Mandatory attribute '$_' is not a plain string") + if !defined $args->{$_} or length ref $args->{$_}; + } - my $obj_rel = defined blessed $for; + $self->throw_exception('No practical way to resolve a relationship between two objects') + if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; - if (ref $cond eq 'CODE') { - my $relalias = $obj_rel ? 'me' : $as; + $args->{condition} ||= $self->relationship_info($args->{rel_name})->{cond}; - my ($crosstable_cond, $joinfree_cond) = $cond->({ - self_alias => $obj_rel ? $as : $for, - foreign_alias => $relalias, + if (ref $args->{condition} eq 'CODE') { + + my ($crosstable_cond, $joinfree_cond) = $args->{condition}->({ + self_alias => $args->{self_alias}, + foreign_alias => $args->{foreign_alias}, self_resultsource => $self, - foreign_relname => $rel_name || ($obj_rel ? $as : $for), - self_rowobj => $obj_rel ? $for : undef + foreign_relname => $args->{rel_name}, + self_rowobj => defined $args->{self_resultobj} ? 
$args->{self_resultobj} : undef, }); my @nonvalue_cols; @@ -1702,18 +1784,19 @@ sub _resolve_condition { # FIXME sanity check until things stabilize, remove at some point $self->throw_exception ( - "A join-free condition returned for relationship '$rel_name' without a row-object to chain from" - ) unless $obj_rel; + "A join-free condition returned for relationship '$args->{rel_name}' without a row-object to chain from" + ) unless defined $args->{self_resultobj}; + + my $foreign_src_fq_col_list = { map { ( "$args->{foreign_alias}.$_" => 1 ) } $self->related_source($args->{rel_name})->columns }; - my $foreign_src_col_list = { map { ( "$relalias.$_" => 1 ) } $self->related_source($rel_name)->columns }; # FIXME another sanity check if ( ref $joinfree_cond ne 'HASH' or - grep { ! $foreign_src_col_list } keys %$joinfree_cond + grep { ! $foreign_src_fq_col_list->{$_} } keys %$joinfree_cond ) { $self->throw_exception ( - "The join-free condition returned for relationship '$rel_name' must be a hash " + "The join-free condition returned for relationship '$args->{rel_name}' must be a hash " .'reference with all keys being fully qualified column names of the foreign source' ); } @@ -1724,78 +1807,108 @@ sub _resolve_condition { @{ $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond) } }; @nonvalue_cols = map - { $_ =~ /^\Q$relalias.\E(.+)/ } + { $_ =~ /^\Q$args->{foreign_alias}.\E(.+)/ } grep { ! $joinfree_cond_equality_columns->{$_} } keys %$joinfree_cond; - return wantarray - ? ($joinfree_cond, 0, (@nonvalue_cols ? \@nonvalue_cols : undef)) - : $joinfree_cond - ; + return ($joinfree_cond, 0, (@nonvalue_cols ? \@nonvalue_cols : undef)); } else { - return wantarray ? ($crosstable_cond, 1) : $crosstable_cond; + return ($crosstable_cond, 1); } } - elsif (ref $cond eq 'HASH') { - my %ret; - foreach my $k (keys %{$cond}) { - my $v = $cond->{$k}; - # XXX should probably check these are valid columns - $k =~ s/^foreign\.// || - $self->throw_exception("Invalid rel cond key ${k}"); - $v =~ s/^self\.// || - $self->throw_exception("Invalid rel cond val ${v}"); - if (ref $for) { # Object - #warn "$self $k $for $v"; - unless ($for->has_column_loaded($v)) { - if ($for->in_storage) { - $self->throw_exception(sprintf - "Unable to resolve relationship '%s' from object %s: column '%s' not " - . 'loaded from storage (or not passed to new() prior to insert()). You ' - . 'probably need to call ->discard_changes to get the server-side defaults ' - . 'from the database.', - $as, - $for, - $v, - ); - } + elsif (ref $args->{condition} eq 'HASH') { + + # the condition is static - use parallel arrays + # for a "pivot" depending on which side of the + # rel did we get as an object + my (@f_cols, @l_cols); + for my $fc (keys %{$args->{condition}}) { + my $lc = $args->{condition}{$fc}; + + # FIXME STRICTMODE should probably check these are valid columns + $fc =~ s/^foreign\.// || + $self->throw_exception("Invalid rel cond key '$fc'"); + + $lc =~ s/^self\.// || + $self->throw_exception("Invalid rel cond val '$lc'"); + + push @f_cols, $fc; + push @l_cols, $lc; + } + + # plain values + if (! defined $args->{self_resultobj} and ! defined $args->{foreign_resultobj}) { + return ( { map + {( "$args->{foreign_alias}.$f_cols[$_]" => { -ident => "$args->{self_alias}.$l_cols[$_]" } )} + (0..$#f_cols) + }, 1 ); # is crosstable + } + else { + + my $cond; + + my ($obj, $obj_alias, $plain_alias, $obj_cols, $plain_cols) = defined $args->{self_resultobj} + ? 
( @{$args}{qw( self_resultobj self_alias foreign_alias )}, \@l_cols, \@f_cols ) + : ( @{$args}{qw( foreign_resultobj foreign_alias self_alias )}, \@f_cols, \@l_cols ) + ; + + for my $i (0..$#$obj_cols) { + if (defined $args->{self_resultobj} and ! $obj->has_column_loaded($obj_cols->[$i])) { + + $self->throw_exception(sprintf + "Unable to resolve relationship '%s' from object '%s': column '%s' not " + . 'loaded from storage (or not passed to new() prior to insert()). You ' + . 'probably need to call ->discard_changes to get the server-side defaults ' + . 'from the database.', + $args->{rel_name}, + $obj, + $obj_cols->[$i], + ) if $obj->in_storage; + return $UNRESOLVABLE_CONDITION; } - $ret{$k} = $for->get_column($v); - #$ret{$k} = $for->get_column($v) if $for->has_column_loaded($v); - #warn %ret; - } elsif (!defined $for) { # undef, i.e. "no object" - $ret{$k} = undef; - } elsif (ref $as eq 'HASH') { # reverse hashref - $ret{$v} = $as->{$k}; - } elsif (ref $as) { # reverse object - $ret{$v} = $as->get_column($k); - } elsif (!defined $as) { # undef, i.e. "no reverse object" - $ret{$v} = undef; - } else { - $ret{"${as}.${k}"} = { -ident => "${for}.${v}" }; + else { + $cond->{"$plain_alias.$plain_cols->[$i]"} = $obj->get_column($obj_cols->[$i]); + } } - } - return wantarray - ? ( \%ret, ($obj_rel || !defined $as || ref $as) ? 0 : 1 ) - : \%ret - ; + return ($cond, 0); # joinfree + } } - elsif (ref $cond eq 'ARRAY') { - my (@ret, $crosstable); - for (@$cond) { - my ($cond, $crosstab) = $self->_resolve_condition($_, $as, $for, $rel_name); - push @ret, $cond; - $crosstable ||= $crosstab; + elsif (ref $args->{condition} eq 'ARRAY') { + if (@{$args->{condition}} == 0) { + return $UNRESOLVABLE_CONDITION; + } + elsif (@{$args->{condition}} == 1) { + return $self->_resolve_relationship_condition({ + %$args, + condition => $args->{condition}[0], + }); + } + else { + # FIXME - we are discarding nonvalues here... likely incorrect... + # then again - the entire thing is an OR, so we *can't* use + # the values anyway + # Return a hard crosstable => 1 to ensure nothing tries to use + # the result in such manner + my @ret; + for (@{$args->{condition}}) { + my ($cond) = $self->_resolve_relationship_condition({ + %$args, + condition => $_, + }); + push @ret, $cond; + } + return (\@ret, 1); # forced cross-tab } - return wantarray ? 
(\@ret, $crosstable) : \@ret; } else { - $self->throw_exception ("Can't handle condition $cond for relationship '$rel_name' yet :("); + $self->throw_exception ("Can't handle condition $args->{condition} for relationship '$args->{rel_name}' yet :("); } + + die "not supposed to get here - missing return()"; } =head2 related_source From 1daf13637feee7722e7bbaddb608920c9f8fd1df Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 13:15:02 +0200 Subject: [PATCH 050/548] Adjust error messages - joinfrees can be returned from noncode rels --- lib/DBIx/Class/Relationship/Base.pm | 4 ++-- lib/DBIx/Class/Relationship/ManyToMany.pm | 2 +- lib/DBIx/Class/ResultSet.pm | 10 +++++++--- t/relationship/custom.t | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index cf8267776..a924b7e26 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -640,7 +640,7 @@ sub new_related { $rel_info->{cond}, $rel, $self, $rel ); - $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment") + $self->throw_exception("Relationship '$rel' does not resolve to a join-free condition fragment") if $crosstable; if ( @@ -819,7 +819,7 @@ sub set_from_related { my ($cond, $crosstable, $nonequality_foreign_columns) = $rsrc->_resolve_condition ( $rel_info->{cond}, $f_obj, $rel, $rel ); - $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment") + $self->throw_exception("Relationship '$rel' does not resolve to a join-free condition fragment") if $crosstable; $self->throw_exception(sprintf ( diff --git a/lib/DBIx/Class/Relationship/ManyToMany.pm b/lib/DBIx/Class/Relationship/ManyToMany.pm index ef63b0884..8237602b9 100644 --- a/lib/DBIx/Class/Relationship/ManyToMany.pm +++ b/lib/DBIx/Class/Relationship/ManyToMany.pm @@ -138,7 +138,7 @@ EOW ); $self->throw_exception( - "Custom relationship '$rel' does not resolve to a join-free condition, " + "Relationship '$rel' does not resolve to a join-free condition, " ."unable to use with the ManyToMany helper '$f_rel'" ) if $crosstable; diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index ca8a23e00..37ef6cbc0 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -773,11 +773,15 @@ sub find { next if $keyref eq 'ARRAY'; # has_many for multi_create - my $rel_q = $rsrc->_resolve_condition( + my ($rel_cond, $crosstable) = $rsrc->_resolve_condition( $relinfo->{cond}, $val, $key, $key ); - die "Can't handle complex relationship conditions in find" if ref($rel_q) ne 'HASH'; - @related{keys %$rel_q} = values %$rel_q; + + $self->throw_exception("Complex condition via relationship '$key' is unsupported in find()") + if $crosstable or ref($rel_cond) ne 'HASH'; + + # supplement + @related{keys %$rel_cond} = values %$rel_cond; } } diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 61f709cc6..0cf32b568 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -177,7 +177,7 @@ is( # try a specific everything via a non-simplified rel throws_ok { $artist->create_related('cds_90s', { title => 'related_creation 4', year => '2038' }); -} qr/\QCustom relationship 'cds_90s' does not resolve to a join-free condition fragment/, +} qr/\QRelationship 'cds_90s' does not resolve to a join-free condition fragment/, 'Create failed - non-simplified rel'; # Do a self-join last-entry search From 
a3a17a151f4001019b64ef83d7e304bb7f0291af Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 15:51:30 +0200 Subject: [PATCH 051/548] Centralize custom rel args check, be more thorough --- t/lib/DBICTest/Schema/Artist.pm | 31 +++++++--------------- t/lib/DBICTest/Schema/Artwork.pm | 11 +++----- t/lib/DBICTest/Schema/Artwork_to_Artist.pm | 18 +++---------- t/lib/DBICTest/Schema/CD.pm | 9 +++++-- t/lib/DBICTest/Schema/Track.pm | 11 +++----- t/lib/DBICTest/Util.pm | 29 +++++++++++++++++++- 6 files changed, 55 insertions(+), 54 deletions(-) diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index 2bc7c4bd3..e060affaf 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -4,8 +4,8 @@ package # hide from PAUSE use warnings; use strict; -use base qw/DBICTest::BaseResult/; -use Carp qw/confess/; +use base 'DBICTest::BaseResult'; +use DBICTest::Util 'check_customcond_args'; __PACKAGE__->table('artist'); __PACKAGE__->source_info({ @@ -55,15 +55,10 @@ __PACKAGE__->has_many( __PACKAGE__->has_many( cds_80s => 'DBICTest::Schema::CD', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artist" => { '=' => \ "$args->{self_alias}.artistid" }, @@ -81,15 +76,10 @@ __PACKAGE__->has_many( __PACKAGE__->has_many( cds_84 => 'DBICTest::Schema::CD', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, @@ -107,15 +97,10 @@ __PACKAGE__->has_many( __PACKAGE__->has_many( cds_90s => 'DBICTest::Schema::CD', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, @@ -150,7 +135,11 @@ __PACKAGE__->many_to_many('artworks', 'artwork_to_artist', 'artwork'); __PACKAGE__->has_many( cds_without_genre => 'DBICTest::Schema::CD', sub { - my $args = shift; + # This is for test purposes only. 
A regular user does not + # need to sanity check the passed-in arguments, this is what + # the tests are for :) + my $args = &check_customcond_args; + return ( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, diff --git a/t/lib/DBICTest/Schema/Artwork.pm b/t/lib/DBICTest/Schema/Artwork.pm index 01ce45066..6fc8bb695 100644 --- a/t/lib/DBICTest/Schema/Artwork.pm +++ b/t/lib/DBICTest/Schema/Artwork.pm @@ -4,8 +4,8 @@ package # hide from PAUSE use warnings; use strict; -use base qw/DBICTest::BaseResult/; -use Carp qw/confess/; +use base 'DBICTest::BaseResult'; +use DBICTest::Util 'check_customcond_args'; __PACKAGE__->table('cd_artwork'); __PACKAGE__->add_columns( @@ -28,15 +28,10 @@ __PACKAGE__->many_to_many('artists_test_m2m_noopt', 'artwork_to_artist', 'artist # other test to manytomany __PACKAGE__->has_many('artwork_to_artist_test_m2m', 'DBICTest::Schema::Artwork_to_Artist', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artwork_cd_id" => { -ident => "$args->{self_alias}.cd_id" }, diff --git a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm index 57326e2cd..eb5b33357 100644 --- a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm +++ b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm @@ -4,8 +4,8 @@ package # hide from PAUSE use warnings; use strict; -use base qw/DBICTest::BaseResult/; -use Carp qw/confess/; +use base 'DBICTest::BaseResult'; +use DBICTest::Util 'check_customcond_args'; __PACKAGE__->table('artwork_to_artist'); __PACKAGE__->add_columns( @@ -24,15 +24,10 @@ __PACKAGE__->belongs_to('artist', 'DBICTest::Schema::Artist', 'artist_id'); __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" }, @@ -48,15 +43,10 @@ __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist', __PACKAGE__->belongs_to('artist_test_m2m_noopt', 'DBICTest::Schema::Artist', sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! 
defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" }, diff --git a/t/lib/DBICTest/Schema/CD.pm b/t/lib/DBICTest/Schema/CD.pm index 45fdf6fe9..e7cccca1e 100644 --- a/t/lib/DBICTest/Schema/CD.pm +++ b/t/lib/DBICTest/Schema/CD.pm @@ -4,7 +4,8 @@ package # hide from PAUSE use warnings; use strict; -use base qw/DBICTest::BaseResult/; +use base 'DBICTest::BaseResult'; +use DBICTest::Util 'check_customcond_args'; # this tests table name as scalar ref # DO NOT REMOVE THE \ @@ -118,7 +119,11 @@ __PACKAGE__->might_have( 'last_track', 'DBICTest::Schema::Track', sub { - my $args = shift; + # This is for test purposes only. A regular user does not + # need to sanity check the passed-in arguments, this is what + # the tests are for :) + my $args = &check_customcond_args; + return ( { "$args->{foreign_alias}.trackid" => { '=' => diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index 5077bd0c9..60bad4e35 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -4,8 +4,8 @@ package # hide from PAUSE use warnings; use strict; -use base qw/DBICTest::BaseResult/; -use Carp qw/confess/; +use base 'DBICTest::BaseResult'; +use DBICTest::Util 'check_customcond_args'; __PACKAGE__->load_components(qw{ +DBICTest::DeployComponent @@ -76,15 +76,10 @@ __PACKAGE__->belongs_to( __PACKAGE__->has_many ( next_tracks => __PACKAGE__, sub { - my $args = shift; - # This is for test purposes only. A regular user does not # need to sanity check the passed-in arguments, this is what # the tests are for :) - my @missing_args = grep { ! defined $args->{$_} } - qw/self_alias foreign_alias self_resultsource foreign_relname/; - confess "Required arguments not supplied to custom rel coderef: @missing_args\n" - if @missing_args; + my $args = &check_customcond_args; return ( { "$args->{foreign_alias}.cd" => { -ident => "$args->{self_alias}.cd" }, diff --git a/t/lib/DBICTest/Util.pm b/t/lib/DBICTest/Util.pm index 0cd2b1279..e7a052559 100644 --- a/t/lib/DBICTest/Util.pm +++ b/t/lib/DBICTest/Util.pm @@ -4,9 +4,11 @@ use warnings; use strict; use Config; +use Carp 'confess'; +use Scalar::Util 'blessed'; use base 'Exporter'; -our @EXPORT_OK = qw/local_umask stacktrace/; +our @EXPORT_OK = qw(local_umask stacktrace check_customcond_args); sub local_umask { return unless defined $Config{d_umask}; @@ -44,4 +46,29 @@ sub stacktrace { return join "\tinvoked as ", map { sprintf ("%s at %s line %d\n", @$_ ) } @stack; } +sub check_customcond_args ($) { + my $args = shift; + + confess "Expecting a hashref" + unless ref $args eq 'HASH'; + + for (qw(foreign_relname self_alias foreign_alias)) { + confess "Custom condition argument '$_' must be a plain string" + if length ref $args->{$_} or ! 
length $args->{$_}; + } + + confess "Custom condition argument 'self_resultsource' must be a rsrc instance" + unless defined blessed $args->{self_resultsource} and $args->{self_resultsource}->isa('DBIx::Class::ResultSource'); + + confess "Passed resultsource has no record of the supplied rel_name - likely wrong \$rsrc" + unless ref $args->{self_resultsource}->relationship_info($args->{foreign_relname}); + + if (defined $args->{self_rowobj}) { + confess "Custom condition argument 'self_rowobj' must be a result instance" + unless defined blessed $args->{self_rowobj} and $args->{self_rowobj}->isa('DBIx::Class::Row'); + } + + $args; +} + 1; From a446d7f8fdc34bde8a31936f7900b77a0c210415 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 15:22:31 +0200 Subject: [PATCH 052/548] Rename (with a silent compat shim) couple of badly named customcond args foreign_relname makes absolutely no sense - it *is* a relationship after all, of course it has a name. Now renamed to rel_name to be consistent with the relationship condition resolver self_rowobj uses the old rowobj nomenclature - switch to self_resultobj --- Changes | 2 ++ lib/DBIx/Class/Relationship/Base.pm | 16 +++++++++++----- lib/DBIx/Class/ResultSet.pm | 2 +- lib/DBIx/Class/ResultSource.pm | 18 ++++++++++++------ t/lib/DBICTest/Util.pm | 20 ++++++++++++++------ 5 files changed, 40 insertions(+), 18 deletions(-) diff --git a/Changes b/Changes index 88ed8255d..055c6f482 100644 --- a/Changes +++ b/Changes @@ -5,6 +5,8 @@ Revision history for DBIx::Class like the rest of DBIC - DBIC::FilterColumn "from_storage" handler is now invoked on NULLs returned from storage + - Custom condition relationships are now invoked with a slightly + different signature (existing coderefs will continue to work) * Fixes - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index a924b7e26..c39755adf 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -232,11 +232,17 @@ clause, the C<$args> hashref passed to the subroutine contains some extra metadata. 
Currently the supplied coderef is executed as: $relationship_info->{cond}->({ - self_alias => The alias of the invoking resultset ('me' in case of a result object), - foreign_alias => The alias of the to-be-joined resultset (often matches relname), - self_resultsource => The invocant's resultsource, - foreign_relname => The relationship name (does *not* always match foreign_alias), - self_rowobj => The invocant itself in case of a $result_object->$relationship call + self_resultsource => The resultsource instance on which rel_name is registered + rel_name => The relationship name (does *NOT* always match foreign_alias) + + self_alias => The alias of the invoking resultset + foreign_alias => The alias of the to-be-joined resultset (does *NOT* always match rel_name) + + self_resultobj => The invocant object itself in case of a $resultobj->$rel_name() call + + # deprecated inconsistent names, will be forever available for legacy code + self_rowobj => Old deprecated slot for self_resultobj + foreign_relname => Old deprecated slot for rel_name }); =head3 attributes diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 37ef6cbc0..92665d6df 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -2214,7 +2214,7 @@ sub populate { foreach my $rel (@rels) { next unless ref $data->[$index]->{$rel} eq "HASH"; my $result = $self->related_resultset($rel)->create($data->[$index]->{$rel}); - my ($reverse_relname, $reverse_relinfo) = %{$rsrc->reverse_relationship_info($rel)}; + my (undef, $reverse_relinfo) = %{$rsrc->reverse_relationship_info($rel)}; my $related = $result->result_source->_resolve_condition( $reverse_relinfo->{cond}, $self, diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 6ef169e06..13397dd35 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1771,20 +1771,26 @@ sub _resolve_relationship_condition { if (ref $args->{condition} eq 'CODE') { - my ($crosstable_cond, $joinfree_cond) = $args->{condition}->({ + my $cref_args = { + rel_name => $args->{rel_name}, + self_resultsource => $self, self_alias => $args->{self_alias}, foreign_alias => $args->{foreign_alias}, - self_resultsource => $self, - foreign_relname => $args->{rel_name}, - self_rowobj => defined $args->{self_resultobj} ? $args->{self_resultobj} : undef, - }); + self_resultobj => defined $args->{self_resultobj} ? $args->{self_resultobj} : undef, + }; + + # legacy - never remove these!!! 
+ $cref_args->{foreign_relname} = $cref_args->{rel_name}; + $cref_args->{self_rowobj} = $cref_args->{self_resultobj}; + + my ($crosstable_cond, $joinfree_cond) = $args->{condition}->($cref_args); my @nonvalue_cols; if ($joinfree_cond) { # FIXME sanity check until things stabilize, remove at some point $self->throw_exception ( - "A join-free condition returned for relationship '$args->{rel_name}' without a row-object to chain from" + "A join-free condition returned for relationship '$args->{rel_name}' without a result object to chain from" ) unless defined $args->{self_resultobj}; my $foreign_src_fq_col_list = { map { ( "$args->{foreign_alias}.$_" => 1 ) } $self->related_source($args->{rel_name})->columns }; diff --git a/t/lib/DBICTest/Util.pm b/t/lib/DBICTest/Util.pm index e7a052559..b821243dc 100644 --- a/t/lib/DBICTest/Util.pm +++ b/t/lib/DBICTest/Util.pm @@ -5,7 +5,7 @@ use strict; use Config; use Carp 'confess'; -use Scalar::Util 'blessed'; +use Scalar::Util qw(blessed refaddr); use base 'Exporter'; our @EXPORT_OK = qw(local_umask stacktrace check_customcond_args); @@ -52,20 +52,28 @@ sub check_customcond_args ($) { confess "Expecting a hashref" unless ref $args eq 'HASH'; - for (qw(foreign_relname self_alias foreign_alias)) { + for (qw(rel_name foreign_relname self_alias foreign_alias)) { confess "Custom condition argument '$_' must be a plain string" if length ref $args->{$_} or ! length $args->{$_}; } + confess "Current and legacy rel_name arguments do not match" + if $args->{rel_name} ne $args->{foreign_relname}; + confess "Custom condition argument 'self_resultsource' must be a rsrc instance" unless defined blessed $args->{self_resultsource} and $args->{self_resultsource}->isa('DBIx::Class::ResultSource'); confess "Passed resultsource has no record of the supplied rel_name - likely wrong \$rsrc" - unless ref $args->{self_resultsource}->relationship_info($args->{foreign_relname}); + unless ref $args->{self_resultsource}->relationship_info($args->{rel_name}); + + if (defined $args->{self_resultobj} or defined $args->{self_rowobj} ) { + for (qw(self_resultobj self_rowobj)) { + confess "Custom condition argument '$_' must be a result instance" + unless defined blessed $args->{$_} and $args->{$_}->isa('DBIx::Class::Row'); + } - if (defined $args->{self_rowobj}) { - confess "Custom condition argument 'self_rowobj' must be a result instance" - unless defined blessed $args->{self_rowobj} and $args->{self_rowobj}->isa('DBIx::Class::Row'); + confess "Current and legacy self_resultobj arguments do not match" + if refaddr($args->{self_resultobj}) != refaddr($args->{self_rowobj}); } $args; From a5f5e47019daf25c0b0f9708cbd3ab2695584c5a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 16:09:22 +0200 Subject: [PATCH 053/548] Pedantry - settle on rel_name vs relname, consistency throughout Only outlier is relname_to_table_alias, which should have never existed in the first place. Punting deprecation/etc cycle until future Oracle work... 
sigh --- lib/DBIx/Class/Manual/FAQ.pod | 4 +- lib/DBIx/Class/Relationship.pm | 4 +- lib/DBIx/Class/Relationship/Base.pm | 4 +- lib/DBIx/Class/ResultSource.pm | 2 +- lib/DBIx/Class/ResultSource/RowParser/Util.pm | 4 +- lib/DBIx/Class/Row.pm | 126 +++++++++--------- 6 files changed, 72 insertions(+), 72 deletions(-) diff --git a/lib/DBIx/Class/Manual/FAQ.pod b/lib/DBIx/Class/Manual/FAQ.pod index 71595d515..bef779e57 100644 --- a/lib/DBIx/Class/Manual/FAQ.pod +++ b/lib/DBIx/Class/Manual/FAQ.pod @@ -451,8 +451,8 @@ what create_related() from L does, you could add this to Book.pm: sub foo { - my ($self, $relname, $col_data) = @_; - return $self->related_resultset($relname)->create($col_data); + my ($self, $rel_name, $col_data) = @_; + return $self->related_resultset($rel_name)->create($col_data); } Invoked like this: diff --git a/lib/DBIx/Class/Relationship.pm b/lib/DBIx/Class/Relationship.pm index 427b5aaea..1b9e9d78f 100644 --- a/lib/DBIx/Class/Relationship.pm +++ b/lib/DBIx/Class/Relationship.pm @@ -105,7 +105,7 @@ L. All helper methods are called similar to the following template: - __PACKAGE__->$method_name('relname', 'Foreign::Class', \%cond|\@cond|\&cond?, \%attrs?); + __PACKAGE__->$method_name('rel_name', 'Foreign::Class', \%cond|\@cond|\&cond?, \%attrs?); Both C and C are optional. Pass C for C if you want to use the default value for it, but still want to set C. @@ -327,7 +327,7 @@ The second is almost exactly the same as the accessor method but "_rs" is added to the end of the method name, eg C<$accessor_name_rs()>. This method works just like the normal accessor, except that it always returns a resultset, even in list context. The third method, named C<< -add_to_$relname >>, will also be added to your Row items; this allows +add_to_$rel_name >>, will also be added to your Row items; this allows you to insert new related items, using the same mechanism as in L. diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index c39755adf..a2e78ff58 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -38,11 +38,11 @@ methods, for predefined ones, look in L. =over 4 -=item Arguments: 'relname', 'Foreign::Class', $condition, $attrs +=item Arguments: $rel_name, $foreign_class, $condition, $attrs =back - __PACKAGE__->add_relationship('relname', + __PACKAGE__->add_relationship('rel_name', 'Foreign::Class', $condition, $attrs); diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 13397dd35..4f45c581a 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1363,7 +1363,7 @@ sub add_relationship { =back - my @relnames = $source->relationships(); + my @rel_names = $source->relationships(); Returns all relationship names for this source. diff --git a/lib/DBIx/Class/ResultSource/RowParser/Util.pm b/lib/DBIx/Class/ResultSource/RowParser/Util.pm index d1c1e3b71..6203efa39 100644 --- a/lib/DBIx/Class/ResultSource/RowParser/Util.pm +++ b/lib/DBIx/Class/ResultSource/RowParser/Util.pm @@ -267,7 +267,7 @@ sub __visit_infmap_collapse { my $parent_attach_slot = sprintf( '$collapse_idx[%d]%s%s{%s}', @{$args}{qw/-parent_node_idx -parent_node_key/}, $args->{hri_style} ? 
'' : '[1]', - perlstring($args->{-node_relname}), + perlstring($args->{-node_rel_name}), ); if ($args->{collapse_map}->{-is_single}) { @@ -300,7 +300,7 @@ sub __visit_infmap_collapse { collapse_map => $relinfo, -parent_node_idx => $cur_node_idx, -parent_node_key => $node_key, - -node_relname => $rel, + -node_rel_name => $rel, }); my $rel_src_pos = $#src + 1; diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index cb902d23f..2aa5b2395 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -126,26 +126,26 @@ with NULL as the default, and save yourself a SELECT. ## tests! sub __new_related_find_or_new_helper { - my ($self, $relname, $values) = @_; + my ($self, $rel_name, $values) = @_; my $rsrc = $self->result_source; # create a mock-object so all new/set_column component overrides will run: - my $rel_rs = $rsrc->related_source($relname)->resultset; + my $rel_rs = $rsrc->related_source($rel_name)->resultset; my $new_rel_obj = $rel_rs->new_result($values); my $proc_data = { $new_rel_obj->get_columns }; - if ($self->__their_pk_needs_us($relname)) { - MULTICREATE_DEBUG and print STDERR "MC $self constructing $relname via new_result\n"; + if ($self->__their_pk_needs_us($rel_name)) { + MULTICREATE_DEBUG and print STDERR "MC $self constructing $rel_name via new_result\n"; return $new_rel_obj; } - elsif ($rsrc->_pk_depends_on($relname, $proc_data )) { + elsif ($rsrc->_pk_depends_on($rel_name, $proc_data )) { if (! keys %$proc_data) { # there is nothing to search for - blind create - MULTICREATE_DEBUG and print STDERR "MC $self constructing default-insert $relname\n"; + MULTICREATE_DEBUG and print STDERR "MC $self constructing default-insert $rel_name\n"; } else { - MULTICREATE_DEBUG and print STDERR "MC $self constructing $relname via find_or_new\n"; + MULTICREATE_DEBUG and print STDERR "MC $self constructing $rel_name via find_or_new\n"; # this is not *really* find or new, as we don't want to double-new the # data (thus potentially double encoding or whatever) my $exists = $rel_rs->find ($proc_data); @@ -156,17 +156,17 @@ sub __new_related_find_or_new_helper { else { my $us = $rsrc->source_name; $self->throw_exception ( - "Unable to determine relationship '$relname' direction from '$us', " - . "possibly due to a missing reverse-relationship on '$relname' to '$us'." + "Unable to determine relationship '$rel_name' direction from '$us', " + . "possibly due to a missing reverse-relationship on '$rel_name' to '$us'." ); } } sub __their_pk_needs_us { # this should maybe be in resultsource. - my ($self, $relname) = @_; + my ($self, $rel_name) = @_; my $rsrc = $self->result_source; - my $reverse = $rsrc->reverse_relationship_info($relname); - my $rel_source = $rsrc->related_source($relname); + my $reverse = $rsrc->reverse_relationship_info($rel_name); + my $rel_source = $rsrc->related_source($rel_name); my $us = { $self->get_columns }; foreach my $key (keys %$reverse) { # if their primary key depends on us, then we have to @@ -352,27 +352,27 @@ sub insert { # insert what needs to be inserted before us my %pre_insert; - for my $relname (keys %related_stuff) { - my $rel_obj = $related_stuff{$relname}; + for my $rel_name (keys %related_stuff) { + my $rel_obj = $related_stuff{$rel_name}; - if (! $self->{_rel_in_storage}{$relname}) { + if (! 
$self->{_rel_in_storage}{$rel_name}) { next unless (blessed $rel_obj && $rel_obj->isa('DBIx::Class::Row')); next unless $rsrc->_pk_depends_on( - $relname, { $rel_obj->get_columns } + $rel_name, { $rel_obj->get_columns } ); # The guard will save us if we blow out of this scope via die $rollback_guard ||= $storage->txn_scope_guard; - MULTICREATE_DEBUG and print STDERR "MC $self pre-reconstructing $relname $rel_obj\n"; + MULTICREATE_DEBUG and print STDERR "MC $self pre-reconstructing $rel_name $rel_obj\n"; my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_columns }; my $existing; # if there are no keys - nothing to search for if (keys %$them and $existing = $self->result_source - ->related_source($relname) + ->related_source($rel_name) ->resultset ->find($them) ) { @@ -382,11 +382,11 @@ sub insert { $rel_obj->insert; } - $self->{_rel_in_storage}{$relname} = 1; + $self->{_rel_in_storage}{$rel_name} = 1; } - $self->set_from_related($relname, $rel_obj); - delete $related_stuff{$relname}; + $self->set_from_related($rel_name, $rel_obj); + delete $related_stuff{$rel_name}; } # start a transaction here if not started yet and there is more stuff @@ -427,25 +427,25 @@ sub insert { $self->{_dirty_columns} = {}; $self->{related_resultsets} = {}; - foreach my $relname (keys %related_stuff) { - next unless $rsrc->has_relationship ($relname); + foreach my $rel_name (keys %related_stuff) { + next unless $rsrc->has_relationship ($rel_name); - my @cands = ref $related_stuff{$relname} eq 'ARRAY' - ? @{$related_stuff{$relname}} - : $related_stuff{$relname} + my @cands = ref $related_stuff{$rel_name} eq 'ARRAY' + ? @{$related_stuff{$rel_name}} + : $related_stuff{$rel_name} ; if (@cands && blessed $cands[0] && $cands[0]->isa('DBIx::Class::Row') ) { - my $reverse = $rsrc->reverse_relationship_info($relname); + my $reverse = $rsrc->reverse_relationship_info($rel_name); foreach my $obj (@cands) { $obj->set_from_related($_, $self) for keys %$reverse; - if ($self->__their_pk_needs_us($relname)) { - if (exists $self->{_ignore_at_insert}{$relname}) { - MULTICREATE_DEBUG and print STDERR "MC $self skipping post-insert on $relname\n"; + if ($self->__their_pk_needs_us($rel_name)) { + if (exists $self->{_ignore_at_insert}{$rel_name}) { + MULTICREATE_DEBUG and print STDERR "MC $self skipping post-insert on $rel_name\n"; } else { - MULTICREATE_DEBUG and print STDERR "MC $self inserting $relname $obj\n"; + MULTICREATE_DEBUG and print STDERR "MC $self inserting $rel_name $obj\n"; $obj->insert; } } else { @@ -953,20 +953,20 @@ sub set_column { # # FIXME - this is a quick *largely incorrect* hack, pending a more # serious rework during the merge of single and filter rels - my $relnames = $self->result_source->{_relationships}; - for my $relname (keys %$relnames) { + my $rel_names = $self->result_source->{_relationships}; + for my $rel_name (keys %$rel_names) { - my $acc = $relnames->{$relname}{attrs}{accessor} || ''; + my $acc = $rel_names->{$rel_name}{attrs}{accessor} || ''; - if ( $acc eq 'single' and $relnames->{$relname}{attrs}{fk_columns}{$column} ) { - delete $self->{related_resultsets}{$relname}; - delete $self->{_relationship_data}{$relname}; - #delete $self->{_inflated_column}{$relname}; + if ( $acc eq 'single' and $rel_names->{$rel_name}{attrs}{fk_columns}{$column} ) { + delete $self->{related_resultsets}{$rel_name}; + delete $self->{_relationship_data}{$rel_name}; + #delete $self->{_inflated_column}{$rel_name}; } - elsif ( $acc eq 'filter' and $relname eq $column) { - delete 
$self->{related_resultsets}{$relname}; - #delete $self->{_relationship_data}{$relname}; - delete $self->{_inflated_column}{$relname}; + elsif ( $acc eq 'filter' and $rel_name eq $column) { + delete $self->{related_resultsets}{$rel_name}; + #delete $self->{_relationship_data}{$rel_name}; + delete $self->{_inflated_column}{$rel_name}; } } @@ -1049,7 +1049,7 @@ sub set_columns { =head2 set_inflated_columns - $result->set_inflated_columns({ $col => $val, $relname => $obj, ... }); + $result->set_inflated_columns({ $col => $val, $rel_name => $obj, ... }); =over @@ -1153,19 +1153,19 @@ sub copy { # Its possible we'll have 2 relations to the same Source. We need to make # sure we don't try to insert the same row twice else we'll violate unique # constraints - my $relnames_copied = {}; + my $rel_names_copied = {}; - foreach my $relname ($self->result_source->relationships) { - my $rel_info = $self->result_source->relationship_info($relname); + foreach my $rel_name ($self->result_source->relationships) { + my $rel_info = $self->result_source->relationship_info($rel_name); next unless $rel_info->{attrs}{cascade_copy}; my $resolved = $self->result_source->_resolve_condition( - $rel_info->{cond}, $relname, $new, $relname + $rel_info->{cond}, $rel_name, $new, $rel_name ); - my $copied = $relnames_copied->{ $rel_info->{source} } ||= {}; - foreach my $related ($self->search_related($relname)->all) { + my $copied = $rel_names_copied->{ $rel_info->{source} } ||= {}; + foreach my $related ($self->search_related($rel_name)->all) { my $id_str = join("\0", $related->id); next if $copied->{$id_str}; $copied->{$id_str} = 1; @@ -1241,56 +1241,56 @@ sub inflate_result { ; if ($prefetch) { - for my $relname ( keys %$prefetch ) { + for my $rel_name ( keys %$prefetch ) { - my $relinfo = $rsrc->relationship_info($relname) or do { + my $relinfo = $rsrc->relationship_info($rel_name) or do { my $err = sprintf "Inflation into non-existent relationship '%s' of '%s' requested", - $relname, + $rel_name, $rsrc->source_name, ; - if (my ($colname) = sort { length($a) <=> length ($b) } keys %{$prefetch->{$relname}[0] || {}} ) { + if (my ($colname) = sort { length($a) <=> length ($b) } keys %{$prefetch->{$rel_name}[0] || {}} ) { $err .= sprintf ", check the inflation specification (columns/as) ending in '...%s.%s'", - $relname, + $rel_name, $colname, } $rsrc->throw_exception($err); }; - $class->throw_exception("No accessor type declared for prefetched relationship '$relname'") + $class->throw_exception("No accessor type declared for prefetched relationship '$rel_name'") unless $relinfo->{attrs}{accessor}; - my $rel_rs = $new->related_resultset($relname); + my $rel_rs = $new->related_resultset($rel_name); my @rel_objects; if ( - @{ $prefetch->{$relname} || [] } + @{ $prefetch->{$rel_name} || [] } and - ref($prefetch->{$relname}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class + ref($prefetch->{$rel_name}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class ) { - if (ref $prefetch->{$relname}[0] eq 'ARRAY') { + if (ref $prefetch->{$rel_name}[0] eq 'ARRAY') { my $rel_rsrc = $rel_rs->result_source; my $rel_class = $rel_rs->result_class; my $rel_inflator = $rel_class->can('inflate_result'); @rel_objects = map { $rel_class->$rel_inflator ( $rel_rsrc, @$_ ) } - @{$prefetch->{$relname}} + @{$prefetch->{$rel_name}} ; } else { @rel_objects = $rel_rs->result_class->inflate_result( - $rel_rs->result_source, @{$prefetch->{$relname}} + $rel_rs->result_source, @{$prefetch->{$rel_name}} ); } } if 
($relinfo->{attrs}{accessor} eq 'single') { - $new->{_relationship_data}{$relname} = $rel_objects[0]; + $new->{_relationship_data}{$rel_name} = $rel_objects[0]; } elsif ($relinfo->{attrs}{accessor} eq 'filter') { - $new->{_inflated_column}{$relname} = $rel_objects[0]; + $new->{_inflated_column}{$rel_name} = $rel_objects[0]; } $rel_rs->set_cache(\@rel_objects); From 1adbd3fc463c963d77fa2755eccaf6112e63487a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 11 Jun 2014 16:32:59 +0200 Subject: [PATCH 054/548] Add foreign_resultobj to the customrel signature Tests in next commit --- Changes | 3 +++ lib/DBIx/Class/Relationship/Base.pm | 3 +++ lib/DBIx/Class/ResultSource.pm | 29 ++++++++++++++++++++++------- t/lib/DBICTest/Util.pm | 13 +++++++++++++ 4 files changed, 41 insertions(+), 7 deletions(-) diff --git a/Changes b/Changes index 055c6f482..b1b040f86 100644 --- a/Changes +++ b/Changes @@ -7,6 +7,9 @@ Revision history for DBIx::Class returned from storage - Custom condition relationships are now invoked with a slightly different signature (existing coderefs will continue to work) + - Add extra custom condition coderef attribute 'foreign_resultobj' + to allow for proper reverse-relationship emulation + (i.e. $result->set_from_related($custom_cond, $foreign_resultobj) * Fixes - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index a2e78ff58..e11263fba 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -238,7 +238,10 @@ metadata. Currently the supplied coderef is executed as: self_alias => The alias of the invoking resultset foreign_alias => The alias of the to-be-joined resultset (does *NOT* always match rel_name) + # only one of these (or none at all) will ever be supplied to aid in the + # construction of a join-free condition self_resultobj => The invocant object itself in case of a $resultobj->$rel_name() call + foreign_resultobj => The related object in case of $resultobj->set_from_related($rel_name, $foreign_resultobj) # deprecated inconsistent names, will be forever available for legacy code self_rowobj => Old deprecated slot for self_resultobj diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 4f45c581a..9f6ac9cbc 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1776,34 +1776,49 @@ sub _resolve_relationship_condition { self_resultsource => $self, self_alias => $args->{self_alias}, foreign_alias => $args->{foreign_alias}, - self_resultobj => defined $args->{self_resultobj} ? $args->{self_resultobj} : undef, + self_resultobj => (defined $args->{self_resultobj} ? $args->{self_resultobj} : undef), + foreign_resultobj => (defined $args->{foreign_resultobj} ? $args->{foreign_resultobj} : undef), }; # legacy - never remove these!!! 
$cref_args->{foreign_relname} = $cref_args->{rel_name}; $cref_args->{self_rowobj} = $cref_args->{self_resultobj}; - my ($crosstable_cond, $joinfree_cond) = $args->{condition}->($cref_args); + my ($crosstable_cond, $joinfree_cond, @extra) = $args->{condition}->($cref_args); + + # FIXME sanity check + carp_unique('A custom condition coderef can return at most 2 conditions: extra return values discarded') + if @extra; my @nonvalue_cols; if ($joinfree_cond) { + my ($joinfree_alias, $joinfree_source); + if (defined $args->{self_resultobj}) { + $joinfree_alias = $args->{foreign_alias}; + $joinfree_source = $self->related_source($args->{rel_name}); + } + elsif (defined $args->{foreign_resultobj}) { + $joinfree_alias = $args->{self_alias}; + $joinfree_source = $self; + } + # FIXME sanity check until things stabilize, remove at some point $self->throw_exception ( "A join-free condition returned for relationship '$args->{rel_name}' without a result object to chain from" - ) unless defined $args->{self_resultobj}; + ) unless $joinfree_alias; - my $foreign_src_fq_col_list = { map { ( "$args->{foreign_alias}.$_" => 1 ) } $self->related_source($args->{rel_name})->columns }; + my $fq_col_list = { map { ( "$joinfree_alias.$_" => 1 ) } $joinfree_source->columns }; # FIXME another sanity check if ( ref $joinfree_cond ne 'HASH' or - grep { ! $foreign_src_fq_col_list->{$_} } keys %$joinfree_cond + grep { ! $fq_col_list->{$_} } keys %$joinfree_cond ) { $self->throw_exception ( "The join-free condition returned for relationship '$args->{rel_name}' must be a hash " - .'reference with all keys being fully qualified column names of the foreign source' + .'reference with all keys being fully qualified column names of the corresponding source' ); } @@ -1813,7 +1828,7 @@ sub _resolve_relationship_condition { @{ $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond) } }; @nonvalue_cols = map - { $_ =~ /^\Q$args->{foreign_alias}.\E(.+)/ } + { $_ =~ /^\Q$joinfree_alias.\E(.+)/ } grep { ! 
$joinfree_cond_equality_columns->{$_} } keys %$joinfree_cond; diff --git a/t/lib/DBICTest/Util.pm b/t/lib/DBICTest/Util.pm index b821243dc..847fba8d8 100644 --- a/t/lib/DBICTest/Util.pm +++ b/t/lib/DBICTest/Util.pm @@ -66,7 +66,10 @@ sub check_customcond_args ($) { confess "Passed resultsource has no record of the supplied rel_name - likely wrong \$rsrc" unless ref $args->{self_resultsource}->relationship_info($args->{rel_name}); + my $rowobj_cnt = 0; + if (defined $args->{self_resultobj} or defined $args->{self_rowobj} ) { + $rowobj_cnt++; for (qw(self_resultobj self_rowobj)) { confess "Custom condition argument '$_' must be a result instance" unless defined blessed $args->{$_} and $args->{$_}->isa('DBIx::Class::Row'); @@ -76,6 +79,16 @@ sub check_customcond_args ($) { if refaddr($args->{self_resultobj}) != refaddr($args->{self_rowobj}); } + if (defined $args->{foreign_resultobj}) { + $rowobj_cnt++; + + confess "Custom condition argument 'foreign_resultobj' must be a result instance" + unless defined blessed $args->{foreign_resultobj} and $args->{foreign_resultobj}->isa('DBIx::Class::Row'); + } + + confess "Result objects supplied on both ends of a relationship" + if $rowobj_cnt == 2; + $args; } From 8c7c83982c9ec8ebf8674c1100f9103f2af06402 Mon Sep 17 00:00:00 2001 From: Gerda Shank Date: Mon, 10 Feb 2014 14:22:06 -0500 Subject: [PATCH 055/548] Add (now passing) tests for set_from_related() via 'foreign_resultobj' --- t/lib/DBICTest/Schema/Artist.pm | 17 +++++++++++++++++ t/lib/DBICTest/Schema/Track.pm | 23 +++++++++++++++++++++++ t/relationship/custom.t | 24 +++++++++++++++++++++++- 3 files changed, 63 insertions(+), 1 deletion(-) diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index e060affaf..d1d0740ea 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -51,6 +51,23 @@ __PACKAGE__->has_many( { order_by => { -asc => 'year'} }, ); +__PACKAGE__->has_many( + cds_cref_cond => 'DBICTest::Schema::CD', + sub { + # This is for test purposes only. A regular user does not + # need to sanity check the passed-in arguments, this is what + # the tests are for :) + my $args = &check_customcond_args; + + return ( + { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} }, + }, + $args->{self_rowobj} && { + "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + } + ); + }, +); __PACKAGE__->has_many( cds_80s => 'DBICTest::Schema::CD', diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index 60bad4e35..c09ff1de4 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -53,6 +53,29 @@ __PACKAGE__->grouping_column ('cd'); __PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD', undef, { proxy => { cd_title => 'title' }, }); +# custom condition coderef +__PACKAGE__->belongs_to( cd_cref_cond => 'DBICTest::Schema::CD', +sub { + # This is for test purposes only. A regular user does not + # need to sanity check the passed-in arguments, this is what + # the tests are for :) + my $args = &check_customcond_args; + + return ( + { + "$args->{foreign_alias}.cdid" => { -ident => "$args->{self_alias}.cd" }, + }, + + ( $args->{self_resultobj} ? { + "$args->{foreign_alias}.cdid" => $args->{self_resultobj}->cd + } : () ), + + ( $args->{foreign_resultobj} ? 
{ + "$args->{self_alias}.cd" => $args->{foreign_resultobj}->cdid + } : () ), + ); +} +); __PACKAGE__->belongs_to( disc => 'DBICTest::Schema::CD' => 'cd', { proxy => 'year' }); diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 0cf32b568..a623b4b8f 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -151,6 +151,10 @@ is_deeply( } 'prefetchy-fetchy-fetch'; +# create_related a plain cd via the equoivalent coderef cond, with no extra conditions +lives_ok { + $artist->create_related('cds_cref_cond', { title => 'related creation via coderef cond', year => '2010' } ); +} 'created_related with simple condition works'; # try to create_related a 80s cd throws_ok { @@ -159,7 +163,8 @@ throws_ok { 'Create failed - complex cond'; # now supply an explicit arg overwriting the ambiguous cond -my $id_2020 = $artist->create_related('cds_80s', { title => 'related creation 2', year => '2020' })->id; +my $cd_2020 = $artist->create_related('cds_80s', { title => 'related creation 2', year => '2020' }); +my $id_2020 = $cd_2020->id; is( $schema->resultset('CD')->find($id_2020)->title, 'related creation 2', @@ -268,4 +273,21 @@ is_deeply ( 'Prefetched singles in proper order' ); +# test set_from_related with a belongs_to custom condition +my $cd = $schema->resultset("CD")->find(4); +$artist = $cd->search_related('artist'); +my $track = $schema->resultset("Track")->create( { + trackid => 1, + cd => 3, + position => 99, + title => 'Some Track' +} ); +$track->set_from_related( cd_cref_cond => $cd ); +is ($track->get_column('cd'), 4, 'set from related via coderef cond'); +is_deeply ( + { $track->cd->get_columns }, + { $cd->get_columns }, + 'set from related via coderef cond inflates properly', +); + done_testing; From 78b3d153ad874085ee183cfa6ad827089adde583 Mon Sep 17 00:00:00 2001 From: Karen Etheridge Date: Wed, 11 Jun 2014 12:46:25 -0700 Subject: [PATCH 056/548] replace all remaining uses of self_rowobj with self_resultobj in pod, test schemas --- lib/DBIx/Class/Relationship/Base.pm | 6 +++--- t/lib/DBICTest/Schema/Artist.pm | 16 ++++++++-------- t/lib/DBICTest/Schema/Artwork.pm | 4 ++-- t/lib/DBICTest/Schema/Artwork_to_Artist.pm | 4 ++-- t/lib/DBICTest/Schema/Track.pm | 6 +++--- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index e11263fba..906307fec 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -183,7 +183,7 @@ While every coderef-based condition must return a valid C clause, it may elect to additionally return a simplified join-free condition hashref when invoked as C<< $result->relationship >>, as opposed to C<< $rs->related_resultset('relationship') >>. 
In this case C<$result> is -passed to the coderef as C<< $args->{self_rowobj} >>, so a user can do the +passed to the coderef as C<< $args->{self_resultobj} >>, so a user can do the following: sub { @@ -194,8 +194,8 @@ following: "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, ); diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index d1d0740ea..470796a21 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -62,8 +62,8 @@ __PACKAGE__->has_many( return ( { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, } ); }, @@ -81,8 +81,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { '=' => \ "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => { '=' => \[ '?', $args->{self_rowobj}->artistid ] }, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artist" => { '=' => \[ '?', $args->{self_resultobj}->artistid ] }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, } ); @@ -102,8 +102,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => 1984, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, "$args->{foreign_alias}.year" => 1984, } ); @@ -161,8 +161,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.genreid" => undef, - }, $args->{self_rowobj} && { - "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, + }, $args->{self_resultobj} && { + "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, "$args->{foreign_alias}.genreid" => undef, } ), diff --git a/t/lib/DBICTest/Schema/Artwork.pm b/t/lib/DBICTest/Schema/Artwork.pm index 6fc8bb695..d9ddc332e 100644 --- a/t/lib/DBICTest/Schema/Artwork.pm +++ b/t/lib/DBICTest/Schema/Artwork.pm @@ -36,8 +36,8 @@ __PACKAGE__->has_many('artwork_to_artist_test_m2m', 'DBICTest::Schema::Artwork_t return ( { "$args->{foreign_alias}.artwork_cd_id" => { -ident => "$args->{self_alias}.cd_id" }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artwork_cd_id" => $args->{self_rowobj}->cd_id, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artwork_cd_id" => $args->{self_resultobj}->cd_id, } ); } diff --git a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm index eb5b33357..e4c4cf2d4 100644 --- a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm +++ b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm @@ -33,8 +33,8 @@ __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist', { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" }, 
"$args->{foreign_alias}.rank" => { '<' => 10 }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.artistid" => $args->{self_rowobj}->artist_id, + $args->{self_resultobj} && { + "$args->{foreign_alias}.artistid" => $args->{self_resultobj}->artist_id, "$args->{foreign_alias}.rank" => { '<' => 10 }, } ); diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index c09ff1de4..a1cb27a1b 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -108,9 +108,9 @@ __PACKAGE__->has_many ( { "$args->{foreign_alias}.cd" => { -ident => "$args->{self_alias}.cd" }, "$args->{foreign_alias}.position" => { '>' => { -ident => "$args->{self_alias}.position" } }, }, - $args->{self_rowobj} && { - "$args->{foreign_alias}.cd" => $args->{self_rowobj}->get_column('cd'), - "$args->{foreign_alias}.position" => { '>' => $args->{self_rowobj}->pos }, + $args->{self_resultobj} && { + "$args->{foreign_alias}.cd" => $args->{self_resultobj}->get_column('cd'), + "$args->{foreign_alias}.position" => { '>' => $args->{self_resultobj}->pos }, } ) } From c2abfbbebf5952ef4e9f45750903f5129fdbabae Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 12 Jun 2014 06:54:12 +0200 Subject: [PATCH 057/548] Even more robust _resolve_condition shim - guess rel_name when needed --- lib/DBIx/Class/ResultSource.pm | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 9f6ac9cbc..fb4c27d2c 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1717,10 +1717,15 @@ sub _resolve_condition { my $args = { condition => $cond, - rel_name => $rel_name, - $is_objlike[1] ? ( self_alias => $res_args[0], foreign_alias => 'me', self_resultobj => $res_args[1] ) - : $is_objlike[0] ? ( self_alias => 'me', foreign_alias => $res_args[1], foreign_resultobj => $res_args[0] ) - : ( self_alias => $res_args[1], foreign_alias => $res_args[0] ) + + # where-is-waldo block guesses relname, then further down we override it if available + ( + $is_objlike[1] ? ( rel_name => $res_args[0], self_alias => $res_args[0], foreign_alias => 'me', self_resultobj => $res_args[1] ) + : $is_objlike[0] ? ( rel_name => $res_args[1], self_alias => 'me', foreign_alias => $res_args[1], foreign_resultobj => $res_args[0] ) + : ( rel_name => $res_args[0], self_alias => $res_args[1], foreign_alias => $res_args[0] ) + ), + + ( $rel_name ? ( rel_name => $rel_name ) : () ), }; ####################### @@ -1776,8 +1781,8 @@ sub _resolve_relationship_condition { self_resultsource => $self, self_alias => $args->{self_alias}, foreign_alias => $args->{foreign_alias}, - self_resultobj => (defined $args->{self_resultobj} ? $args->{self_resultobj} : undef), - foreign_resultobj => (defined $args->{foreign_resultobj} ? $args->{foreign_resultobj} : undef), + self_resultobj => $args->{self_resultobj}, + foreign_resultobj => $args->{foreign_resultobj}, }; # legacy - never remove these!!! 
From 9a94cf9318433056df45629190a8ae433a5a9722 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 12 Jun 2014 06:53:38 +0200 Subject: [PATCH 058/548] Fix test tripping on external warnings --- t/delete/cascade_missing.t | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/t/delete/cascade_missing.t b/t/delete/cascade_missing.t index 03de8832c..8bd8a769b 100644 --- a/t/delete/cascade_missing.t +++ b/t/delete/cascade_missing.t @@ -11,13 +11,13 @@ use DBICTest; my $schema = DBICTest->init_schema(); $schema->_unregister_source('CD'); -warnings_like { +warnings_exist { my $s = $schema; lives_ok { $_->delete for $s->resultset('Artist')->all; } 'delete on rows with dangling rels lives'; } [ - # 12 == 3 artists * failed cascades: + # 9 == 3 artists * failed cascades: # cds # cds_unordered # cds_very_very_very_long_relationship_name From 5592d633292c0fecf45b554ab6d4b235b140dc15 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 13 Jun 2014 16:37:31 +0200 Subject: [PATCH 059/548] More fallout of the _resolve_cond refactor 03f6d1f7 I had in the back of my mind "one can't just create random objects" but figured it may work anyway - WROOOOOOOONG. Instead pass through the hash as before, will need to rethink that part before settling on an API for resolve_relationship_condition. FML --- lib/DBIx/Class/ResultSource.pm | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index fb4c27d2c..ce88cbb89 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1704,11 +1704,10 @@ sub _resolve_condition { else { $res_args[$_] ||= {}; + # hate everywhere - have to pass in as a plain hash + # pretending to be an object at least for now $self->throw_exception("Unsupported object-like structure encountered: $res_args[$_]") unless ref $res_args[$_] eq 'HASH'; - - # hate everywhere - $res_args[$_] = $self->relationship_info($rel_name)->{source}->result_class->new($res_args[$_]); } } @@ -1881,7 +1880,16 @@ sub _resolve_relationship_condition { ; for my $i (0..$#$obj_cols) { - if (defined $args->{self_resultobj} and ! $obj->has_column_loaded($obj_cols->[$i])) { + + # FIXME - temp shim + if (! blessed $obj) { + $cond->{"$plain_alias.$plain_cols->[$i]"} = $obj->{$obj_cols->[$i]}; + } + elsif ( + defined $args->{self_resultobj} + and + ! $obj->has_column_loaded($obj_cols->[$i]) + ) { $self->throw_exception(sprintf "Unable to resolve relationship '%s' from object '%s': column '%s' not " From 6cfb1d2f28554c9d8943275fee3774a5f4d448d4 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 16 Jun 2014 14:21:59 +0200 Subject: [PATCH 060/548] Remove vestigial test envvar --- lib/DBIx/Class/_Util.pm | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 1407ddcbd..7cf6a5272 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -23,14 +23,8 @@ BEGIN { DBICTEST => eval { DBICTest::RunMode->is_author } ? 1 : 0, # During 5.13 dev cycle HELEMs started to leak on copy - PEEPEENESS => - # request for all tests would force "non-leaky" illusion and vice-versa - defined $ENV{DBICTEST_ALL_LEAKS} ? !$ENV{DBICTEST_ALL_LEAKS} - # otherwise confess that this perl is busted ONLY on smokers - : eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006) ? 
1 - # otherwise we are good - : 0 - , + # add an escape for these perls ON SMOKERS - a user will still get death + PEEPEENESS => ( eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006) ), ASSERT_NO_INTERNAL_WANTARRAY => $ENV{DBIC_ASSERT_NO_INTERNAL_WANTARRAY} ? 1 : 0, From 7a1bbec9a9d6a818c6b713fd723312a4247266cd Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 16 Jun 2014 14:28:25 +0200 Subject: [PATCH 061/548] (travis) Announce what we set during POISON_ENV --- maint/travis-ci_scripts/30_before_script.bash | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index 10f380c29..e6c2ab593 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -6,11 +6,16 @@ if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi # poison the environment if [[ "$POISON_ENV" = "true" ]] ; then - # look through lib, find all mentioned ENVvars and set them - # to true and see if anything explodes - for var in $(grep -P '\$ENV\{' -r lib/ | grep -oP 'DBIC_\w+' | sort -u | grep -v DBIC_TRACE) ; do + # in addition to making sure tests do not rely on implicid order of + # returned results, look through lib, find all mentioned ENVvars and + # set them to true and see if anything explodes + for var in \ + DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER \ + $(grep -P '\$ENV\{' -r lib/ --exclude-dir Optional | grep -oP '\bDBIC\w+' | sort -u | grep -v DBIC_TRACE) + do if [[ -z "${!var}" ]] ; then export $var=1 + echo "POISON_ENV: setting $var to 1" fi done @@ -18,8 +23,6 @@ if [[ "$POISON_ENV" = "true" ]] ; then export DBI_DSN="dbi:ODBC:server=NonexistentServerAddress" export DBI_DRIVER="ADO" - # make sure tests do not rely on implicid order of returned results - export DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER=1 # emulate a local::lib-like env # trick cpanm into executing true as shell - we just need the find+unpack From 37b9b05b2fd693c01ef01a29765fba97077393d2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 16 Jun 2014 16:15:44 +0200 Subject: [PATCH 062/548] Fix ::Ordered in combination with delete_all See pod-changes for description of the solution. I cringe but... oh well --- Changes | 2 ++ lib/DBIx/Class/Ordered.pm | 53 ++++++++++++++++++++++++---------- t/ordered/unordered_movement.t | 25 +++++++++------- 3 files changed, 54 insertions(+), 26 deletions(-) diff --git a/Changes b/Changes index b1b040f86..06e9cf02e 100644 --- a/Changes +++ b/Changes @@ -10,6 +10,8 @@ Revision history for DBIx::Class - Add extra custom condition coderef attribute 'foreign_resultobj' to allow for proper reverse-relationship emulation (i.e. 
$result->set_from_related($custom_cond, $foreign_resultobj) + - When in a transaction, DBIC::Ordered now seamlesly handles result + objects that went out of sync with the storage (RT#96499) * Fixes - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases diff --git a/lib/DBIx/Class/Ordered.pm b/lib/DBIx/Class/Ordered.pm index 5e40dc0c2..a5db68bda 100644 --- a/lib/DBIx/Class/Ordered.pm +++ b/lib/DBIx/Class/Ordered.pm @@ -367,7 +367,30 @@ sub move_to { my $position_column = $self->position_column; - if ($self->is_column_changed ($position_column) ) { + my $is_txn; + if ($is_txn = $self->result_source->schema->storage->transaction_depth) { + # Reload position state from storage + # The thinking here is that if we are in a transaction, it is + # *more likely* the object went out of sync due to resultset + # level shenanigans. Instead of always reloading (slow) - go + # ahead and hand-hold only in the case of higher layers + # requesting the safety of a txn + + $self->store_column( + $position_column, + ( $self->result_source + ->resultset + ->search($self->_storage_ident_condition, { rows => 1, columns => $position_column }) + ->cursor + ->next + )[0] || $self->throw_exception( + sprintf "Unable to locate object '%s' in storage - object went ouf of sync...?", + $self->ID + ), + ); + delete $self->{_dirty_columns}{$position_column}; + } + elsif ($self->is_column_changed ($position_column) ) { # something changed our position, we need to know where we # used to be - use the stashed value $self->store_column($position_column, delete $self->{_column_data_in_storage}{$position_column}); @@ -380,7 +403,7 @@ sub move_to { return 0; } - my $guard = $self->result_source->schema->txn_scope_guard; + my $guard = $is_txn ? undef : $self->result_source->schema->txn_scope_guard; my ($direction, @between); if ( $from_position < $to_position ) { @@ -402,7 +425,7 @@ sub move_to { $self->_shift_siblings ($direction, @between); $self->_ordered_internal_update({ $position_column => $new_pos_val }); - $guard->commit; + $guard->commit if $guard; return 1; } @@ -861,18 +884,18 @@ will prevent such race conditions going undetected. =head2 Multiple Moves -Be careful when issuing move_* methods to multiple objects. If -you've pre-loaded the objects then when you move one of the objects -the position of the other object will not reflect their new value -until you reload them from the database - see -L. - -There are times when you will want to move objects as groups, such -as changing the parent of several objects at once - this directly -conflicts with this problem. One solution is for us to write a -ResultSet class that supports a parent() method, for example. Another -solution is to somehow automagically modify the objects that exist -in the current object's result set to have the new position value. +If you have multiple same-group result objects already loaded from storage, +you need to be careful when executing C operations on them: +without a L reload the L of the +"siblings" will be out of sync with the underlying storage. + +Starting from version C<0.082800> DBIC will implicitly perform such +reloads when the C happens as a part of a transaction +(a good example of such situation is C<< $ordered_resultset->delete_all >>). + +If it is not possible for you to wrap the entire call-chain in a transaction, +you will need to call L to get an object +up-to-date before proceeding, otherwise undefined behavior will result. 
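A minimal sketch of the two approaches described above (illustrative only;
it assumes an ordered 'tracks' relationship and a $cd result object named
as in the DBICTest suite):

  # inside a transaction the up-to-date position is reloaded from storage
  # implicitly, so out-of-sync siblings are handled for you
  $schema->txn_do(sub {
    $cd->tracks->delete_all;
  });

  # outside a transaction, bring each sibling up to date by hand before
  # any reposition/delete takes place
  for my $track ($cd->tracks->all) {
    $track->discard_changes;
    $track->delete;
  }
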
=head2 Default Values diff --git a/t/ordered/unordered_movement.t b/t/ordered/unordered_movement.t index 9cbc3da81..dc083068a 100644 --- a/t/ordered/unordered_movement.t +++ b/t/ordered/unordered_movement.t @@ -9,19 +9,22 @@ use DBICTest; my $schema = DBICTest->init_schema(); my $cd = $schema->resultset('CD')->next; +$cd->tracks->delete; -lives_ok { - $cd->tracks->delete; +$schema->resultset('CD')->related_resultset('tracks')->delete; - my @tracks = map - { $cd->create_related('tracks', { title => "t_$_", position => $_ }) } - (4,2,5,1,3) - ; +is $cd->tracks->count, 0, 'No tracks'; - for (@tracks) { - $_->discard_changes; - $_->delete; - } -} 'Creation/deletion of out-of order tracks successful'; +$cd->create_related('tracks', { title => "t_$_", position => $_ }) + for (4,2,3,1,5); + +is $cd->tracks->count, 5, 'Created 5 tracks'; + +# a txn should force the implicit pos reload, regardless of order +$schema->txn_do(sub { + $cd->tracks->delete_all +}); + +is $cd->tracks->count, 0, 'Successfully deleted everything'; done_testing; From e6ed824bbb02cbe1dfdc18987f468791af228e63 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 16 Jun 2014 16:37:27 +0200 Subject: [PATCH 063/548] Minor optimization of codepath (no func changes) --- lib/DBIx/Class/Ordered.pm | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/lib/DBIx/Class/Ordered.pm b/lib/DBIx/Class/Ordered.pm index a5db68bda..539abd833 100644 --- a/lib/DBIx/Class/Ordered.pm +++ b/lib/DBIx/Class/Ordered.pm @@ -742,20 +742,13 @@ sub _shift_siblings { if ( first { $_ eq $position_column } ( map { @$_ } (values %{{ $rsrc->unique_constraints }} ) ) ) { - my $cursor = $shift_rs->search ( + my $clean_rs = $rsrc->resultset; + + for ( $shift_rs->search ( {}, { order_by => { "-$ord", $position_column }, select => [$position_column, @pcols] } - )->cursor; - my $rs = $rsrc->resultset; - - my @all_data = $cursor->all; - while (my $data = shift @all_data) { - my $pos = shift @$data; - my $cond; - for my $i (0.. $#pcols) { - $cond->{$pcols[$i]} = $data->[$i]; - } - - $rs->find($cond)->update ({ $position_column => $pos + ( ($op eq '+') ? 1 : -1 ) }); + )->cursor->all ) { + my $pos = shift @$_; + $clean_rs->find(@$_)->update ({ $position_column => $pos + ( ($op eq '+') ? 1 : -1 ) }); } } else { From 1b658919f5ff67e9ac656daf6642a3bae8cec282 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 17 Jun 2014 12:41:06 +0200 Subject: [PATCH 064/548] Add a self-explanatory *compile-time* $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} The idea is to be able to catch tests that rely on a particular order of the results of ->all. 
->next is untouched as injecting extra \'RANDOM' order clauses is too heavy-handed for a test environment (and besides, having ->all and ->next return stuff in differing order is just as good of a monkey-wrench) --- lib/DBIx/Class/Storage/DBI/Cursor.pm | 14 +++++++++++--- lib/DBIx/Class/_Util.pm | 2 ++ t/104view.t | 2 ++ t/71mysql.t | 4 ++-- t/73oracle_hq.t | 6 ++++++ t/746mssql.t | 4 ++-- t/88result_set_column.t | 6 ++++++ t/cdbi/testlib/DBIC/Test/SQLite.pm | 5 +++++ t/prefetch/correlated.t | 4 ++-- t/prefetch/grouped.t | 12 +++++++----- t/prefetch/via_search_related.t | 2 +- t/resultset/update_delete.t | 15 +++++++++++++-- 12 files changed, 59 insertions(+), 17 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBI/Cursor.pm b/lib/DBIx/Class/Storage/DBI/Cursor.pm index 6681d2354..4d15401a2 100644 --- a/lib/DBIx/Class/Storage/DBI/Cursor.pm +++ b/lib/DBIx/Class/Storage/DBI/Cursor.pm @@ -3,10 +3,11 @@ package DBIx::Class::Storage::DBI::Cursor; use strict; use warnings; -use base qw/DBIx::Class::Cursor/; +use base 'DBIx::Class::Cursor'; use Try::Tiny; -use Scalar::Util qw/refaddr weaken/; +use Scalar::Util qw(refaddr weaken); +use List::Util 'shuffle'; use namespace::clean; __PACKAGE__->mk_group_accessors('simple' => @@ -177,7 +178,14 @@ sub all { (undef, $sth) = $self->storage->_select( @{$self->{args}} ); - return @{$sth->fetchall_arrayref}; + return ( + DBIx::Class::_ENV_::SHUFFLE_UNORDERED_RESULTSETS + and + ! $self->{attrs}{order_by} + ) + ? shuffle @{$sth->fetchall_arrayref} + : @{$sth->fetchall_arrayref} + ; } sub sth { diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 7cf6a5272..ad438e755 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -26,6 +26,8 @@ BEGIN { # add an escape for these perls ON SMOKERS - a user will still get death PEEPEENESS => ( eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006) ), + SHUFFLE_UNORDERED_RESULTSETS => $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} ? 1 : 0, + ASSERT_NO_INTERNAL_WANTARRAY => $ENV{DBIC_ASSERT_NO_INTERNAL_WANTARRAY} ? 1 : 0, IV_SIZE => $Config{ivsize}, diff --git a/t/104view.t b/t/104view.t index a13ea00e5..4abe7e82a 100644 --- a/t/104view.t +++ b/t/104view.t @@ -29,6 +29,7 @@ is_deeply ( { result_class => 'DBIx::Class::ResultClass::HashRefInflator', prefetch => ['artist', { tracks => [qw/cd year1999cd year2000cd/] } ], + order_by => 'tracks.trackid', }, )->all ], @@ -39,6 +40,7 @@ is_deeply ( result_class => 'DBIx::Class::ResultClass::HashRefInflator', prefetch => ['artist', { tracks => [qw/cd year1999cd year2000cd/] } ], columns => [qw/cdid single_track title/], # to match the columns retrieved by the virtview + order_by => 'tracks.trackid', }, )->all ], diff --git a/t/71mysql.t b/t/71mysql.t index bebbc4b2f..ef2c7de6c 100644 --- a/t/71mysql.t +++ b/t/71mysql.t @@ -352,8 +352,8 @@ ZEROINSEARCH: { ]}); warnings_exist { is_deeply ( - [ $restrict_rs->get_column('y')->all ], - [ $y_rs->all ], + [ sort $restrict_rs->get_column('y')->all ], + [ sort $y_rs->all ], 'Zero year was correctly excluded from resultset', ) } qr/ \QUse of distinct => 1 while selecting anything other than a column \E diff --git a/t/73oracle_hq.t b/t/73oracle_hq.t index 0595edf53..6d27a0561 100644 --- a/t/73oracle_hq.t +++ b/t/73oracle_hq.t @@ -3,6 +3,12 @@ use warnings; use Test::Exception; use Test::More; + +# I *strongly* suspect Oracle has an implicit stable output order when +# dealing with HQs. So just punt on the entire shuffle thing. 
+BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 } + + use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); diff --git a/t/746mssql.t b/t/746mssql.t index 5e062f606..e4a9de09d 100644 --- a/t/746mssql.t +++ b/t/746mssql.t @@ -280,8 +280,8 @@ SQL my $sealed_owners = $owners->as_subselect_rs; is_deeply ( - [ map { $_->name } ($sealed_owners->all) ], - [ map { $_->name } ($owners->all) ], + [ sort map { $_->name } ($sealed_owners->all) ], + [ sort map { $_->name } ($owners->all) ], "$test_type: Sort preserved from within a subquery", ); } diff --git a/t/88result_set_column.t b/t/88result_set_column.t index 9a04bd16d..21e1c9cac 100644 --- a/t/88result_set_column.t +++ b/t/88result_set_column.t @@ -4,6 +4,12 @@ use warnings; use Test::More; use Test::Warn; use Test::Exception; + +# MASSIVE FIXME - there is a hole in ::RSC / as_subselect_rs +# losing the order. Needs a rework/extract of the realiaser, +# and that's a whole another bag of dicks +BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 } + use lib qw(t/lib); use DBICTest ':DiffSQL'; diff --git a/t/cdbi/testlib/DBIC/Test/SQLite.pm b/t/cdbi/testlib/DBIC/Test/SQLite.pm index 5dc4a66f1..905ed8811 100644 --- a/t/cdbi/testlib/DBIC/Test/SQLite.pm +++ b/t/cdbi/testlib/DBIC/Test/SQLite.pm @@ -36,6 +36,11 @@ use warnings; use Test::More; +# adding implicit search criteria to the iterator will alter the test +# mechanics - leave everything as-is instead, and hope SQLite won't +# change too much +BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 } + use lib 't/lib'; use DBICTest; diff --git a/t/prefetch/correlated.t b/t/prefetch/correlated.t index 6c2e0b769..fdbd2622f 100644 --- a/t/prefetch/correlated.t +++ b/t/prefetch/correlated.t @@ -13,7 +13,7 @@ my $cdrs = $schema->resultset('CD')->search({ 'me.artist' => { '!=', 2 }}); my $cd_data = { map { $_->cdid => { siblings => $cdrs->search ({ artist => $_->get_column('artist') })->count - 1, - track_titles => [ map { $_->title } ($_->tracks->all) ], + track_titles => [ sort $_->tracks->get_column('title')->all ], }, } ( $cdrs->all ) }; @@ -65,7 +65,7 @@ $schema->is_executed_querycount( sub { cmp_deeply ( { map { $_->cdid => { - track_titles => [ map { $_->title } ($_->tracks->all) ], + track_titles => [ sort map { $_->title } ($_->tracks->all) ], siblings => $_->get_column ('sibling_count'), } } $c_rs->all diff --git a/t/prefetch/grouped.t b/t/prefetch/grouped.t index a1b986057..0f6f59a29 100644 --- a/t/prefetch/grouped.t +++ b/t/prefetch/grouped.t @@ -23,10 +23,12 @@ for ($cd_rs->all) { is ($_->tracks->count, 3, '3 tracks for CD' . $_->id ); } +my @cdids = sort $cd_rs->get_column ('cdid')->all; + # Test a belongs_to prefetch of a has_many { my $track_rs = $schema->resultset ('Track')->search ( - { 'me.cd' => { -in => [ $cd_rs->get_column ('cdid')->all ] } }, + { 'me.cd' => { -in => \@cdids } }, { select => [ 'me.cd', @@ -72,7 +74,7 @@ for ($cd_rs->all) { me )', [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' } - => $_ ] } ($cd_rs->get_column ('cdid')->all) ], + => $_ ] } @cdids ], 'count() query generated expected SQL', ); @@ -91,7 +93,7 @@ for ($cd_rs->all) { WHERE ( me.cd IN ( ?, ?, ?, ?, ? 
) ) )', [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' } - => $_ ] } ( ($cd_rs->get_column ('cdid')->all) x 2 ) ], + => $_ ] } (@cdids) x 2 ], 'next() query generated expected SQL', ); @@ -283,7 +285,7 @@ for ($cd_rs->all) { # RT 47779, test group_by as a scalar ref { my $track_rs = $schema->resultset ('Track')->search ( - { 'me.cd' => { -in => [ $cd_rs->get_column ('cdid')->all ] } }, + { 'me.cd' => { -in => \@cdids } }, { select => [ 'me.cd', @@ -312,7 +314,7 @@ for ($cd_rs->all) { me )', [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' } - => $_ ] } ($cd_rs->get_column ('cdid')->all) ], + => $_ ] } (@cdids) ], 'count() query generated expected SQL', ); } diff --git a/t/prefetch/via_search_related.t b/t/prefetch/via_search_related.t index f1aa3d000..316035d4b 100644 --- a/t/prefetch/via_search_related.t +++ b/t/prefetch/via_search_related.t @@ -185,7 +185,7 @@ lives_ok (sub { }); is_deeply( - $rs->all_hri, + $rs->search({}, { order_by => 'me.title' })->all_hri, [ { title => "Caterwaulin' Blues", max_trk => 3 }, { title => "Come Be Depressed With Us", max_trk => 3 }, diff --git a/t/resultset/update_delete.t b/t/resultset/update_delete.t index c3e8c2f58..7d9efa9c1 100644 --- a/t/resultset/update_delete.t +++ b/t/resultset/update_delete.t @@ -5,6 +5,11 @@ use lib qw(t/lib); use Test::More; use Test::Exception; +# MASSIVE FIXME - there is a hole in ::RSC / as_subselect_rs +# losing the order. Needs a rework/extract of the realiaser, +# and that's a whole another bag of dicks +BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 } + use DBICTest::Schema::CD; BEGIN { # the default scalarref table name will not work well for this test @@ -31,7 +36,7 @@ my ($fa, $fb, $fc) = $tkfks->related_resultset ('fourkeys')->populate ([ # [qw/2 2 /], #]); my ($ta, $tb) = $schema->resultset ('TwoKeys') - ->search ( [ { artist => 1, cd => 1 }, { artist => 2, cd => 2 } ]) + ->search ( [ { artist => 1, cd => 1 }, { artist => 2, cd => 2 } ], { order_by => 'artist' }) ->all; my $tkfk_cnt = $tkfks->count; @@ -73,7 +78,10 @@ is ($fb->discard_changes->read_count, 21, 'Update ran only once on discard-join is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); # make the multi-join stick -my $fks_multi = $fks->search({ 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } }); +my $fks_multi = $fks->search( + { 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } }, + { order_by => [ $fks->result_source->primary_columns ] }, +); $schema->is_executed_sql_bind( sub { $fks_multi->update ({ read_count => \ 'read_count + 1' }) }, [ @@ -85,6 +93,7 @@ $schema->is_executed_sql_bind( sub { ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? GROUP BY me.foo, me.bar, me.hello, me.goodbye + ORDER BY foo, bar, hello, goodbye ', (1, 2) x 2, 666, @@ -118,6 +127,7 @@ $schema->is_executed_sql_bind( sub { ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello WHERE "blah" = "bleh" AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? 
GROUP BY me.foo, me.bar, me.hello, me.goodbye + ORDER BY foo, bar, hello, goodbye ', (1, 2) x 2, 666, @@ -147,6 +157,7 @@ $schema->is_executed_sql_bind( sub { AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? + ORDER BY foo, bar, hello, goodbye ) ) ', From b6b8f72fff65a26976069f55cf134e6e607e6112 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dagfinn=20Ilmari=20Manns=C3=A5ker?= Date: Fri, 28 Mar 2014 16:48:17 +0000 Subject: [PATCH 065/548] Populate caches for related result sets even if they're empty This avoids unnecessary database hits when accessing prefetched related resultsets with no rows. --- Changes | 2 ++ lib/DBIx/Class/ResultSet.pm | 4 ++-- t/prefetch/empty_cache.t | 39 +++++++++++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 t/prefetch/empty_cache.t diff --git a/Changes b/Changes index 06e9cf02e..57e61353d 100644 --- a/Changes +++ b/Changes @@ -19,6 +19,8 @@ Revision history for DBIx::Class - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver routines, triggering a connection before the set-cycle is finished + - Prevent erroneous database hit when accessing prefetched related + resultsets with no rows - Fix incorrect handling of custom relationship conditions returning SQLA literal expressions - Fix multi-value literal populate not working with simplified bind diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 92665d6df..2bc320b7b 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -3098,11 +3098,11 @@ sub related_resultset { if (my $cache = $self->get_cache) { my @related_cache = map - { @{$_->related_resultset($rel)->get_cache||[]} } + { $_->related_resultset($rel)->get_cache || () } @$cache ; - $new->set_cache(\@related_cache) if @related_cache; + $new->set_cache([ map @$_, @related_cache ]) if @related_cache == @$cache; } $new; diff --git a/t/prefetch/empty_cache.t b/t/prefetch/empty_cache.t new file mode 100644 index 000000000..6fa4bb98f --- /dev/null +++ b/t/prefetch/empty_cache.t @@ -0,0 +1,39 @@ +use strict; +use warnings; + +use Test::More; + +use lib qw(t/lib); +use DBICTest; + +my $schema = DBICTest->init_schema(); + +my $no_albums_artist = { name => 'We Have No Albums' }; +$schema->resultset('Artist')->create($no_albums_artist); + +foreach ( + [empty => \'0 = 1', 0], + [nonempty => $no_albums_artist, 1], +) { + my ($desc, $cond, $count) = @$_; + + my $artists_rs = $schema->resultset('Artist') + ->search($cond, { prefetch => 'cds', cache => 1 }); + + $schema->is_executed_querycount( sub { + my @artists = $artists_rs->all; + is( 0+@{$artists_rs->get_cache}, $count, "$desc cache on original resultset" ); + is( 0+@artists, $count, "$desc original resultset" ); + }, 1, "->all on $desc original resultset hit db" ); + + $schema->is_executed_querycount( sub { + my $cds_rs = $artists_rs->related_resultset('cds'); + is_deeply( $cds_rs->get_cache, [], 'empty cache on related resultset' ); + + my @cds = $cds_rs->all; + is( 0+@cds, 0, 'empty related resultset' ); + }, 0, '->all on empty related resultest didn\'t hit db' ); +} + + +done_testing; From 9a0dd978bd1c47059e4dabc0a389b4bb679b04fd Mon Sep 17 00:00:00 2001 From: Alexander Hartmaier Date: Thu, 18 Jul 2013 18:31:46 +0200 Subject: 
[PATCH 066/548] More coherent +(columns/select/as) quoting notices and descriptions --- lib/DBIx/Class/ResultSet.pm | 52 ++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 2bc320b7b..8d960794c 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -3892,7 +3892,7 @@ case the key is the C value, and the value is used as the C from that, then auto-populates C from C and L. @@ -3905,16 +3905,20 @@ is the same as =head2 +columns +B You B explicitly quote C<'+columns'> when using this attribute. +Not doing so causes Perl to incorrectly interpret C<+columns> as a bareword +with a unary plus operator before it, which is the same as simply C. + =over 4 -=item Value: \@columns +=item Value: \@extra_columns =back Indicates additional columns to be selected from storage. Works the same as -L but adds columns to the selection. (You may also use the +L but adds columns to the current selection. (You may also use the C attribute, as in earlier versions of DBIC, but this is -deprecated). For example:- +deprecated) $schema->resultset('CD')->search(undef, { '+columns' => ['artist.name'], @@ -3926,20 +3930,6 @@ passed to object inflation. Note that the 'artist' is the name of the column (or relationship) accessor, and 'name' is the name of the column accessor in the related table. -B You need to explicitly quote '+columns' when defining the attribute. -Not doing so causes Perl to incorrectly interpret +columns as a bareword with a -unary plus operator before it. - -=head2 include_columns - -=over 4 - -=item Value: \@columns - -=back - -Deprecated. Acts as a synonym for L for backward compatibility. - =head2 select =over 4 @@ -3970,20 +3960,22 @@ identifier aliasing. You can however alias a function, so you can use it in e.g. an C clause. This is done via the C<-as> B. + =over 4 -Indicates additional columns to be selected from storage. Works the same as -L but adds columns to the default selection, instead of specifying -an explicit list. +=item Value: \@extra_select_columns =back +Indicates additional columns to be selected from storage. Works the same as +L but adds columns to the current selection, instead of specifying +a new explicit list. + =head2 as =over 4 @@ -3992,7 +3984,7 @@ an explicit list. =back -Indicates column names for object inflation. That is L indicates the +Indicates DBIC-side names for object inflation. That is L indicates the slot name in which the column value will be stored within the L object. The value will then be accessible via this identifier by the C method (or via the object accessor B for details. =head2 +as +B You B explicitly quote C<'+as'> when using this attribute. +Not doing so causes Perl to incorrectly interpret C<+as> as a bareword +with a unary plus operator before it, which is the same as simply C. + =over 4 -Indicates additional column names for those added via L. See L. +=item Value: \@extra_inflation_names =back +Indicates additional inflation names for selectors added via L. See L. 
+ =head2 join =over 4 From 6dd8c7b6732363559bb33121f6acd2b0b96f1fe9 Mon Sep 17 00:00:00 2001 From: Alexander Hartmaier Date: Thu, 8 Aug 2013 10:07:51 +0200 Subject: [PATCH 067/548] Improved docs for ResultSet columns attribute on how to retrieve related columns --- lib/DBIx/Class/ResultSet.pm | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 8d960794c..21fafff49 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -3896,12 +3896,26 @@ earlier versions of DBIC, but this is deprecated) Essentially C does the same as L and L. - columns => [ 'foo', { bar => 'baz' } ] + columns => [ 'some_column', { dbic_slot => 'another_column' } ] is the same as - select => [qw/foo baz/], - as => [qw/foo bar/] + select => [qw(some_column another_column)], + as => [qw(some_column dbic_slot)] + +If you want to individually retrieve related columns (in essence perform +manual prefetch) you have to make sure to specify the correct inflation slot +chain such that it matches existing relationships: + + my $rs = $schema->resultset('Artist')->search({}, { + # required to tell DBIC to collapse has_many relationships + collapse => 1, + join => { cds => 'tracks'}, + '+columns' => { + 'cds.cdid' => 'cds.cdid', + 'cds.tracks.title' => 'tracks.title', + }, + }); =head2 +columns From a89c6a79e09e6dd6798b2c4500690916767eb9c5 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 1 Jul 2014 11:33:14 +0200 Subject: [PATCH 068/548] Add an explicit webchat link, change IRC to x_IRC This should work around the issue of 'preferred company/startup' raised in https://github.com/CPAN-API/metacpan-web/issues/1235#issuecomment-47610925 --- Makefile.PL | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile.PL b/Makefile.PL index 108624a9f..531f3f835 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -16,7 +16,8 @@ BEGIN { } homepage 'http://www.dbix-class.org/'; -resources 'IRC' => 'irc://irc.perl.org/#dbix-class'; +resources 'x_IRC' => 'irc://irc.perl.org/#dbix-class'; +resources 'x_WebIRC' => 'https://chat.mibbit.com/#dbix-class@irc.perl.org'; resources 'license' => 'http://dev.perl.org/licenses/'; resources 'repository' => 'https://github.com/dbsrgits/DBIx-Class'; resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class'; From 18637ebbd75e7c2fb129da41960fde4b63f8c8fe Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 28 Jun 2014 12:23:52 +0200 Subject: [PATCH 069/548] Forgotten reenable of syntax test to guard for 37af2968e --- xt/strictures.t | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/xt/strictures.t b/xt/strictures.t index 399662163..adfd9a71f 100644 --- a/xt/strictures.t +++ b/xt/strictures.t @@ -12,9 +12,18 @@ unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_strictures') ) { : plan skip_all => "Test needs: $missing" } - use File::Find; +# The rationale is - if we can load all our optdeps +# that are related to lib/ - then we should be able to run +# perl -c checks (via syntax_ok), and all should just work +my $missing_groupdeps_present = grep + { DBIx::Class::Optional::Dependencies->req_ok_for($_) } + grep + { $_ !~ /^ (?: test | rdbms | dist ) _ /x } + keys %{DBIx::Class::Optional::Dependencies->req_group_list} +; + find({ wanted => sub { -f $_ or return; @@ -31,7 +40,8 @@ find({ Test::Strict::strict_ok($f); Test::Strict::warnings_ok($f); - #Test::Strict::syntax_ok($f) if $f =~ /^ (?: lib )/x; 
+ Test::Strict::syntax_ok($f) + if ! $missing_groupdeps_present and $f =~ /^ (?: lib )/x; }, no_chdir => 1, }, (qw(lib t examples maint)) ); From 77a6448dc1197c5f6b2621c2e10fc0c6d6570d9c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 1 Jul 2014 10:45:02 +0200 Subject: [PATCH 070/548] namespace::autoclean is a really stupid idea - purge with fire --- lib/DBIx/Class/Admin.pm | 2 +- lib/DBIx/Class/Optional/Dependencies.pm | 1 - lib/DBIx/Class/ResultSet.pm | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/DBIx/Class/Admin.pm b/lib/DBIx/Class/Admin.pm index b30aa0a3b..4c7c6bb13 100644 --- a/lib/DBIx/Class/Admin.pm +++ b/lib/DBIx/Class/Admin.pm @@ -15,7 +15,7 @@ use MooseX::Types::JSON qw(JSON); use MooseX::Types::Path::Class qw(Dir File); use MooseX::Types::LoadableClass qw(LoadableClass); use Try::Tiny; -use namespace::autoclean; +use namespace::clean; =head1 NAME diff --git a/lib/DBIx/Class/Optional/Dependencies.pm b/lib/DBIx/Class/Optional/Dependencies.pm index 23ffebed8..0bbc6c41f 100644 --- a/lib/DBIx/Class/Optional/Dependencies.pm +++ b/lib/DBIx/Class/Optional/Dependencies.pm @@ -36,7 +36,6 @@ my $admin_basic = { %$min_json_any, 'MooseX::Types::Path::Class' => '0.05', 'MooseX::Types::JSON' => '0.02', - 'namespace::autoclean' => '0.09', }; my $admin_script = { diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 21fafff49..0d4d4bdb9 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -88,7 +88,6 @@ similar to: package MyApp::Schema::ResultSet::User; use Moose; - use namespace::autoclean; use MooseX::NonMoose; extends 'DBIx::Class::ResultSet'; From 293a9242a533ffe950028a36151f566ab7e70c8b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 1 Jul 2014 11:00:57 +0200 Subject: [PATCH 071/548] Move the Subclassing POD under Examples --- lib/DBIx/Class/ResultSet.pm | 54 ++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 0d4d4bdb9..4496efd03 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -80,33 +80,6 @@ However, if it is used in a boolean context it is B true. So if you want to check if a resultset has any results, you must use C. -=head1 CUSTOM ResultSet CLASSES THAT USE Moose - -If you want to make your custom ResultSet classes with L, use a template -similar to: - - package MyApp::Schema::ResultSet::User; - - use Moose; - use MooseX::NonMoose; - extends 'DBIx::Class::ResultSet'; - - sub BUILDARGS { $_[2] } - - ...your code... - - __PACKAGE__->meta->make_immutable; - - 1; - -The L is necessary so that the L constructor does not -clash with the regular ResultSet constructor. Alternatively, you can use: - - __PACKAGE__->meta->make_immutable(inline_constructor => 0); - -The L is necessary because the -signature of the ResultSet C is C<< ->new($source, \%args) >>. - =head1 EXAMPLES =head2 Chaining resultsets @@ -194,6 +167,33 @@ Which is the same as: See: L, L, L, L, L. +=head2 Custom ResultSet classes using Moose + +If you want to make your custom ResultSet classes with L, use a template +similar to: + + package MyApp::Schema::ResultSet::User; + + use Moose; + use MooseX::NonMoose; + extends 'DBIx::Class::ResultSet'; + + sub BUILDARGS { $_[2] } + + ...your code... + + __PACKAGE__->meta->make_immutable; + + 1; + +The L is necessary so that the L constructor does not +clash with the regular ResultSet constructor. 
Alternatively, you can use: + + __PACKAGE__->meta->make_immutable(inline_constructor => 0); + +The L is necessary because the +signature of the ResultSet C is C<< ->new($source, \%args) >>. + =head1 METHODS =head2 new From 9fbe82fb0dd6a961ba378d6b24f6f220273099bc Mon Sep 17 00:00:00 2001 From: skaufman Date: Sat, 28 Jun 2014 22:33:27 -0400 Subject: [PATCH 072/548] Change tab space (4 => 2), no further changes --- .mailmap | 3 ++- lib/DBIx/Class.pm | 2 ++ lib/DBIx/Class/ResultSet.pm | 18 +++++++++--------- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.mailmap b/.mailmap index 7fa161b21..09f5e6e49 100644 --- a/.mailmap +++ b/.mailmap @@ -28,8 +28,9 @@ Jonathan Chu Jose Luis Martinez Matt Phillips Norbert Csongrádi -Roman Filippov Peter Rabbitson +Roman Filippov +Samuel Kaufman Tim Bunce Toby Corkindale Wallace Reis diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 4c7e298b7..b1a0270f2 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -528,6 +528,8 @@ semifor: Marc Mims SineSwiper: Brendan Byrd +skaufman: Samuel Kaufman + solomon: Jared Johnson spb: Stephen Bennett diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 4496efd03..84f9e68e4 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -172,24 +172,24 @@ See: L, L, L, L, L. If you want to make your custom ResultSet classes with L, use a template similar to: - package MyApp::Schema::ResultSet::User; + package MyApp::Schema::ResultSet::User; - use Moose; - use MooseX::NonMoose; - extends 'DBIx::Class::ResultSet'; + use Moose; + use MooseX::NonMoose; + extends 'DBIx::Class::ResultSet'; - sub BUILDARGS { $_[2] } + sub BUILDARGS { $_[2] } - ...your code... + ...your code... - __PACKAGE__->meta->make_immutable; + __PACKAGE__->meta->make_immutable; - 1; + 1; The L is necessary so that the L constructor does not clash with the regular ResultSet constructor. Alternatively, you can use: - __PACKAGE__->meta->make_immutable(inline_constructor => 0); + __PACKAGE__->meta->make_immutable(inline_constructor => 0); The L is necessary because the signature of the ResultSet C is C<< ->new($source, \%args) >>. From a0a0da0ae382af82fc832d7eff56082343e29ddf Mon Sep 17 00:00:00 2001 From: skaufman Date: Sat, 28 Jun 2014 22:33:27 -0400 Subject: [PATCH 073/548] Better example/documentation of custom resultset classes --- lib/DBIx/Class/ResultSet.pm | 42 ++++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 84f9e68e4..0e0aa63fc 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -167,7 +167,47 @@ Which is the same as: See: L, L, L, L, L. -=head2 Custom ResultSet classes using Moose +=head2 Custom ResultSet classes + +To add methods to your resultsets, you can subclass L, similar to: + + package MyApp::Schema::ResultSet::User; + + use strict; + use warnings; + + use base 'DBIx::Class::ResultSet'; + + sub active { + my $self = shift; + $self->search({ $self->current_source_alias . '.active' => 1 }); + } + + sub unverified { + my $self = shift; + $self->search({ $self->current_source_alias . '.verified' => 0 }); + } + + sub created_n_days_ago { + my ($self, $days_ago) = @_; + $self->search({ + $self->current_source_alias . 
'.create_date' => { + '<=', + $self->result_source->schema->storage->datetime_parser->format_datetime( + DateTime->now( time_zone => 'UTC' )->subtract( days => $days_ago ) + )} + }); + } + + sub users_to_warn { shift->active->unverified->created_n_days_ago(7) } + + 1; + +See L on how DBIC can discover and +automatically attach L-specific +L classes. + +=head3 ResultSet subclassing with Moose If you want to make your custom ResultSet classes with L, use a template similar to: From 01bed9ce7e414e27063236ca01f16522feee9dc5 Mon Sep 17 00:00:00 2001 From: skaufman Date: Sat, 28 Jun 2014 22:33:27 -0400 Subject: [PATCH 074/548] Better (and much more precise) explanation of Moose/Moo subclassing --- lib/DBIx/Class/ResultSet.pm | 36 ++++++++++++++++++++++++++++-------- 1 file changed, 28 insertions(+), 8 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 0e0aa63fc..359d692d1 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -207,10 +207,31 @@ See L on how DBIC can discover and automatically attach L-specific L classes. -=head3 ResultSet subclassing with Moose +=head3 ResultSet subclassing with Moose and similar constructor-providers -If you want to make your custom ResultSet classes with L, use a template -similar to: +Using L or L in your ResultSet classes is usually overkill, but +you may find it useful if your ResultSets contain a lot of business logic +(e.g. C, C, etc) or if you just prefer to organize +your code via roles. + +In order to write custom ResultSet classes with L you need to use the +following template. The L is necessary due to the +unusual signature of the L C<< ->new($source, \%args) >>. + + use Moo; + extends 'DBIx::Class::ResultSet'; + sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_ + + ...your code... + + 1; + +If you want to build your custom ResultSet classes with L, you need +a similar, though a little more elaborate template in order to interface the +inlining of the L-provided +L, +with the DBIC one. package MyApp::Schema::ResultSet::User; @@ -218,7 +239,7 @@ similar to: use MooseX::NonMoose; extends 'DBIx::Class::ResultSet'; - sub BUILDARGS { $_[2] } + sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_ ...your code... @@ -227,13 +248,12 @@ similar to: 1; The L is necessary so that the L constructor does not -clash with the regular ResultSet constructor. Alternatively, you can use: +entirely overwrite the DBIC one (in contrast L does this automatically). +Alternatively, you can skip L and get by with just L +instead by doing: __PACKAGE__->meta->make_immutable(inline_constructor => 0); -The L is necessary because the -signature of the ResultSet C is C<< ->new($source, \%args) >>. 
- =head1 METHODS =head2 new From 68367f1f7fb5dd2d13931ec85c3a9bbb415f8359 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 00:34:24 +0200 Subject: [PATCH 075/548] Expand closed STDERR test, check actual exception --- t/storage/debug.t | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/t/storage/debug.t b/t/storage/debug.t index d16e1292e..ce85bc58f 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -4,6 +4,7 @@ no warnings 'once'; use Test::More; use Test::Exception; +use Try::Tiny; use lib qw(t/lib); use DBICTest; use Path::Class qw/file/; @@ -53,12 +54,21 @@ END { } open(STDERRCOPY, '>&STDERR'); -close(STDERR); -dies_ok { + +# STDERR will be closed, no T::B diag in blocks +my $exception = try { + close(STDERR); $schema->resultset('CD')->search({})->count; -} 'Died on closed FH'; +} catch { + $_ +} finally { + # restore STDERR + open(STDERR, '>&STDERRCOPY'); +}; + +like $exception, qr/\QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)/; + -open(STDERR, '>&STDERRCOPY'); # test debugcb and debugobj protocol { From e9f71ab2a49f61024a982fd4ee6f6351fb283c6a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 00:45:32 +0200 Subject: [PATCH 076/548] Stop permanently enabling autoflush on the debug filehandle --- lib/DBIx/Class/Storage/Statistics.pm | 3 +-- t/storage/debug.t | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index 6c77ffbb8..1ee529913 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -67,7 +67,6 @@ sub debugfh { or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!"); } - $fh->autoflush(); $self->_debugfh($fh); } @@ -85,7 +84,7 @@ sub print { return if $self->silence; - $self->debugfh->print($msg); + $self->debugfh->printflush($msg); } =head2 silence diff --git a/t/storage/debug.t b/t/storage/debug.t index ce85bc58f..77f7e423f 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -23,7 +23,6 @@ $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new); ok ( $schema->storage->debug(1), 'debug' ); $schema->storage->debugfh($lfn->openw); -$schema->storage->debugfh->autoflush(1); $schema->resultset('CD')->count; my @loglines = $lfn->slurp; From 9d522a4e02ee0bc2278f69605ccfea6b046e9b40 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 00:57:20 +0200 Subject: [PATCH 077/548] IFF debug goes to STDERR by default - silence possible wide-char warnings --- Changes | 2 ++ lib/DBIx/Class/Storage/Statistics.pm | 13 +++++++++++-- t/storage/debug.t | 20 ++++++++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) diff --git a/Changes b/Changes index 57e61353d..5284c012a 100644 --- a/Changes +++ b/Changes @@ -36,6 +36,8 @@ Revision history for DBIx::Class savepoints on DBD::SQLite < 1.39 * Misc + - IFF DBIC_TRACE output defaults to STDERR we now silence the possible + wide-char warnings if the trace happens to contain unicode - Stop explicitly stringifying objects before passing them to DBI, instead assume that the DBI/DBD combo will take care of things - Remove ::ResultSource::resolve_condition - the underlying machinery diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index 1ee529913..ec47c541b 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -3,9 +3,10 @@ use strict; use warnings; use base 
qw/DBIx::Class/; +use DBIx::Class::_Util 'sigwarn_silencer'; use namespace::clean; -__PACKAGE__->mk_group_accessors(simple => qw/callback _debugfh silence/); +__PACKAGE__->mk_group_accessors(simple => qw/callback _debugfh _defaulted_to_stderr silence/); =head1 NAME @@ -55,6 +56,7 @@ sub debugfh { if (@_) { $self->_debugfh($_[0]); + $self->_defaulted_to_stderr(undef); } elsif (!defined($self->_debugfh())) { my $fh; my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} @@ -65,6 +67,7 @@ sub debugfh { } else { open ($fh, '>&STDERR') or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!"); + $self->_defaulted_to_stderr(1); } $self->_debugfh($fh); @@ -84,7 +87,13 @@ sub print { return if $self->silence; - $self->debugfh->printflush($msg); + my $fh = $self->debugfh; + + # not using 'no warnings' here because all of this can change at runtime + local $SIG{__WARN__} = sigwarn_silencer(qr/^Wide character in print/) + if $self->_defaulted_to_stderr; + + $fh->printflush($msg); } =head2 silence diff --git a/t/storage/debug.t b/t/storage/debug.t index 77f7e423f..ffcb21f01 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -5,6 +5,7 @@ no warnings 'once'; use Test::More; use Test::Exception; use Try::Tiny; +use File::Spec; use lib qw(t/lib); use DBICTest; use Path::Class qw/file/; @@ -67,6 +68,25 @@ my $exception = try { like $exception, qr/\QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)/; +my @warnings; +$exception = try { + local $SIG{__WARN__} = sub { push @warnings, @_ if $_[0] =~ /character/i }; + close STDERR; + open(STDERR, '>', File::Spec->devnull) or die $!; + $schema->resultset('CD')->search({ title => "\x{1f4a9}" })->count; + ''; +} catch { + $_; +} finally { + # restore STDERR + close STDERR; + open(STDERR, '>&STDERRCOPY'); +}; + +die "How did that fail... 
$exception" + if $exception; + +is_deeply(\@warnings, [], 'No warnings with unicode on STDERR'); # test debugcb and debugobj protocol From 4faaf174b94290230f2ebb2cc5077bc11752f69c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 02:46:14 +0200 Subject: [PATCH 078/548] Break out the test tracer into a standalone file --- t/lib/DBICTest/BaseSchema.pm | 22 +++------------------- t/lib/DBICTest/SQLTracerObj.pm | 21 +++++++++++++++++++++ 2 files changed, 24 insertions(+), 19 deletions(-) create mode 100644 t/lib/DBICTest/SQLTracerObj.pm diff --git a/t/lib/DBICTest/BaseSchema.pm b/t/lib/DBICTest/BaseSchema.pm index 5fb9022f3..cdc7a02e7 100644 --- a/t/lib/DBICTest/BaseSchema.pm +++ b/t/lib/DBICTest/BaseSchema.pm @@ -11,35 +11,19 @@ use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistr use DBICTest::Util 'local_umask'; use namespace::clean; -{ - package # moar hide - DBICTest::SQLTracerObj; - use base 'DBIx::Class::Storage::Statistics'; - - sub query_start { push @{$_[0]{sqlbinds}}, [ ($_[1] =~ /^\s*(\S+)/)[0], [ $_[1], @{ $_[2]||[] } ] ] } - - # who the hell came up with this API >:( - for my $txn (qw(begin rollback commit)) { - no strict 'refs'; - *{"txn_$txn"} = sub { push @{$_[0]{sqlbinds}}, [ uc $txn => [ uc $txn ] ] }; - } - - sub svp_begin { push @{$_[0]{sqlbinds}}, [ SAVEPOINT => [ "SAVEPOINT $_[1]" ] ] } - sub svp_release { push @{$_[0]{sqlbinds}}, [ RELEASE_SAVEPOINT => [ "RELEASE $_[1]" ] ] } - sub svp_rollback { push @{$_[0]{sqlbinds}}, [ ROLLBACK_TO_SAVEPOINT => [ "ROLLBACK TO $_[1]" ] ] } - -} - sub capture_executed_sql_bind { my ($self, $cref) = @_; $self->throw_exception("Expecting a coderef to run") unless ref $cref eq 'CODE'; + require DBICTest::SQLTracerObj; + # hack around stupid, stupid API no warnings 'redefine'; local *DBIx::Class::Storage::DBI::_format_for_trace = sub { $_[1] }; Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO; + local $self->storage->{debugcb}; local $self->storage->{debugobj} = my $tracer_obj = DBICTest::SQLTracerObj->new; local $self->storage->{debug} = 1; diff --git a/t/lib/DBICTest/SQLTracerObj.pm b/t/lib/DBICTest/SQLTracerObj.pm new file mode 100644 index 000000000..23baeb34d --- /dev/null +++ b/t/lib/DBICTest/SQLTracerObj.pm @@ -0,0 +1,21 @@ +package # moar hide + DBICTest::SQLTracerObj; + +use strict; +use warnings; + +use base 'DBIx::Class::Storage::Statistics'; + +sub query_start { push @{$_[0]{sqlbinds}}, [ ($_[1] =~ /^\s*(\S+)/)[0], [ $_[1], @{ $_[2]||[] } ] ] } + +# who the hell came up with this API >:( +for my $txn (qw(begin rollback commit)) { + no strict 'refs'; + *{"txn_$txn"} = sub { push @{$_[0]{sqlbinds}}, [ uc $txn => [ uc $txn ] ] }; +} + +sub svp_begin { push @{$_[0]{sqlbinds}}, [ SAVEPOINT => [ "SAVEPOINT $_[1]" ] ] } +sub svp_release { push @{$_[0]{sqlbinds}}, [ RELEASE_SAVEPOINT => [ "RELEASE $_[1]" ] ] } +sub svp_rollback { push @{$_[0]{sqlbinds}}, [ ROLLBACK_TO_SAVEPOINT => [ "ROLLBACK TO $_[1]" ] ] } + +1; From 68b8ba54e535ba5e68e044b3bedec73b20500b72 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 02:05:04 +0200 Subject: [PATCH 079/548] Switch DBIC::Storage::Statistics to Moo (for trial purposes) This is a component which has some subclassing in the wild, use it as a canary to highlight any remaining issues Moo might have within DBIC --- lib/DBIx/Class/Storage/Statistics.pm | 76 ++++++++++++++++------------ t/storage/debug.t | 8 ++- 2 files changed, 52 insertions(+), 32 deletions(-) diff --git a/lib/DBIx/Class/Storage/Statistics.pm 
b/lib/DBIx/Class/Storage/Statistics.pm index ec47c541b..0248936ec 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -2,12 +2,22 @@ package DBIx::Class::Storage::Statistics; use strict; use warnings; -use base qw/DBIx::Class/; +# DO NOT edit away without talking to riba first, he will just put it back +# BEGIN pre-Moo2 import block +BEGIN { + require warnings; + my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all}; + local $ENV{PERL_STRICTURES_EXTRA} = 0; + require Moo; Moo->import; + require Sub::Quote; Sub::Quote->import('quote_sub'); + ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} ); +} +# END pre-Moo2 import block + +extends 'DBIx::Class'; use DBIx::Class::_Util 'sigwarn_silencer'; use namespace::clean; -__PACKAGE__->mk_group_accessors(simple => qw/callback _debugfh _defaulted_to_stderr silence/); - =head1 NAME DBIx::Class::Storage::Statistics - SQL Statistics @@ -26,20 +36,10 @@ for collecting the statistics as discussed in L. =head1 METHODS -=cut - =head2 new Returns a new L object. -=cut -sub new { - my $self = {}; - bless $self, (ref($_[0]) || $_[0]); - - return $self; -} - =head2 debugfh Sets or retrieves the filehandle used for trace/debug output. This should @@ -51,31 +51,45 @@ As getter it will lazily open a filehandle for you if one is not already set. =cut +# FIXME - there ought to be a way to fold this into _debugfh itself +# having the undef re-trigger the builder (or better yet a default +# which can be folded in as a qsub) sub debugfh { my $self = shift; - if (@_) { - $self->_debugfh($_[0]); - $self->_defaulted_to_stderr(undef); - } elsif (!defined($self->_debugfh())) { - my $fh; - my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} - || $ENV{DBIC_TRACE}; - if (defined($debug_env) && ($debug_env =~ /=(.+)$/)) { - open ($fh, '>>', $1) - or die("Cannot open trace file $1: $!"); - } else { - open ($fh, '>&STDERR') - or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!"); - $self->_defaulted_to_stderr(1); - } - - $self->_debugfh($fh); + return $self->_debugfh(@_) if @_; + $self->_debugfh || $self->_build_debugfh; +} + +has _debugfh => ( + is => 'rw', + lazy => 1, + trigger => quote_sub( '$_[0]->_defaulted_to_stderr(undef)' ), + builder => '_build_debugfh', +); + +sub _build_debugfh { + my $fh; + + my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} || $ENV{DBIC_TRACE}; + + if (defined($debug_env) and ($debug_env =~ /=(.+)$/)) { + open ($fh, '>>', $1) + or die("Cannot open trace file $1: $!\n"); + } + else { + open ($fh, '>&STDERR') + or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!\n"); + $_[0]->_defaulted_to_stderr(1); } - $self->_debugfh; + $fh; } +has [qw(_defaulted_to_stderr silence callback)] => ( + is => 'rw', +); + =head2 print Prints the specified string to our debugging filehandle. 
Provided to save our diff --git a/t/storage/debug.t b/t/storage/debug.t index ffcb21f01..f28d4b5f7 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -55,9 +55,11 @@ END { open(STDERRCOPY, '>&STDERR'); +my $exception_line_number; # STDERR will be closed, no T::B diag in blocks my $exception = try { close(STDERR); + $exception_line_number = __LINE__ + 1; # important for test, do not reformat $schema->resultset('CD')->search({})->count; } catch { $_ @@ -66,7 +68,11 @@ my $exception = try { open(STDERR, '>&STDERRCOPY'); }; -like $exception, qr/\QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)/; +like $exception, qr/ + \QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)\E + .+ + \Qat @{[__FILE__]} line $exception_line_number\E$ +/xms; my @warnings; $exception = try { From df4312bcd2871c005751ce8c49c98c6be3692699 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 02:30:52 +0200 Subject: [PATCH 080/548] Clearer name of method/variables before refactoring --- lib/DBIx/Class/ResultSet.pm | 2 +- lib/DBIx/Class/SQLMaker/LimitDialects.pm | 16 ++++++++-------- lib/DBIx/Class/Storage/DBIHacks.pm | 10 +++++----- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 359d692d1..59bd19b41 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -1327,7 +1327,7 @@ sub _construct_results { and $rsrc->schema ->storage - ->_main_source_order_by_portion_is_stable($rsrc, $attrs->{order_by}, $attrs->{where}) + ->_extract_colinfo_of_stable_main_source_order_by_portion($rsrc, $attrs->{order_by}, $attrs->{where}) ) ? 1 : 0 ) unless defined $attrs->{_ordered_for_collapse}; diff --git a/lib/DBIx/Class/SQLMaker/LimitDialects.pm b/lib/DBIx/Class/SQLMaker/LimitDialects.pm index da65b7c2e..18b53298a 100644 --- a/lib/DBIx/Class/SQLMaker/LimitDialects.pm +++ b/lib/DBIx/Class/SQLMaker/LimitDialects.pm @@ -543,7 +543,7 @@ sub _GenericSubQ { . 'root-table-based order criteria.' ); - my $usable_order_ci = $root_rsrc->storage->_main_source_order_by_portion_is_stable( + my $usable_order_colinfo = $root_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion( $root_rsrc, $supplied_order, $rs_attrs->{where}, @@ -562,14 +562,14 @@ sub _GenericSubQ { }; # truncate to what we'll use - $#order_bits = ( (keys %$usable_order_ci) - 1 ); + $#order_bits = ( (keys %$usable_order_colinfo) - 1 ); # @order_bits likely will come back quoted (due to how the prefetch # rewriter operates # Hence supplement the column_info lookup table with quoted versions if ($self->quote_char) { - $usable_order_ci->{$self->_quote($_)} = $usable_order_ci->{$_} - for keys %$usable_order_ci; + $usable_order_colinfo->{$self->_quote($_)} = $usable_order_colinfo->{$_} + for keys %$usable_order_colinfo; } # calculate the condition @@ -584,15 +584,15 @@ sub _GenericSubQ { ($bit, my $is_desc) = $self->_split_order_chunk($bit); push @is_desc, $is_desc; - push @unqualified_names, $usable_order_ci->{$bit}{-colname}; - push @qualified_names, $usable_order_ci->{$bit}{-fq_colname}; + push @unqualified_names, $usable_order_colinfo->{$bit}{-colname}; + push @qualified_names, $usable_order_colinfo->{$bit}{-fq_colname}; - push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_ci->{$bit}{-fq_colname} }; + push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_colinfo->{$bit}{-fq_colname} }; }; my (@where_cond, @skip_colpair_stack); for my $i (0 .. 
$#order_bits) { - my $ci = $usable_order_ci->{$order_bits[$i]}; + my $ci = $usable_order_colinfo->{$order_bits[$i]}; my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $root_alias); my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } }; diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 3164e5a4f..1bb922499 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -887,13 +887,13 @@ sub _order_by_is_stable { my @cols = ( ( map { $_->[0] } $self->_extract_order_criteria($order_by) ), ( $where ? @{ $self->_extract_fixed_condition_columns($where) || [] } : () ), - ) or return undef; + ) or return 0; my $colinfo = $self->_resolve_column_info($ident, \@cols); return keys %$colinfo ? $self->_columns_comprise_identifying_set( $colinfo, \@cols ) - : undef + : 0 ; } @@ -909,14 +909,14 @@ sub _columns_comprise_identifying_set { return 1 if $src->_identifying_column_set($_); } - return undef; + return 0; } -# this is almost identical to the above, except it accepts only +# this is almost similar to _order_by_is_stable, except it takes # a single rsrc, and will succeed only if the first portion of the order # by is stable. # returns that portion as a colinfo hashref on success -sub _main_source_order_by_portion_is_stable { +sub _extract_colinfo_of_stable_main_source_order_by_portion { my ($self, $main_rsrc, $order_by, $where) = @_; die "Huh... I expect a blessed result_source..." From 9cc3585d8b82799078cb292d97a90a0d952089d9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 06:18:15 +0200 Subject: [PATCH 081/548] Pass the main rsrc in attrs, instead of trying to fish it out later --- lib/DBIx/Class/ResultSet.pm | 2 +- lib/DBIx/Class/SQLMaker/LimitDialects.pm | 26 ++++++++++++------------ lib/DBIx/Class/Storage/DBI.pm | 10 ++------- 3 files changed, 16 insertions(+), 22 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 59bd19b41..8208d1649 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -3393,7 +3393,7 @@ sub _resolved_attrs { return $self->{_attrs} if $self->{_attrs}; my $attrs = { %{ $self->{attrs} || {} } }; - my $source = $self->result_source; + my $source = $attrs->{result_source} = $self->result_source; my $alias = $attrs->{alias}; $self->throw_exception("Specifying distinct => 1 in conjunction with collapse => 1 is unsupported") diff --git a/lib/DBIx/Class/SQLMaker/LimitDialects.pm b/lib/DBIx/Class/SQLMaker/LimitDialects.pm index 18b53298a..186122164 100644 --- a/lib/DBIx/Class/SQLMaker/LimitDialects.pm +++ b/lib/DBIx/Class/SQLMaker/LimitDialects.pm @@ -278,7 +278,7 @@ EOS if ( $rs_attrs->{order_by} and - $rs_attrs->{_rsroot_rsrc}->storage->_order_by_is_stable( + $rs_attrs->{result_source}->storage->_order_by_is_stable( @{$rs_attrs}{qw/from order_by where/} ) ) { @@ -331,7 +331,7 @@ sub _prep_for_skimming_limit { if ($sq_attrs->{order_by_requested}) { $self->throw_exception ( 'Unable to safely perform "skimming type" limit with supplied unstable order criteria' - ) unless ($rs_attrs->{_rsroot_rsrc}->schema->storage->_order_by_is_stable( + ) unless ($rs_attrs->{result_source}->schema->storage->_order_by_is_stable( $rs_attrs->{from}, $requested_order, $rs_attrs->{where}, @@ -343,11 +343,11 @@ sub _prep_for_skimming_limit { $inner_order = [ map { "$rs_attrs->{alias}.$_" } ( @{ - $rs_attrs->{_rsroot_rsrc}->_identifying_column_set + 
$rs_attrs->{result_source}->_identifying_column_set || $self->throw_exception(sprintf( 'Unable to auto-construct stable order criteria for "skimming type" limit ' - . "dialect based on source '%s'", $rs_attrs->{_rsroot_rsrc}->name) ); + . "dialect based on source '%s'", $rs_attrs->{result_source}->name) ); } ) ]; } @@ -532,7 +532,7 @@ Currently used by B, due to lack of any other option. sub _GenericSubQ { my ($self, $sql, $rs_attrs, $rows, $offset) = @_; - my $root_rsrc = $rs_attrs->{_rsroot_rsrc}; + my $main_rsrc = $rs_attrs->{result_source}; # Explicitly require an order_by # GenSubQ is slow enough as it is, just emulating things @@ -540,11 +540,11 @@ sub _GenericSubQ { # to shoot their DBA in the foot my $supplied_order = delete $rs_attrs->{order_by} or $self->throw_exception ( 'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, ' - . 'root-table-based order criteria.' + . 'main-table-based order criteria.' ); - my $usable_order_colinfo = $root_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion( - $root_rsrc, + my $usable_order_colinfo = $main_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion( + $main_rsrc, $supplied_order, $rs_attrs->{where}, ) or $self->throw_exception( @@ -574,8 +574,8 @@ sub _GenericSubQ { # calculate the condition my $count_tbl_alias = 'rownum__emulation'; - my $root_alias = $rs_attrs->{alias}; - my $root_tbl_name = $root_rsrc->name; + my $main_alias = $rs_attrs->{alias}; + my $main_tbl_name = $main_rsrc->name; my (@unqualified_names, @qualified_names, @is_desc, @new_order_by); @@ -594,7 +594,7 @@ sub _GenericSubQ { for my $i (0 .. $#order_bits) { my $ci = $usable_order_colinfo->{$order_bits[$i]}; - my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $root_alias); + my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $main_alias); my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } }; push @skip_colpair_stack, [ @@ -683,7 +683,7 @@ WHERE ( SELECT COUNT(*) FROM %s %s $counted_where ) $rownum_cond $inner_order_sql ", map { $self->_quote ($_) } ( $rs_attrs->{alias}, - $root_tbl_name, + $main_tbl_name, $count_tbl_alias, )); } @@ -693,7 +693,7 @@ $inner_order_sql # # Generates inner/outer select lists for various limit dialects # which result in one or more subqueries (e.g. RNO, Top, RowNum) -# Any non-root-table columns need to have their table qualifier +# Any non-main-table columns need to have their table qualifier # turned into a column alias (otherwise names in subqueries clash # and/or lose their source table) # diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 0bf32949f..728d2b41f 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -2413,20 +2413,12 @@ sub _select_args { #) if $orig_attrs->{!args_as_stored_at_the_end_of_this_method!}; my $sql_maker = $self->sql_maker; - my $alias2source = $self->_resolve_ident_sources ($ident); my $attrs = { %$orig_attrs, select => $select, from => $ident, where => $where, - - # limit dialects use this stuff - # yes, some CDBICompat crap does not supply an {alias} >.< - ( $orig_attrs->{alias} and $alias2source->{$orig_attrs->{alias}} ) - ? 
( _rsroot_rsrc => $alias2source->{$orig_attrs->{alias}} ) - : () - , }; # Sanity check the attributes (SQLMaker does it too, but @@ -2517,6 +2509,8 @@ sub _select_args { $orig_attrs->{_last_sqlmaker_alias_map} = $attrs->{_aliastypes}; ### + # my $alias2source = $self->_resolve_ident_sources ($ident); + # # This would be the point to deflate anything found in $attrs->{where} # (and leave $attrs->{bind} intact). Problem is - inflators historically # expect a result object. And all we have is a resultsource (it is trivial From 302d35f8b3225b4da3755dd3e3f45efede130533 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 06:09:40 +0200 Subject: [PATCH 082/548] Teach order_by stability analyzer about search_related There is more stuff we can do here as noted in the FIXME added to the very end of _extract_colinfo_of_stable_main_source_order_by_portion. But for the time being this will do (also see the subsequent tests for extra insanity we could and should add) Tests come in the next commit --- Changes | 2 + lib/DBIx/Class/ResultSet.pm | 2 +- lib/DBIx/Class/SQLMaker/LimitDialects.pm | 22 ++-- lib/DBIx/Class/Storage/DBIHacks.pm | 143 +++++++++++------------ 4 files changed, 86 insertions(+), 83 deletions(-) diff --git a/Changes b/Changes index 5284c012a..3adf7c783 100644 --- a/Changes +++ b/Changes @@ -19,6 +19,8 @@ Revision history for DBIx::Class - Fix on_connect_* not always firing in some cases - a race condition existed between storage accessor setters and the determine_driver routines, triggering a connection before the set-cycle is finished + - Fix failure to detect stable order criteria when in iterator + mode of a has_many prefetch off a search_related chain - Prevent erroneous database hit when accessing prefetched related resultsets with no rows - Fix incorrect handling of custom relationship conditions returning diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 8208d1649..6f39723cd 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -1327,7 +1327,7 @@ sub _construct_results { and $rsrc->schema ->storage - ->_extract_colinfo_of_stable_main_source_order_by_portion($rsrc, $attrs->{order_by}, $attrs->{where}) + ->_extract_colinfo_of_stable_main_source_order_by_portion($attrs) ) ? 1 : 0 ) unless defined $attrs->{_ordered_for_collapse}; diff --git a/lib/DBIx/Class/SQLMaker/LimitDialects.pm b/lib/DBIx/Class/SQLMaker/LimitDialects.pm index 186122164..b972809f7 100644 --- a/lib/DBIx/Class/SQLMaker/LimitDialects.pm +++ b/lib/DBIx/Class/SQLMaker/LimitDialects.pm @@ -538,23 +538,31 @@ sub _GenericSubQ { # GenSubQ is slow enough as it is, just emulating things # like in other cases is not wise - make the user work # to shoot their DBA in the foot - my $supplied_order = delete $rs_attrs->{order_by} or $self->throw_exception ( + $self->throw_exception ( 'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, ' . 'main-table-based order criteria.' - ); + ) unless $rs_attrs->{order_by}; my $usable_order_colinfo = $main_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion( - $main_rsrc, - $supplied_order, - $rs_attrs->{where}, - ) or $self->throw_exception( - 'Generic Subquery Limit can not work with order criteria based on sources other than the current one' + $rs_attrs + ); + + $self->throw_exception( + 'Generic Subquery Limit can not work with order criteria based on sources other than the main one' + ) if ( + ! 
keys %{$usable_order_colinfo||{}} + or + grep + { $_->{-source_alias} ne $rs_attrs->{alias} } + (values %$usable_order_colinfo) ); ### ### ### we need to know the directions after we figured out the above - reextract *again* ### this is eyebleed - trying to get it to work at first + my $supplied_order = delete $rs_attrs->{order_by}; + my @order_bits = do { local $self->{quote_char}; local $self->{order_bind}; diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 1bb922499..d78ab7483 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -784,31 +784,9 @@ sub _resolve_column_info { sub _inner_join_to_node { my ($self, $from, $alias) = @_; - # subqueries and other oddness are naturally not supported - return $from if ( - ref $from ne 'ARRAY' - || - @$from <= 1 - || - ref $from->[0] ne 'HASH' - || - ! $from->[0]{-alias} - || - $from->[0]{-alias} eq $alias # this last bit means $alias is the head of $from - nothing to do - ); - - # find the current $alias in the $from structure - my $switch_branch; - JOINSCAN: - for my $j (@{$from}[1 .. $#$from]) { - if ($j->[0]{-alias} eq $alias) { - $switch_branch = $j->[0]{-join_path}; - last JOINSCAN; - } - } + my $switch_branch = $self->_find_join_path_to_node($from, $alias); - # something else went quite wrong - return $from unless $switch_branch; + return $from unless @{$switch_branch||[]}; # So it looks like we will have to switch some stuff around. # local() is useless here as we will be leaving the scope @@ -836,6 +814,29 @@ sub _inner_join_to_node { return \@new_from; } +sub _find_join_path_to_node { + my ($self, $from, $target_alias) = @_; + + # subqueries and other oddness are naturally not supported + return undef if ( + ref $from ne 'ARRAY' + || + ref $from->[0] ne 'HASH' + || + ! defined $from->[0]{-alias} + ); + + # no path - the head is the alias + return [] if $from->[0]{-alias} eq $target_alias; + + for my $i (1 .. $#$from) { + return $from->[$i][0]{-join_path} if ( ($from->[$i][0]{-alias}||'') eq $target_alias ); + } + + # something else went quite wrong + return undef; +} + sub _extract_order_criteria { my ($self, $order_by, $sql_maker) = @_; @@ -917,70 +918,62 @@ sub _columns_comprise_identifying_set { # by is stable. # returns that portion as a colinfo hashref on success sub _extract_colinfo_of_stable_main_source_order_by_portion { - my ($self, $main_rsrc, $order_by, $where) = @_; + my ($self, $attrs) = @_; - die "Huh... I expect a blessed result_source..." - if ref($main_rsrc) eq 'ARRAY'; + my $nodes = $self->_find_join_path_to_node($attrs->{from}, $attrs->{alias}); + + return unless defined $nodes; my @ord_cols = map { $_->[0] } - ( $self->_extract_order_criteria($order_by) ) + ( $self->_extract_order_criteria($attrs->{order_by}) ) ; return unless @ord_cols; - my $colinfos = $self->_resolve_column_info($main_rsrc); + my $valid_aliases = { map { $_ => 1 } ( + $attrs->{from}[0]{-alias}, + map { values %$_ } @$nodes, + ) }; - for (0 .. $#ord_cols) { - if ( - ! 
$colinfos->{$ord_cols[$_]} - or - $colinfos->{$ord_cols[$_]}{-result_source} != $main_rsrc - ) { - $#ord_cols = $_ - 1; - last; - } - } + my $colinfos = $self->_resolve_column_info($attrs->{from}); - # we just truncated it above - return unless @ord_cols; + my ($colinfos_to_return, $seen_main_src_cols); - my $order_portion_ci = { map { - $colinfos->{$_}{-colname} => $colinfos->{$_}, - $colinfos->{$_}{-fq_colname} => $colinfos->{$_}, - } @ord_cols }; + for my $col (@ord_cols) { + # if order criteria is unresolvable - there is nothing we can do + my $colinfo = $colinfos->{$col} or last; - # since all we check here are the start of the order_by belonging to the - # top level $rsrc, a present identifying set will mean that the resultset - # is ordered by its leftmost table in a stable manner - # - # RV of _identifying_column_set contains unqualified names only - my $unqualified_idset = $main_rsrc->_identifying_column_set({ - ( $where ? %{ - $self->_resolve_column_info( - $main_rsrc, $self->_extract_fixed_condition_columns($where)||[] - ) - } : () ), - %$order_portion_ci - }) or return; - - my $ret_info; - my %unqualified_idcols_from_order = map { - $order_portion_ci->{$_} ? ( $_ => $order_portion_ci->{$_} ) : () - } @$unqualified_idset; - - # extra optimization - cut the order_by at the end of the identifying set - # (just in case the user was stupid and overlooked the obvious) - for my $i (0 .. $#ord_cols) { - my $col = $ord_cols[$i]; - my $unqualified_colname = $order_portion_ci->{$col}{-colname}; - $ret_info->{$col} = { %{$order_portion_ci->{$col}}, -idx_in_order_subset => $i }; - delete $unqualified_idcols_from_order{$ret_info->{$col}{-colname}}; - - # we didn't reach the end of the identifying portion yet - return $ret_info unless keys %unqualified_idcols_from_order; + # if we reached the end of the allowed aliases - also nothing we can do + last unless $valid_aliases->{$colinfo->{-source_alias}}; + + $colinfos_to_return->{$col} = $colinfo; + + $seen_main_src_cols->{$colinfo->{-colname}} = 1 + if $colinfo->{-source_alias} eq $attrs->{alias}; } - die 'How did we get here...'; + # FIXME the condition may be singling out things on its own, so we + # conceivable could come back wi "stable-ordered by nothing" + # not confient enough in the parser yet, so punt for the time being + return unless $seen_main_src_cols; + + my $main_src_fixed_cols_from_cond = [ $attrs->{where} + ? ( + map + { + ( $colinfos->{$_} and $colinfos->{$_}{-source_alias} eq $attrs->{alias} ) + ? $colinfos->{$_}{-colname} + : () + } + @{$self->_extract_fixed_condition_columns($attrs->{where}) || []} + ) + : () + ]; + + return $attrs->{result_source}->_identifying_column_set([ + keys %$seen_main_src_cols, + @$main_src_fixed_cols_from_cond, + ]) ? 
$colinfos_to_return : (); } # Attempts to flatten a passed in SQLA condition as much as possible towards From b7dc8a5c05ee42134bba1e2a1a6b5f4cda8d8384 Mon Sep 17 00:00:00 2001 From: Brendan Byrd Date: Wed, 12 Feb 2014 09:32:45 -0500 Subject: [PATCH 083/548] Initial (now passing) tests prompting the order_by analyzer revamp 302d35f8 --- t/prefetch/multiple_hasmany_torture.t | 28 +++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/t/prefetch/multiple_hasmany_torture.t b/t/prefetch/multiple_hasmany_torture.t index 1623937df..d3998e098 100644 --- a/t/prefetch/multiple_hasmany_torture.t +++ b/t/prefetch/multiple_hasmany_torture.t @@ -6,6 +6,7 @@ use Test::Deep; use Test::Exception; use lib qw(t/lib); use DBICTest; +use DBIx::Class::_Util 'sigwarn_silencer'; my $schema = DBICTest->init_schema(); @@ -124,4 +125,31 @@ my $art_rs_prefetch = $art_rs->search({}, { cmp_deeply( $art_rs_prefetch->next, $artist_with_extras ); +for my $order ( + [ [qw( cds.cdid tracks.position )] ], + + [ [qw( artistid tracks.cd tracks.position )], + 'we need to proxy the knowledge from the collapser that tracks.cd is a stable sorter for CDs' ], +) { + + my $cds_rs_prefetch = $art_rs->related_resultset('cds')->search({}, { + order_by => [ $order->[0], qw(producer.name tracks_2.position) ], + result_class => 'DBIx::Class::ResultClass::HashRefInflator', + prefetch => [ + { tracks => { cd_single => 'tracks' } }, + { cd_to_producer => 'producer' }, + ], + }); + + local $SIG{__WARN__} = sigwarn_silencer(qr/Unable to properly collapse has_many results/) if $order->[1]; + + cmp_deeply( $cds_rs_prefetch->next, $artist_with_extras->{cds}[0], '1st cd structure matches' ); + cmp_deeply( $cds_rs_prefetch->next, $artist_with_extras->{cds}[1], '2nd cd structure matches' ); + + # INTERNALS! (a.k.a boars, gore and whores) DO NOT CARGOCULT!!! + local $TODO = $order->[1] if $order->[1]; + ok( $cds_rs_prefetch->_resolved_attrs->{_ordered_for_collapse}, 'ordered_for_collapse detected properly' ); +} + + done_testing; From 7cbd6cbd07bd62b29dfa60b361c774626b06e967 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 06:58:03 +0200 Subject: [PATCH 084/548] Revert optimistic and sloppy changes from 3705e3b2 - DBI does not always DTRT MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In addition I rebroke the issue from RT#79576, ffs man pay attention general question - does DBI guarantee that objects with stringification overload will get their stringification called or is this up to the DBD (and thus will vary) ribasushi, I had an issue in DBD::ODBC ages ago with this - looking for it now rt 78838 - bind_param does not correctly stringify blessed objects when connected to MS SQL Server, magic was not being applied in DBD::ODBC case so I think the answer to your question is DBI does not but DBDs should if they are written correctly I wonder why DBI does not ribasushi: no explicit ‘guarantee’ but I think any place it doesn’t is probably a bug. timbunce_: given mje had to do magic in his DBD, I suspect DBI has a "hole" somewhere then no? 
basically I was looking into removing the explicit stringifications in DBIC, hence the question if there is consensus that DBI ought to do it all on its own, we could test for it, fix it up, and then I disable the checks when I detect a sufficiently advanced DBI.pm yes, ribasushi: “… we could test for it, fix it up, and then I disable the checks when I detect a sufficiently advanced DBI.pm or something” :) nod ;) --- Changes | 2 -- lib/DBIx/Class/Storage/DBI.pm | 19 ++++++++++++++++++- lib/DBIx/Class/_Util.pm | 4 +++- t/100populate.t | 2 +- 4 files changed, 22 insertions(+), 5 deletions(-) diff --git a/Changes b/Changes index 3adf7c783..0b56392e0 100644 --- a/Changes +++ b/Changes @@ -40,8 +40,6 @@ Revision history for DBIx::Class * Misc - IFF DBIC_TRACE output defaults to STDERR we now silence the possible wide-char warnings if the trace happens to contain unicode - - Stop explicitly stringifying objects before passing them to DBI, - instead assume that the DBI/DBD combo will take care of things - Remove ::ResultSource::resolve_condition - the underlying machinery is in flux, and the method has had a deprecation warning for 5 years diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 728d2b41f..0e579e95e 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1895,9 +1895,16 @@ sub _bind_sth_params { ); } else { + # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD + my $v = ( length ref $bind->[$i][1] and is_plain_value $bind->[$i][1] ) + ? "$bind->[$i][1]" + : $bind->[$i][1] + ; + $sth->bind_param( $i + 1, - $bind->[$i][1], + # The temp-var is CRUCIAL - DO NOT REMOVE IT, breaks older DBD::SQLite RT#79576 + $v, $bind_attrs->[$i], ); } @@ -2036,6 +2043,16 @@ sub insert_bulk { my @col_range = (0..$#$cols); + # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD + # For the time being forcibly stringify whatever is stringifiable + # ResultSet::populate() hands us a copy - safe to mangle + for my $r (0 .. $#$data) { + for my $c (0 .. $#{$data->[$r]}) { + $data->[$r][$c] = "$data->[$r][$c]" + if ( length ref $data->[$r][$c] and is_plain_value $data->[$r][$c] ); + } + } + my $colinfos = $source->columns_info($cols); local $self->{_autoinc_supplied_for_op} = diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index ad438e755..0ab7ac926 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -177,7 +177,9 @@ sub is_plain_value ($) { # intersted in are much more limited than the fullblown thing, and # this is a relatively hot piece of code ( - # either has stringification which DBI prefers out of the box + # FIXME - DBI needs fixing to stringify regardless of DBD + # + # either has stringification which DBI SHOULD prefer out of the box #first { *{$_ . '::(""'}{CODE} } @{ mro::get_linear_isa( ref $_[0] ) } overload::Method($_[0], '""') or diff --git a/t/100populate.t b/t/100populate.t index 27eb3effc..4a3f0ac7a 100644 --- a/t/100populate.t +++ b/t/100populate.t @@ -416,7 +416,7 @@ warnings_like { ) ? 
() # one unique for populate() and create() each - : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 3 + : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 2 ], 'Data integrity warnings as planned'; lives_ok { From 7edb97b0949d66fd79e6f222e08da56cd5ca92fd Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 8 Jul 2014 07:42:06 +0200 Subject: [PATCH 085/548] Older perls get confused by this construct - rewrap --- t/lib/DBICTest.pm | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/t/lib/DBICTest.pm b/t/lib/DBICTest.pm index 432eb8714..7bc3dde86 100644 --- a/t/lib/DBICTest.pm +++ b/t/lib/DBICTest.pm @@ -259,10 +259,16 @@ sub _database { } sub __mk_disconnect_guard { - return if DBIx::Class::_ENV_::PEEPEENESS; # leaks handles, delaying DESTROY, can't work right my $db_file = shift; - return unless -f $db_file; + + return if ( + # this perl leaks handles, delaying DESTROY, can't work right + DBIx::Class::_ENV_::PEEPEENESS + or + ! -f $db_file + ); + my $orig_inode = (stat($db_file))[1] or return; From facd0e8e687648e52f29df73d62d9c993b9b19d1 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Jul 2014 11:51:00 +0200 Subject: [PATCH 086/548] Switch UNRESOLVABLE_CONDITION to an _Util constant --- lib/DBIx/Class/Relationship/Base.pm | 5 +++-- lib/DBIx/Class/ResultSet.pm | 4 ++-- lib/DBIx/Class/ResultSource.pm | 15 +++++++++++---- lib/DBIx/Class/_Util.pm | 3 +++ 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 906307fec..e2f6224fc 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -7,6 +7,7 @@ use base qw/DBIx::Class/; use Scalar::Util qw/weaken blessed/; use Try::Tiny; +use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use namespace::clean; =head1 NAME @@ -493,7 +494,7 @@ sub related_resultset { } catch { $self->throw_exception ($_) if $self->in_storage; - $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION; # RV, no return() + UNRESOLVABLE_CONDITION; # RV, no return() }; # keep in mind that the following if() block is part of a do{} - no return()s!!! @@ -522,7 +523,7 @@ sub related_resultset { # FIXME - this conditional doesn't seem correct - got to figure out # at some point what it does. Also the entire UNRESOLVABLE_CONDITION # business seems shady - we could simply not query *at all* - if ($cond eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) { + if ($cond eq UNRESOLVABLE_CONDITION) { my $reverse = $rsrc->reverse_relationship_info($rel); foreach my $rev_rel (keys %$reverse) { if ($reverse->{$rev_rel}{attrs}{accessor} && $reverse->{$rev_rel}{attrs}{accessor} eq 'multi') { diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 6f39723cd..f817d3a23 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -7,7 +7,7 @@ use DBIx::Class::Carp; use DBIx::Class::ResultSetColumn; use Scalar::Util qw/blessed weaken reftype/; use DBIx::Class::_Util qw( - fail_on_internal_wantarray is_plain_value is_literal_value + fail_on_internal_wantarray is_plain_value is_literal_value UNRESOLVABLE_CONDITION ); use Try::Tiny; use Data::Compare (); # no imports!!! guard against insane architecture @@ -2492,7 +2492,7 @@ sub _merge_with_rscond { if (! 
defined $self->{cond}) { # just massage $data below } - elsif ($self->{cond} eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) { + elsif ($self->{cond} eq UNRESOLVABLE_CONDITION) { %new_data = %{ $self->{attrs}{related_objects} || {} }; # nothing might have been inserted yet @cols_from_relations = keys %new_data; } diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index ce88cbb89..81ce7d89b 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -9,7 +9,7 @@ use DBIx::Class::ResultSet; use DBIx::Class::ResultSourceHandle; use DBIx::Class::Carp; -use DBIx::Class::_Util 'is_literal_value'; +use DBIx::Class::_Util qw(is_literal_value UNRESOLVABLE_CONDITION); use Devel::GlobalDestruction; use Try::Tiny; use List::Util 'first'; @@ -1745,7 +1745,14 @@ sub _resolve_condition { return wantarray ? @res : $res[0]; } -our $UNRESOLVABLE_CONDITION = \ '1 = 0'; +# Keep this indefinitely. There is evidence of both CPAN and +# darkpan using it, and there isn't much harm in an extra var +# anyway. +our $UNRESOLVABLE_CONDITION = UNRESOLVABLE_CONDITION; +# YES I KNOW THIS IS EVIL +# it is there to save darkpan from themselves, since internally +# we are moving to a constant +Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); # Resolves the passed condition to a concrete query fragment and a flag # indicating whether this is a cross-table condition. Also an optional @@ -1901,7 +1908,7 @@ sub _resolve_relationship_condition { $obj_cols->[$i], ) if $obj->in_storage; - return $UNRESOLVABLE_CONDITION; + return UNRESOLVABLE_CONDITION; } else { $cond->{"$plain_alias.$plain_cols->[$i]"} = $obj->get_column($obj_cols->[$i]); @@ -1913,7 +1920,7 @@ sub _resolve_relationship_condition { } elsif (ref $args->{condition} eq 'ARRAY') { if (@{$args->{condition}} == 0) { - return $UNRESOLVABLE_CONDITION; + return UNRESOLVABLE_CONDITION; } elsif (@{$args->{condition}} == 1) { return $self->_resolve_relationship_condition({ diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 0ab7ac926..e281b6680 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -59,8 +59,11 @@ our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray refcount hrefaddr is_exception is_plain_value is_literal_value + UNRESOLVABLE_CONDITION ); +use constant UNRESOLVABLE_CONDITION => \ '1 = 0'; + sub sigwarn_silencer ($) { my $pattern = shift; From 8e40a627f9c94df8ae46c1c1abc6f7abdb3fdfdf Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Jul 2014 12:32:20 +0200 Subject: [PATCH 087/548] Refactor ::DBIHacks::_extract_fixed_condition_columns (sequel to 8d005ad9) Instead of just returning an array of column names, switch to a hashref that can double as an inferred-value bag. As a bonus deduplicates and folds-in another codepath from ::ResultSet::_merge_with_rscond In the process fixup _collapse_cond to be even more robust in some arrayref corner-cases. 
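(Editorial aside - illustration only, not part of the commit message.) The corner cases in question are of the following kind, mirroring the expectations added to t/sqlmaker/dbihacks_internals.t below, and assuming $storage is a connected DBIx::Class storage instance:

    $storage->_collapse_cond({ -and => [ artistid => 1, charfield => undef, { rank => 13 } ] });
    # => { artistid => 1, charfield => undef, rank => 13 }

    $storage->_collapse_cond({ -or => { name => 'Caterwauler McCrae', artistid => 2 } });
    # => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] }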
Now it is guaranteed to return a hashref at all times --- lib/DBIx/Class/ResultSet.pm | 50 ++++--------- lib/DBIx/Class/ResultSource.pm | 11 ++- lib/DBIx/Class/Storage/DBIHacks.pm | 113 +++++++++++++++++------------ t/sqlmaker/dbihacks_internals.t | 75 ++++++++++++++----- 4 files changed, 145 insertions(+), 104 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index f817d3a23..f71bf3834 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -7,7 +7,7 @@ use DBIx::Class::Carp; use DBIx::Class::ResultSetColumn; use Scalar::Util qw/blessed weaken reftype/; use DBIx::Class::_Util qw( - fail_on_internal_wantarray is_plain_value is_literal_value UNRESOLVABLE_CONDITION + fail_on_internal_wantarray UNRESOLVABLE_CONDITION ); use Try::Tiny; use Data::Compare (); # no imports!!! guard against insane architecture @@ -2485,7 +2485,7 @@ sub new_result { sub _merge_with_rscond { my ($self, $data) = @_; - my (%new_data, @cols_from_relations); + my ($implied_data, @cols_from_relations); my $alias = $self->{attrs}{alias}; @@ -2493,43 +2493,25 @@ sub _merge_with_rscond { # just massage $data below } elsif ($self->{cond} eq UNRESOLVABLE_CONDITION) { - %new_data = %{ $self->{attrs}{related_objects} || {} }; # nothing might have been inserted yet - @cols_from_relations = keys %new_data; - } - elsif (ref $self->{cond} ne 'HASH') { - $self->throw_exception( - "Can't abstract implicit construct, resultset condition not a hash" - ); + $implied_data = $self->{attrs}{related_objects}; # nothing might have been inserted yet + @cols_from_relations = keys %{ $implied_data || {} }; } else { - if ($self->{cond}) { - my $implied = $self->_remove_alias( - $self->result_source->schema->storage->_collapse_cond($self->{cond}), - $alias, - ); - - for my $c (keys %$implied) { - my $v = $implied->{$c}; - if ( ! length ref $v or is_plain_value($v) ) { - $new_data{$c} = $v; - } - elsif ( - ref $v eq 'HASH' and keys %$v == 1 and exists $v->{'='} and is_literal_value($v->{'='}) - ) { - $new_data{$c} = $v->{'='}; - } - } - } + my $eqs = $self->result_source->schema->storage->_extract_fixed_condition_columns($self->{cond}, 'consider_nulls'); + $implied_data = { map { + ( ($eqs->{$_}||'') eq UNRESOLVABLE_CONDITION ) ? () : ( $_ => $eqs->{$_} ) + } keys %$eqs }; } - # precedence must be given to passed values over values inherited from - # the cond, so the order here is important. - %new_data = ( - %new_data, - %{ $self->_remove_alias($data, $alias) }, + return ( + { map + { %{ $self->_remove_alias($_, $alias) } } + # precedence must be given to passed values over values inherited from + # the cond, so the order here is important. 
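      # --- editorial aside (illustration only, not part of this patch) ---
      # A minimal sketch of the precedence rule described above, assuming
      # the stock DBICTest schema (a 'CD' source with 'title'/'year'):
      #
      #   my $rs = $schema->resultset('CD')->search({ 'me.year' => 2001 });
      #
      #   # 'year' is inferred from the resultset condition
      #   $rs->new_result({ title => 'Implied' })->year;                 # 2001
      #
      #   # an explicitly passed value overrides the inferred one
      #   $rs->new_result({ title => 'Explicit', year => 1999 })->year;  # 1999
      # --------------------------------------------------------------------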
+ ( $implied_data||(), $data) + }, + \@cols_from_relations ); - - return (\%new_data, \@cols_from_relations); } # _has_resolved_attr diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 81ce7d89b..886f47e10 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -9,7 +9,7 @@ use DBIx::Class::ResultSet; use DBIx::Class::ResultSourceHandle; use DBIx::Class::Carp; -use DBIx::Class::_Util qw(is_literal_value UNRESOLVABLE_CONDITION); +use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use Devel::GlobalDestruction; use Try::Tiny; use List::Util 'first'; @@ -1834,14 +1834,13 @@ sub _resolve_relationship_condition { } # see which parts of the joinfree cond are *NOT* foreign-source-column equalities - my $joinfree_cond_equality_columns = { map - {( $_ => 1 )} - @{ $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond) } - }; + my $joinfree_cond_equality_columns = + $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond, 'consider_nulls'); + @nonvalue_cols = map { $_ =~ /^\Q$joinfree_alias.\E(.+)/ } grep - { ! $joinfree_cond_equality_columns->{$_} } + { ! exists $joinfree_cond_equality_columns->{$_} } keys %$joinfree_cond; return ($joinfree_cond, 0, (@nonvalue_cols ? \@nonvalue_cols : undef)); diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index d78ab7483..ae04942fd 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -16,7 +16,7 @@ use mro 'c3'; use List::Util 'first'; use Scalar::Util 'blessed'; use Sub::Name 'subname'; -use DBIx::Class::_Util qw(is_plain_value is_literal_value); +use DBIx::Class::_Util qw(is_plain_value is_literal_value UNRESOLVABLE_CONDITION); use namespace::clean; # @@ -887,7 +887,7 @@ sub _order_by_is_stable { my @cols = ( ( map { $_->[0] } $self->_extract_order_criteria($order_by) ), - ( $where ? @{ $self->_extract_fixed_condition_columns($where) || [] } : () ), + ( $where ? keys %{ $self->_extract_fixed_condition_columns($where) } : () ), ) or return 0; my $colinfo = $self->_resolve_column_info($ident, \@cols); @@ -965,7 +965,7 @@ sub _extract_colinfo_of_stable_main_source_order_by_portion { ? $colinfos->{$_}{-colname} : () } - @{$self->_extract_fixed_condition_columns($attrs->{where}) || []} + keys %{ $self->_extract_fixed_condition_columns($attrs->{where}) } ) : () ]; @@ -1074,10 +1074,7 @@ sub _collapse_cond { : { $w[0] => undef } ; } - elsif ( ref $w[0] ) { - return \@w; - } - elsif ( @w == 2 ) { + elsif ( @w == 2 and ! ref $w[0]) { if ( ( $w[0]||'' ) =~ /^\-and$/i ) { return (ref $w[1] eq 'HASH' or ref $w[1] eq 'ARRAY') ? $self->_collapse_cond($w[1], (ref $w[1] eq 'ARRAY') ) @@ -1088,14 +1085,16 @@ sub _collapse_cond { return $self->_collapse_cond({ @w }); } } + else { + return { -or => \@w }; + } } else { # not a hash not an array return { '' => $where }; } - # catchall, some of the things above fall through - return $where; + die 'should not get here'; } sub _collapse_cond_unroll_pairs { @@ -1179,48 +1178,72 @@ sub _collapse_cond_unroll_pairs { return @conds; } - -# returns an arrayref of column names which *definitely* have some -# sort of non-nullable *single* equality requested in the given condition -# specification. This is used to figure out if a resultset is -# constrained to a column which is part of a unique constraint, -# which in turn allows us to better predict how ordering will behave -# etc. 
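# --- editorial aside (illustration only, not part of this patch) ---
# The reworked extractor below returns a hashref of column => fixed-value
# pairs instead of an arrayref of column names. A sketch of the intended
# behaviour, assuming a connected DBICTest schema (the cases mirror
# t/sqlmaker/dbihacks_internals.t further down in this patch):
#
#   my $eqs = $schema->storage->_extract_fixed_condition_columns({
#     artistid  => 1,
#     rank      => { '>' => 12 },   # a range, not a fixed value
#     charfield => undef,           # NULL - fixed only when nulls are considered
#   });
#   # $eqs is { artistid => 1 }
#
#   my $eqs_n = $schema->storage->_extract_fixed_condition_columns(
#     { artistid => 1, charfield => undef }, 'consider_nulls'
#   );
#   # $eqs_n is { artistid => 1, charfield => undef }
#
# Contradictory equalities (e.g. artistid => [ -and => 1, 2 ]) come back as
# the UNRESOLVABLE_CONDITION sentinel rather than as a usable value.
# --------------------------------------------------------------------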
+# Analyzes a given condition and attempts to extract all columns +# with a definitive fixed-condition criteria. Returns a hashref +# of k/v pairs suitable to be passed to set_columns(), with a +# MAJOR CAVEAT - multi-value (contradictory) equalities are still +# represented as a reference to the UNRESOVABLE_CONDITION constant +# The reason we do this is that some codepaths only care about the +# codition being stable, as opposed to actually making sense # -# this is a rudimentary, incomplete, and error-prone extractor -# however this is OK - it is conservative, and if we can not find -# something that is in fact there - the stack will recover gracefully +# The normal mode is used to figure out if a resultset is constrained +# to a column which is part of a unique constraint, which in turn +# allows us to better predict how ordering will behave etc. +# +# With the optional "consider_nulls" boolean argument, the function +# is instead used to infer inambiguous values from conditions +# (e.g. the inheritance of resultset conditions on new_result) +# +my $undef_marker = \ do{ my $x = 'undef' }; sub _extract_fixed_condition_columns { - my $self = shift; - my $where_hash = $self->_collapse_cond(shift); - - my $res; - for my $c (keys %$where_hash) { - if (defined (my $v = $where_hash->{$c}) ) { - if ( - ! length ref $v - or - is_plain_value ($v) - or - ( - ref $v eq 'HASH' - and - keys %$v == 1 - and - ref $v->{'='} - and - is_literal_value($v->{'='}) - ) - ) { - $res->{$c} = 1; - } - elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') { - $res->{$_} = 1 for map { @{ $self->_extract_fixed_condition_columns({ $c => $_ }) } } @{$v}[1..$#$v]; + my ($self, $where, $consider_nulls) = @_; + my $where_hash = $self->_collapse_cond($_[1]); + + my $res = {}; + my ($c, $v); + for $c (keys %$where_hash) { + my $vals; + + if (!defined ($v = $where_hash->{$c}) ) { + $vals->{$undef_marker} = $v if $consider_nulls + } + elsif ( + ! length ref $v + or + is_plain_value ($v) + ) { + $vals->{$v} = $v; + } + elsif ( + ref $v eq 'HASH' + and + keys %$v == 1 + and + ref $v->{'='} + and + # do not need to check for plain values - _collapse_cond did it for us + is_literal_value($v->{'='}) + ) { + $vals->{$v->{'='}} = $v->{'='}; + } + elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') { + for ( @{$v}[1..$#$v] ) { + my $subval = $self->_extract_fixed_condition_columns({ $c => $_ }, 'consider nulls'); # always fish nulls out on recursion + next unless exists $subval->{$c}; # didn't find anything + $vals->{defined $subval->{$c} ? $subval->{$c} : $undef_marker} = $subval->{$c}; } } + + if (keys %$vals == 1) { + ($res->{$c}) = (values %$vals) + unless !$consider_nulls and exists $vals->{$undef_marker}; + } + elsif (keys %$vals > 1) { + $res->{$c} = UNRESOLVABLE_CONDITION; + } } - return [ sort keys %$res ]; + $res; } 1; diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 81deb892e..66f0148d5 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -5,6 +5,7 @@ use Test::Warn; use lib qw(t/lib); use DBICTest ':DiffSQL'; +use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use Data::Dumper; @@ -30,67 +31,89 @@ for my $t ( where => { artistid => 1, charfield => undef }, cc_result => { artistid => 1, charfield => undef }, sql => 'WHERE artistid = ? 
AND charfield IS NULL', - efcc_result => [qw( artistid )], + efcc_result => { artistid => 1 }, + efcc_n_result => { artistid => 1, charfield => undef }, }, { where => { -and => [ artistid => 1, charfield => undef, { rank => 13 } ] }, cc_result => { artistid => 1, charfield => undef, rank => 13 }, sql => 'WHERE artistid = ? AND charfield IS NULL AND rank = ?', - efcc_result => [qw( artistid rank )], + efcc_result => { artistid => 1, rank => 13 }, + efcc_n_result => { artistid => 1, charfield => undef, rank => 13 }, }, { where => { -and => [ { artistid => 1, charfield => undef}, { rank => 13 } ] }, cc_result => { artistid => 1, charfield => undef, rank => 13 }, sql => 'WHERE artistid = ? AND charfield IS NULL AND rank = ?', - efcc_result => [qw( artistid rank )], + efcc_result => { artistid => 1, rank => 13 }, + efcc_n_result => { artistid => 1, charfield => undef, rank => 13 }, }, { where => { -and => [ -or => { name => 'Caterwauler McCrae' }, 'rank' ] }, cc_result => { name => 'Caterwauler McCrae', rank => undef }, sql => 'WHERE name = ? AND rank IS NULL', - efcc_result => [qw( name )], + efcc_result => { name => 'Caterwauler McCrae' }, + efcc_n_result => { name => 'Caterwauler McCrae', rank => undef }, }, { where => { -and => [ [ [ artist => {'=' => \'foo' } ] ], { name => \[ '= ?', 'bar' ] } ] }, cc_result => { artist => {'=' => \'foo' }, name => \[ '= ?', 'bar' ] }, sql => 'WHERE artist = foo AND name = ?', - efcc_result => [qw( artist )], + efcc_result => { artist => \'foo' }, }, { where => { -and => [ -or => { name => 'Caterwauler McCrae', artistid => 2 } ] }, cc_result => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] }, sql => 'WHERE artistid = ? OR name = ?', - efcc_result => [], + efcc_result => {}, + }, + { + where => { -or => { name => 'Caterwauler McCrae', artistid => 2 } }, + cc_result => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] }, + sql => 'WHERE artistid = ? OR name = ?', + efcc_result => {}, }, { where => { -and => [ \'foo=bar', [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'} ] }, cc_result => { '' => \'foo=bar', name => 'Caterwauler McCrae', artistid => $num }, sql => 'WHERE foo=bar AND artistid = ? AND name = ?', - efcc_result => [qw( artistid name )], + efcc_result => { name => 'Caterwauler McCrae', artistid => $num }, }, { where => { artistid => [ $num ], rank => [ 13, 2, 3 ], charfield => [ undef ] }, cc_result => { artistid => $num, charfield => undef, rank => [13, 2, 3] }, sql => 'WHERE artistid = ? AND charfield IS NULL AND ( rank = ? OR rank = ? OR rank = ? )', - efcc_result => [qw( artistid )], + efcc_result => { artistid => $num }, + efcc_n_result => { artistid => $num, charfield => undef }, }, { where => { artistid => { '=' => 1 }, rank => { '>' => 12 }, charfield => { '=' => undef } }, cc_result => { artistid => 1, charfield => undef, rank => { '>' => 12 } }, sql => 'WHERE artistid = ? AND charfield IS NULL AND rank > ?', - efcc_result => [qw( artistid )], + efcc_result => { artistid => 1 }, + efcc_n_result => { artistid => 1, charfield => undef }, }, { where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ $num, $num ] } }, cc_result => { artistid => 1, charfield => [-and => { '=' => \'1' }, { '=' => \['?',2] } ], rank => { '=' => [$num, $num] } }, sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? 
)', - efcc_result => [qw( artistid charfield )], + efcc_result => { artistid => 1, charfield => UNRESOLVABLE_CONDITION }, }, { where => { -and => [ artistid => 1, artistid => 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => [ -or => { '=', 2 } ], rank => [-and => undef, { '=', undef }, { '!=', 2 } ] }, cc_result => { artistid => [ -and => 1, 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => 2, rank => [ -and => undef, undef, { '!=', 2 } ] }, sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank IS NULL AND rank IS NULL AND rank != ?', - efcc_result => [qw( artistid charfield name )], + efcc_result => { + artistid => UNRESOLVABLE_CONDITION, + name => 2, + charfield => 2, + }, + efcc_n_result => { + artistid => UNRESOLVABLE_CONDITION, + name => 2, + charfield => 2, + rank => undef, + }, }, { where => { -and => [ @@ -106,24 +129,24 @@ for my $t ( ], }, sql => 'WHERE ( _macro.to LIKE ? OR _wc_macros.to LIKE ? ) AND group.is_active = ? AND me.is_active = ?', - efcc_result => [qw( group.is_active me.is_active )], + efcc_result => { 'group.is_active' => 1, 'me.is_active' => 1 }, }, { where => { artistid => [] }, cc_result => { artistid => [] }, - efcc_result => [], + efcc_result => {}, }, (map { { where => { -and => $_ }, cc_result => undef, - efcc_result => [], + efcc_result => {}, sql => '', }, { where => { -or => $_ }, cc_result => undef, - efcc_result => [], + efcc_result => {}, sql => '', }, } ( @@ -138,14 +161,15 @@ for my $t ( )), # FIXME legacy compat crap, possibly worth undef/dieing in SQLMaker - { where => { artistid => {} }, sql => '', cc_result => undef, efcc_result => [] }, + { where => { artistid => {} }, sql => '', cc_result => undef, efcc_result => {}, efcc_n_result => {} }, # batshit insanity, just to be thorough { where => { -and => [ [ 'artistid' ], [ -and => [ artistid => { '!=', 69 }, artistid => undef, artistid => { '=' => 200 } ]], artistid => [], { -or => [] }, { -and => [] }, [ 'charfield' ], { name => [] }, 'rank' ] }, cc_result => { artistid => [ -and => undef, { '!=', 69 }, undef, 200, [] ], charfield => undef, name => [], rank => undef }, sql => 'WHERE artistid IS NULL AND artistid != ? AND artistid IS NULL AND artistid = ? AND 0=1 AND charfield IS NULL AND 0=1 AND rank IS NULL', - efcc_result => [qw( artistid )], + efcc_result => { artistid => UNRESOLVABLE_CONDITION }, + efcc_n_result => { artistid => UNRESOLVABLE_CONDITION, charfield => undef, rank => undef }, }, # original test from RT#93244 @@ -166,14 +190,21 @@ for my $t ( 'me.title' => 'Spoonful of bees', }, sql => 'WHERE LOWER(me.title) LIKE ? AND me.title = ?', - efcc_result => [qw( me.title )], + efcc_result => { 'me.title' => 'Spoonful of bees' }, } ) { for my $w ( $t->{where}, [ -and => $t->{where} ], - ( keys %{$t->{where}} <= 1 ) ? [ %{$t->{where}} ] : () + ( keys %{$t->{where}} <= 1 ? [ %{$t->{where}} ] : () ), + ( (keys %{$t->{where}} == 1 and $t->{where}{-or}) + ? ( ref $t->{where}{-or} eq 'HASH' + ? 
[ map { $_ => $t->{where}{-or}{$_} } sort keys %{$t->{where}{-or}} ] + : $t->{where}{-or} + ) + : () + ), ) { my $name = do { local ($Data::Dumper::Indent, $Data::Dumper::Terse, $Data::Dumper::Sortkeys) = (0, 1, 1); Dumper $w }; @@ -201,6 +232,12 @@ for my $t ( $t->{efcc_result}, "Expected fixed_condition produced on $name", ); + + is_deeply( + $schema->storage->_extract_fixed_condition_columns($w, 'consider_nulls'), + $t->{efcc_n_result}, + "Expected fixed_condition including NULLs produced on $name", + ) if $t->{efcc_n_result}; } } From ee3337752d314667ec575398bb5007670db9bfbe Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Jul 2014 13:52:45 +0200 Subject: [PATCH 088/548] Move a CDBI escape to the CDBI class hierarchy --- lib/DBIx/Class/CDBICompat/Relationships.pm | 4 ++++ lib/DBIx/Class/Relationship/Base.pm | 3 --- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/CDBICompat/Relationships.pm b/lib/DBIx/Class/CDBICompat/Relationships.pm index 58b29e06c..3ce3ef53d 100644 --- a/lib/DBIx/Class/CDBICompat/Relationships.pm +++ b/lib/DBIx/Class/CDBICompat/Relationships.pm @@ -200,4 +200,8 @@ sub search { $self->next::method($where, $attrs); } +sub new_related { + return shift->search_related(shift)->new_result(shift); +} + 1; diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index e2f6224fc..286825095 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -641,8 +641,6 @@ sub new_related { # sanity check - currently throw when a complex coderef rel is encountered # FIXME - should THROW MOAR! - if (ref $self) { # cdbi calls this as a class method, /me vomits - my $rsrc = $self->result_source; my $rel_info = $rsrc->relationship_info($rel) or $self->throw_exception( "No such relationship '$rel'" ); @@ -664,7 +662,6 @@ sub new_related { map { "'$_'" } @unspecified_rel_condition_chunks )); } - } return $self->search_related($rel)->new_result($values); } From 83a6b24431383e560f414f2fcaefe7b8c08e03d2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sat, 12 Jul 2014 17:32:52 +0200 Subject: [PATCH 089/548] Another heavy refactor of the rel resolver prototype (sequel to 03f6d1f7) Change the return value of _resolve_relationship_condition to a 3-value hash, with a lot of the safety logic consolidated within that method. The best way to gauge the significance of the changes is to look at the diff of lib/DBIx/Class/Relationship/Base.pm Also stop returning the "noncondition-columns" in the _resolve_condition compa-shim. The information was only used by the code removed from ::Relationship::Base, and is rather new with no evidence of use within CPAN/DarkPAN. It can be easily added back if necessary. --- lib/DBIx/Class/Relationship/Base.pm | 80 ++-------- lib/DBIx/Class/ResultSource.pm | 218 ++++++++++++++++++---------- t/relationship/custom.t | 2 +- 3 files changed, 155 insertions(+), 145 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 286825095..e0007ffee 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -631,39 +631,15 @@ your storage until you call L on it. 
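[Editorial aside - illustration only, not part of the patch.] The hunk below rewrites new_related()/set_from_related() to delegate all value inference to _resolve_relationship_condition() via its infer_values_based_on argument. In caller terms the intent is roughly the following sketch, assuming the stock DBICTest schema (Artist has_many 'cds'):

    my $artist = $schema->resultset('Artist')->find(1);

    # the foreign key pointing back at $artist is inferred from the
    # relationship condition; nothing hits storage until ->insert is called
    my $new_cd = $artist->new_related( cds => { title => 'Pet Sounds', year => 1966 } );

    # a custom relationship that resolves to conditions rather than concrete
    # values now throws "Unable to complete value inferrence ..." unless the
    # caller supplies the ambiguous column(s) explicitly (see t/relationship/custom.t)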
=cut sub new_related { - my ($self, $rel, $values) = @_; - - # FIXME - this is a bad position for this (also an identical copy in - # set_from_related), but I have no saner way to hook, and I absolutely - # want this to throw at least for coderefs, instead of the "insert a NULL - # when it gets hard" insanity --ribasushi - # - # sanity check - currently throw when a complex coderef rel is encountered - # FIXME - should THROW MOAR! - - my $rsrc = $self->result_source; - my $rel_info = $rsrc->relationship_info($rel) - or $self->throw_exception( "No such relationship '$rel'" ); - my (undef, $crosstable, $nonequality_foreign_columns) = $rsrc->_resolve_condition ( - $rel_info->{cond}, $rel, $self, $rel - ); - - $self->throw_exception("Relationship '$rel' does not resolve to a join-free condition fragment") - if $crosstable; - - if ( - $nonequality_foreign_columns - and - my @unspecified_rel_condition_chunks = grep { ! exists $values->{$_} } @$nonequality_foreign_columns - ) { - $self->throw_exception(sprintf ( - "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s", - $rel, - map { "'$_'" } @unspecified_rel_condition_chunks - )); - } - - return $self->search_related($rel)->new_result($values); + my ($self, $rel, $data) = @_; + + return $self->search_related($rel)->new_result( $self->result_source->_resolve_relationship_condition ( + infer_values_based_on => $data, + rel_name => $rel, + self_resultobj => $self, + foreign_alias => $rel, + self_alias => 'me', + )->{inferred_values} ); } =head2 create_related @@ -805,37 +781,13 @@ set them in the storage. sub set_from_related { my ($self, $rel, $f_obj) = @_; - my $rsrc = $self->result_source; - my $rel_info = $rsrc->relationship_info($rel) - or $self->throw_exception( "No such relationship '$rel'" ); - - if (defined $f_obj) { - my $f_class = $rel_info->{class}; - $self->throw_exception( "Object '$f_obj' isn't a ".$f_class ) - unless blessed $f_obj and $f_obj->isa($f_class); - } - - - # FIXME - this is a bad position for this (also an identical copy in - # new_related), but I have no saner way to hook, and I absolutely - # want this to throw at least for coderefs, instead of the "insert a NULL - # when it gets hard" insanity --ribasushi - # - # sanity check - currently throw when a complex coderef rel is encountered - # FIXME - should THROW MOAR! - my ($cond, $crosstable, $nonequality_foreign_columns) = $rsrc->_resolve_condition ( - $rel_info->{cond}, $f_obj, $rel, $rel - ); - $self->throw_exception("Relationship '$rel' does not resolve to a join-free condition fragment") - if $crosstable; - - $self->throw_exception(sprintf ( - "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s", - $rel, - map { "'$_'" } @$nonequality_foreign_columns - )) if $nonequality_foreign_columns; - - $self->set_columns($cond); + $self->set_columns( $self->result_source->_resolve_relationship_condition ( + infer_values_based_on => {}, + rel_name => $rel, + foreign_resultobj => $f_obj, + foreign_alias => $rel, + self_alias => 'me', + )->{inferred_values} ); return 1; } diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 886f47e10..c67963488 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1729,11 +1729,15 @@ sub _resolve_condition { ####################### # now it's fucking easy isn't it?! 
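  # --- editorial aside (illustration only, not part of this patch) ---
  # The resolver called below now hands back a three-key hash (documented
  # in the block added further down). Roughly, and hedging on the details:
  #
  #   my $rc = $rsrc->_resolve_relationship_condition(
  #     rel_name      => 'cds',   # hypothetical rel on the invoking source
  #     self_alias    => 'me',
  #     foreign_alias => 'cds',
  #   );
  #   # $rc->{condition}           - crosstable fragment, always present
  #   # $rc->{join_free_condition} - only when it can actually be built
  #   #                              (e.g. a result object was supplied)
  #   # $rc->{inferred_values}     - equality-derived column values, complete
  #   #                              or absent, never partial
  # --------------------------------------------------------------------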
- my @res = $self->_resolve_relationship_condition( $args ); + my $rc = $self->_resolve_relationship_condition( $args ); - # FIXME - this is also insane, but just be consistent for now - # _resolve_relationship_condition always returns qualified cols - # even in the case of objects, but nothing downstream expects this + my @res = ( + ( $rc->{join_free_condition} || $rc->{condition} ), + ! $rc->{join_free_condition}, + ); + + # _resolve_relationship_condition always returns qualified cols even in the + # case of join_free_condition, but nothing downstream expects this if (ref $res[0] eq 'HASH' and ($is_objlike[0] or $is_objlike[1]) ) { $res[0] = { map { ($_ =~ /\.(.+)/) => $res[0]{$_} } @@ -1741,7 +1745,7 @@ sub _resolve_condition { }; } - # more legacy + # and more legacy return wantarray ? @res : $res[0]; } @@ -1754,31 +1758,51 @@ our $UNRESOLVABLE_CONDITION = UNRESOLVABLE_CONDITION; # we are moving to a constant Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); -# Resolves the passed condition to a concrete query fragment and a flag -# indicating whether this is a cross-table condition. Also an optional -# list of non-trivial values (normally conditions) returned as a part -# of a joinfree condition hash +# Resolves the passed condition to a concrete query fragment and extra +# metadata +# +## self-explanatory API, modeled on the custom cond coderef: +# rel_name +# foreign_alias +# foreign_resultobj +# self_alias +# self_resultobj +# require_join_free_condition +# infer_values_based_on (optional, mandatory hashref argument) +# condition (optional, derived from $self->rel_info(rel_name)) +# +## returns a hash +# condition +# join_free_condition (maybe undef) +# inferred_values (maybe undef, always complete or empty) +# sub _resolve_relationship_condition { my $self = shift; - # self-explanatory API, modeled on the custom cond coderef: - # condition - # rel_name - # foreign_alias - # foreign_resultobj - # self_alias - # self_resultobj my $args = { ref $_[0] eq 'HASH' ? %{ $_[0] } : @_ }; for ( qw( rel_name self_alias foreign_alias ) ) { - $self->throw_exception("Mandatory attribute '$_' is not a plain string") + $self->throw_exception("Mandatory argument '$_' is not a plain string") if !defined $args->{$_} or length ref $args->{$_}; } $self->throw_exception('No practical way to resolve a relationship between two objects') if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; - $args->{condition} ||= $self->relationship_info($args->{rel_name})->{cond}; + my $rel_info = $self->relationship_info($args->{rel_name}); + # or $self->throw_exception( "No such relationship '$args->{rel_name}'" ); + + $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) + if defined blessed $args->{foreign_resultobj} and ! $args->{foreign_resultobj}->isa($rel_info->{class}); + + $args->{condition} ||= $rel_info->{cond}; + + $self->throw_exception( "Argument to infer_values_based_on must be a hash" ) + if exists $args->{infer_values_based_on} and ref $args->{infer_values_based_on} ne 'HASH'; + + $args->{require_join_free_condition} ||= !!$args->{infer_values_based_on}; + + my $ret; if (ref $args->{condition} eq 'CODE') { @@ -1787,22 +1811,29 @@ sub _resolve_relationship_condition { self_resultsource => $self, self_alias => $args->{self_alias}, foreign_alias => $args->{foreign_alias}, - self_resultobj => $args->{self_resultobj}, - foreign_resultobj => $args->{foreign_resultobj}, + ( map + { (exists $args->{$_}) ? 
( $_ => $args->{$_} ) : () } + qw( self_resultobj foreign_resultobj ) + ), }; # legacy - never remove these!!! $cref_args->{foreign_relname} = $cref_args->{rel_name}; - $cref_args->{self_rowobj} = $cref_args->{self_resultobj}; - my ($crosstable_cond, $joinfree_cond, @extra) = $args->{condition}->($cref_args); + $cref_args->{self_rowobj} = $cref_args->{self_resultobj} + if exists $cref_args->{self_resultobj}; + + ($ret->{condition}, $ret->{join_free_condition}, my @extra) = $args->{condition}->($cref_args); # FIXME sanity check carp_unique('A custom condition coderef can return at most 2 conditions: extra return values discarded') if @extra; - my @nonvalue_cols; - if ($joinfree_cond) { + if (my $jfc = $ret->{join_free_condition}) { + + $self->throw_exception ( + "The join-free condition returned for relationship '$args->{rel_name}' must be a hash reference" + ) unless ref $jfc eq 'HASH'; my ($joinfree_alias, $joinfree_source); if (defined $args->{self_resultobj}) { @@ -1819,34 +1850,16 @@ sub _resolve_relationship_condition { "A join-free condition returned for relationship '$args->{rel_name}' without a result object to chain from" ) unless $joinfree_alias; - my $fq_col_list = { map { ( "$joinfree_alias.$_" => 1 ) } $joinfree_source->columns }; - - # FIXME another sanity check - if ( - ref $joinfree_cond ne 'HASH' - or - grep { ! $fq_col_list->{$_} } keys %$joinfree_cond - ) { - $self->throw_exception ( - "The join-free condition returned for relationship '$args->{rel_name}' must be a hash " - .'reference with all keys being fully qualified column names of the corresponding source' - ); - } - - # see which parts of the joinfree cond are *NOT* foreign-source-column equalities - my $joinfree_cond_equality_columns = - $self->schema->storage->_extract_fixed_condition_columns($joinfree_cond, 'consider_nulls'); + my $fq_col_list = { map + { ( "$joinfree_alias.$_" => 1 ) } + $joinfree_source->columns + }; - @nonvalue_cols = map - { $_ =~ /^\Q$joinfree_alias.\E(.+)/ } - grep - { ! exists $joinfree_cond_equality_columns->{$_} } - keys %$joinfree_cond; + $fq_col_list->{$_} or $self->throw_exception ( + "The join-free condition returned for relationship '$args->{rel_name}' may only " + . 'contain keys that are fully qualified column names of the corresponding source' + ) for keys %$jfc; - return ($joinfree_cond, 0, (@nonvalue_cols ? \@nonvalue_cols : undef)); - } - else { - return ($crosstable_cond, 1); } } elsif (ref $args->{condition} eq 'HASH') { @@ -1869,16 +1882,13 @@ sub _resolve_relationship_condition { push @l_cols, $lc; } - # plain values - if (! defined $args->{self_resultobj} and ! defined $args->{foreign_resultobj}) { - return ( { map - {( "$args->{foreign_alias}.$f_cols[$_]" => { -ident => "$args->{self_alias}.$l_cols[$_]" } )} - (0..$#f_cols) - }, 1 ); # is crosstable - } - else { + # construct the crosstable condition + $ret->{condition} = { map + {( "$args->{foreign_alias}.$f_cols[$_]" => { -ident => "$args->{self_alias}.$l_cols[$_]" } )} + (0..$#f_cols) + }; - my $cond; + if (exists $args->{self_resultobj} or exists $args->{foreign_resultobj}) { my ($obj, $obj_alias, $plain_alias, $obj_cols, $plain_cols) = defined $args->{self_resultobj} ? ( @{$args}{qw( self_resultobj self_alias foreign_alias )}, \@l_cols, \@f_cols ) @@ -1889,7 +1899,7 @@ sub _resolve_relationship_condition { # FIXME - temp shim if (! 
blessed $obj) { - $cond->{"$plain_alias.$plain_cols->[$i]"} = $obj->{$obj_cols->[$i]}; + $ret->{join_free_condition}{"$plain_alias.$plain_cols->[$i]"} = $obj->{$obj_cols->[$i]}; } elsif ( defined $args->{self_resultobj} @@ -1907,48 +1917,96 @@ sub _resolve_relationship_condition { $obj_cols->[$i], ) if $obj->in_storage; - return UNRESOLVABLE_CONDITION; + # FIXME - temporarly force-override + delete $args->{require_join_free_condition}; + $ret->{join_free_condition} = UNRESOLVABLE_CONDITION; + last; } else { - $cond->{"$plain_alias.$plain_cols->[$i]"} = $obj->get_column($obj_cols->[$i]); + $ret->{join_free_condition}{"$plain_alias.$plain_cols->[$i]"} = $obj->get_column($obj_cols->[$i]); } } - - return ($cond, 0); # joinfree } } elsif (ref $args->{condition} eq 'ARRAY') { if (@{$args->{condition}} == 0) { - return UNRESOLVABLE_CONDITION; + $ret = { + condition => UNRESOLVABLE_CONDITION, + join_free_condition => UNRESOLVABLE_CONDITION, + }; } elsif (@{$args->{condition}} == 1) { - return $self->_resolve_relationship_condition({ + $ret = $self->_resolve_relationship_condition({ %$args, condition => $args->{condition}[0], }); } else { - # FIXME - we are discarding nonvalues here... likely incorrect... - # then again - the entire thing is an OR, so we *can't* use - # the values anyway - # Return a hard crosstable => 1 to ensure nothing tries to use - # the result in such manner - my @ret; - for (@{$args->{condition}}) { - my ($cond) = $self->_resolve_relationship_condition({ - %$args, - condition => $_, - }); - push @ret, $cond; + # we are discarding inferred values here... likely incorrect... + # then again - the entire thing is an OR, so we *can't* use them anyway + for my $subcond ( map + { $self->_resolve_relationship_condition({ %$args, condition => $_ }) } + @{$args->{condition}} + ) { + $self->throw_exception('Either all or none of the OR-condition members can resolve to a join-free condition') + if $ret->{join_free_condition} and ! $subcond->{join_free_condition}; + + $subcond->{$_} and push @{$ret->{$_}}, $subcond->{$_} for (qw(condition join_free_condition)); } - return (\@ret, 1); # forced cross-tab } } else { $self->throw_exception ("Can't handle condition $args->{condition} for relationship '$args->{rel_name}' yet :("); } - die "not supposed to get here - missing return()"; + $self->throw_exception("Relationship '$args->{rel_name}' does not resolve to a join-free condition fragment") if ( + $args->{require_join_free_condition} + and + ( ! $ret->{join_free_condition} or $ret->{join_free_condition} eq UNRESOLVABLE_CONDITION ) + ); + + # we got something back - sanity check and infer values if we can + my @nonvalues; + if ( my $jfc = $ret->{join_free_condition} and $ret->{join_free_condition} ne UNRESOLVABLE_CONDITION ) { + + my $jfc_eqs = $self->schema->storage->_extract_fixed_condition_columns($jfc, 'consider_nulls'); + + if (keys %$jfc_eqs) { + + for (keys %$jfc) { + # $jfc is fully qualified by definition + my ($col) = $_ =~ /\.(.+)/; + + if (exists $jfc_eqs->{$_} and ($jfc_eqs->{$_}||'') ne UNRESOLVABLE_CONDITION) { + $ret->{inferred_values}{$col} = $jfc_eqs->{$_}; + } + elsif ( !$args->{infer_values_based_on} or ! 
exists $args->{infer_values_based_on}{$col} ) { + push @nonvalues, $col; + } + } + + # all or nothing + delete $ret->{inferred_values} if @nonvalues; + } + } + + # did the user explicitly ask + if ($args->{infer_values_based_on}) { + + $self->throw_exception(sprintf ( + "Unable to complete value inferrence - custom relationship '%s' returns conditions instead of values for column(s): %s", + $args->{rel_name}, + map { "'$_'" } @nonvalues + )) if @nonvalues; + + + $ret->{inferred_values} ||= {}; + + $ret->{inferred_values}{$_} = $args->{infer_values_based_on}{$_} + for keys %{$args->{infer_values_based_on}}; + } + + $ret } =head2 related_source diff --git a/t/relationship/custom.t b/t/relationship/custom.t index a623b4b8f..10ab27c9c 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -159,7 +159,7 @@ lives_ok { # try to create_related a 80s cd throws_ok { $artist->create_related('cds_80s', { title => 'related creation 1' }); -} qr/\QCustom relationship 'cds_80s' not definitive - returns conditions instead of values for column(s): 'year'/, +} qr/\QUnable to complete value inferrence - custom relationship 'cds_80s' returns conditions instead of values for column(s): 'year'/, 'Create failed - complex cond'; # now supply an explicit arg overwriting the ambiguous cond From 75ef16a70ae561085c005a9564748bb4dfb80009 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 15 Jul 2014 07:14:31 +0200 Subject: [PATCH 090/548] Remove double has_column check in new=>store_column Standardize exception texts across ::Row --- lib/DBIx/Class/Row.pm | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 2aa5b2395..7277ee27e 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -263,8 +263,6 @@ sub new { next; } } - $new->throw_exception("No such column '$key' on $class") - unless $class->has_column($key); $new->store_column($key => $attrs->{$key}); } @@ -673,7 +671,7 @@ sub get_column { )); } - $self->throw_exception( "No such column '${column}'" ) + $self->throw_exception( "No such column '${column}' on " . ref $self ) unless $self->has_column($column); return undef; @@ -801,7 +799,7 @@ really changed. sub make_column_dirty { my ($self, $column) = @_; - $self->throw_exception( "No such column '${column}'" ) + $self->throw_exception( "No such column '${column}' on " . ref $self ) unless exists $self->{_column_data}{$column} || $self->has_column($column); # the entire clean/dirty code relies on exists, not on true/false @@ -1199,7 +1197,7 @@ extend this method to catch all data setting methods. sub store_column { my ($self, $column, $value) = @_; - $self->throw_exception( "No such column '${column}'" ) + $self->throw_exception( "No such column '${column}' on " . 
ref $self ) unless exists $self->{_column_data}{$column} || $self->has_column($column); $self->throw_exception( "set_column called for ${column} without value" ) if @_ < 3; From 8433421f819142a4e4015993458b6df8f1583869 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 15 Jul 2014 02:36:44 +0200 Subject: [PATCH 091/548] Consolidate bits and pieces under ::_Util::refdesc No functional changes --- lib/DBIx/Class/_Util.pm | 28 ++++++++++++++++++---------- t/lib/DBICTest/Util/LeakTracer.pm | 15 +++------------ 2 files changed, 21 insertions(+), 22 deletions(-) diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index e281b6680..612efa7e5 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -57,7 +57,7 @@ use overload (); use base 'Exporter'; our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray - refcount hrefaddr is_exception + refdesc refcount hrefaddr is_exception is_plain_value is_literal_value UNRESOLVABLE_CONDITION ); @@ -74,7 +74,19 @@ sub sigwarn_silencer ($) { return sub { &$orig_sig_warn unless $_[0] =~ $pattern }; } -sub hrefaddr ($) { sprintf '0x%x', &Scalar::Util::refaddr } +sub hrefaddr ($) { sprintf '0x%x', &Scalar::Util::refaddr||0 } + +sub refdesc ($) { + croak "Expecting a reference" if ! length ref $_[0]; + + # be careful not to trigger stringification, + # reuse @_ as a scratch-pad + sprintf '%s%s(0x%x)', + ( defined( $_[1] = blessed $_[0]) ? "$_[1]=" : '' ), + reftype $_[0], + Scalar::Util::refaddr($_[0]), + ; +} sub refcount ($) { croak "Expecting a reference" if ! length ref $_[0]; @@ -106,7 +118,7 @@ sub is_exception ($) { if (defined $suberror) { if (length (my $class = blessed($e) )) { carp_unique( sprintf( - 'External exception object %s=%s(%s) implements partial (broken) ' + 'External exception object %s implements partial (broken) ' . 'overloading preventing it from being used in simple ($x eq $y) ' . 'comparisons. Given Perl\'s "globally cooperative" exception ' . 'handling this type of brokenness is extremely dangerous on ' @@ -118,9 +130,7 @@ sub is_exception ($) { . 'to the one shown at %s, in order to ensure your exception handling ' . 'is saner application-wide. What follows is the actual error text ' . "as generated by Perl itself:\n\n%s\n ", - $class, - reftype $e, - hrefaddr $e, + refdesc $e, $class, 'http://v.gd/DBIC_overload_tempfix/', $suberror, @@ -226,11 +236,9 @@ sub is_plain_value ($) { if ( (caller($cf))[0] =~ /^(?:DBIx::Class|DBICx::)/ ) { - my $obj = shift; - DBIx::Class::Exception->throw( sprintf ( - "Improper use of %s(%s) instance in list context at %s line %d\n\n\tStacktrace starts", - ref($obj), hrefaddr($obj), (caller($cf))[1,2] + "Improper use of %s instance in list context at %s line %d\n\n\tStacktrace starts", + refdesc($_[0]), (caller($cf))[1,2] ), 'with_stacktrace'); } diff --git a/t/lib/DBICTest/Util/LeakTracer.pm b/t/lib/DBICTest/Util/LeakTracer.pm index 48ec21d72..d0c29eba8 100644 --- a/t/lib/DBICTest/Util/LeakTracer.pm +++ b/t/lib/DBICTest/Util/LeakTracer.pm @@ -5,7 +5,7 @@ use strict; use Carp; use Scalar::Util qw(isweak weaken blessed reftype); -use DBIx::Class::_Util qw(refcount hrefaddr); +use DBIx::Class::_Util qw(refcount hrefaddr refdesc); use DBIx::Class::Optional::Dependencies; use Data::Dumper::Concise; use DBICTest::Util 'stacktrace'; @@ -21,15 +21,6 @@ my $refs_traced = 0; my $leaks_found = 0; my %reg_of_regs; -# so we don't trigger stringification -sub _describe_ref { - sprintf '%s%s(%s)', - (defined blessed $_[0]) ? blessed($_[0]) . 
'=' : '', - reftype $_[0], - hrefaddr $_[0], - ; -} - sub populate_weakregistry { my ($weak_registry, $target, $note) = @_; @@ -65,7 +56,7 @@ sub populate_weakregistry { $refs_traced++; } - my $desc = _describe_ref($target); + my $desc = refdesc $target; $weak_registry->{$refaddr}{slot_names}{$desc} = 1; if ($note) { $note =~ s/\s*\Q$desc\E\s*//g; @@ -153,7 +144,7 @@ sub visit_refs { } scalar PadWalker::closed_over($r) ] }); # scalar due to RT#92269 } 1; - } or warn "Could not descend into @{[ _describe_ref($r) ]}: $@\n"; + } or warn "Could not descend into @{[ refdesc $r ]}: $@\n"; } $visited_cnt; } From 9bea2000ccd0828327e98c3e17c0be3e7df5c593 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 15 Jul 2014 03:48:40 +0200 Subject: [PATCH 092/548] Add one extra is_exception check (missed a spot during 841efcb3f) In addition tweak the message so that carp_unique can in fact catch it properly, and test that the proper amount of warnings is in fact emitted --- Changes | 2 ++ lib/DBIx/Class/Storage/BlockRunner.pm | 2 +- lib/DBIx/Class/_Util.pm | 6 ++--- t/storage/txn.t | 37 +++++++++++++++++++++++++++ t/storage/txn_scope_guard.t | 2 +- 5 files changed, 44 insertions(+), 5 deletions(-) diff --git a/Changes b/Changes index 0b56392e0..6cd1d0158 100644 --- a/Changes +++ b/Changes @@ -34,6 +34,8 @@ Revision history for DBIx::Class without bombing out (RT#93244) - Fix set_inflated_column incorrectly handling \[] literals (GH#44) - Ensure that setting a column to a literal invariably marks it dirty + - Work around exception objects with broken string overloading in one + additional codepath (missed in 0.08260) - Fix inability to handle multiple consecutive transactions with savepoints on DBD::SQLite < 1.39 diff --git a/lib/DBIx/Class/Storage/BlockRunner.pm b/lib/DBIx/Class/Storage/BlockRunner.pm index 8dae0c9be..70ded7e18 100644 --- a/lib/DBIx/Class/Storage/BlockRunner.pm +++ b/lib/DBIx/Class/Storage/BlockRunner.pm @@ -144,7 +144,7 @@ sub _run { my $storage = $self->storage; my $cur_depth = $storage->transaction_depth; - if (defined $txn_init_depth and $run_err eq '') { + if (defined $txn_init_depth and ! is_exception $run_err) { my $delta_txn = (1 + $txn_init_depth) - $cur_depth; if ($delta_txn) { diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 612efa7e5..a7c1b5004 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -118,8 +118,8 @@ sub is_exception ($) { if (defined $suberror) { if (length (my $class = blessed($e) )) { carp_unique( sprintf( - 'External exception object %s implements partial (broken) ' - . 'overloading preventing it from being used in simple ($x eq $y) ' + 'External exception class %s implements partial (broken) overloading ' + . 'preventing its instances from being used in simple ($x eq $y) ' . 'comparisons. Given Perl\'s "globally cooperative" exception ' . 'handling this type of brokenness is extremely dangerous on ' . 'exception objects, as it may (and often does) result in silent ' @@ -130,7 +130,7 @@ sub is_exception ($) { . 'to the one shown at %s, in order to ensure your exception handling ' . 'is saner application-wide. What follows is the actual error text ' . 
"as generated by Perl itself:\n\n%s\n ", - refdesc $e, + $class, $class, 'http://v.gd/DBIC_overload_tempfix/', $suberror, diff --git a/t/storage/txn.t b/t/storage/txn.t index efe3641df..06af84937 100644 --- a/t/storage/txn.t +++ b/t/storage/txn.t @@ -407,4 +407,41 @@ warnings_are { } [], 'No warnings on AutoCommit => 0 with txn_do'; + +# make sure we are not fucking up the stacktrace on broken overloads +{ + package DBICTest::BrokenOverload; + + use overload '""' => sub { $_[0] }; +} + +{ + my @w; + local $SIG{__WARN__} = sub { + $_[0] =~ /\QExternal exception class DBICTest::BrokenOverload implements partial (broken) overloading preventing its instances from being used in simple (\E\$x eq \$y\Q) comparisons/ + ? push @w, @_ + : warn @_ + }; + + my $s = DBICTest->init_schema(no_deploy => 1); + $s->stacktrace(0); + my $g = $s->storage->txn_scope_guard; + my $broken_exception = bless {}, 'DBICTest::BrokenOverload'; + + # FIXME - investigate what confuses the regex engine below + + # do not reformat - line-num part of the test + my $ln = __LINE__ + 6; + throws_ok { + $s->txn_do( sub { + $s->txn_do( sub { + $s->storage->_dbh->disconnect; + die $broken_exception + }); + }) + } qr/\QTransaction aborted: $broken_exception. Rollback failed: lost connection to storage at @{[__FILE__]} line $ln\E\n/; # FIXME wtf - ...\E$/m doesn't work here + + is @w, 1, 'One matching warning only'; +} + done_testing; diff --git a/t/storage/txn_scope_guard.t b/t/storage/txn_scope_guard.t index 4a2c14b19..2f6a00d79 100644 --- a/t/storage/txn_scope_guard.t +++ b/t/storage/txn_scope_guard.t @@ -199,7 +199,7 @@ for my $post_poison (0,1) { my @w; local $SIG{__WARN__} = sub { - $_[0] =~ /External exception object .+? \Qimplements partial (broken) overloading/ + $_[0] =~ /External exception class .+? \Qimplements partial (broken) overloading/ ? push @w, @_ : warn @_ }; From 082f208f38cdd955fb034e0cac4929478e51a21d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 15:12:52 +0200 Subject: [PATCH 093/548] (travis) T::B 1.005 is superseded by 1.300 - remove handholding --- maint/travis-ci_scripts/30_before_script.bash | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index e6c2ab593..26fca7aa4 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -77,15 +77,9 @@ if [[ "$CLEANTEST" = "true" ]]; then # handholding if [[ "$DEVREL_DEPS" == "true" ]] ; then - # Many dists still do not pass tests under tb1.5 properly (and it itself - # does not even install on things like 5.10). Install the *stable-dev* - # latest T::B here, so that it will not show up as a dependency, and - # hence it will not get installed a second time as an unsatisfied dep - # under cpanm --dev + # We are not "quite ready" for SQLA 1.99, do not consider it # - # We are also not "quite ready" for SQLA 1.99, do not consider it - # - installdeps 'Test::Builder~<1.005' 'SQL::Abstract~<1.99' + installdeps 'SQL::Abstract~<1.99' elif ! 
CPAN_is_sane ; then # no configure_requires - we will need the usual suspects anyway @@ -99,13 +93,6 @@ else # using SQLT and set up whatever databases necessary export DBICTEST_SQLT_DEPLOY=1 - # FIXME - need new TB1.5 devrel - # if we run under --dev install latest github of TB1.5 first - # (unreleased workaround for precedence warnings) - if [[ "$DEVREL_DEPS" == "true" ]] ; then - parallel_installdeps_notest git://github.com/nthykier/test-more.git@fix-return-precedence-issue - fi - # do the preinstall in several passes to minimize amount of cross-deps installing # multiple times, and to avoid module re-architecture breaking another install # (e.g. once Carp is upgraded there's no more Carp::Heavy, From 70cf159f21f6f521b6e896b953dfdc425a3d3ce4 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 14:37:12 +0200 Subject: [PATCH 094/548] Skip the namespace tests on plain install Protect users from future spurious changes in uncleaned imports --- t/55namespaces_cleaned.t | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/t/55namespaces_cleaned.t b/t/55namespaces_cleaned.t index 176de5e79..875a77d37 100644 --- a/t/55namespaces_cleaned.t +++ b/t/55namespaces_cleaned.t @@ -36,8 +36,14 @@ use warnings; use Test::More; use lib 't/lib'; -use DBICTest; +BEGIN { + require DBICTest::RunMode; + plan( skip_all => "Skipping test on plain module install" ) + if DBICTest::RunMode->is_plain; +} + +use DBICTest; use File::Find; use File::Spec; use B qw/svref_2object/; From bbcc1fe88ebe43eae92ed14348299d70b8991f32 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 14:22:24 +0200 Subject: [PATCH 095/548] Move the "special loading" check to DBICTest::Util No functional changes --- t/lib/DBICTest.pm | 35 +---------------------------------- t/lib/DBICTest/Util.pm | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/t/lib/DBICTest.pm b/t/lib/DBICTest.pm index 7bc3dde86..aa20b0c0d 100644 --- a/t/lib/DBICTest.pm +++ b/t/lib/DBICTest.pm @@ -4,42 +4,9 @@ package # hide from PAUSE use strict; use warnings; -# this noop trick initializes the STDOUT, so that the TAP::Harness -# issued IO::Select->can_read calls (which are blocking wtf wtf wtf) -# keep spinning and scheduling jobs -# This results in an overall much smoother job-queue drainage, since -# the Harness blocks less -# (ideally this needs to be addressed in T::H, but a quick patchjob -# broke everything so tabling it for now) -BEGIN { - if ($INC{'Test/Builder.pm'}) { - local $| = 1; - print "#\n"; - } -} - -use Module::Runtime 'module_notional_filename'; -BEGIN { - for my $mod (qw( SQL::Abstract::Test SQL::Abstract )) { - if ( $INC{ module_notional_filename($mod) } ) { - # FIXME this does not seem to work in BEGIN - why?! - #require Carp; - #$Carp::Internal{ (__PACKAGE__) }++; - #Carp::croak( __PACKAGE__ . " must be loaded before $mod" ); - - my ($fr, @frame) = 1; - while (@frame = caller($fr++)) { - last if $frame[1] !~ m|^t/lib/DBICTest|; - } - - die __PACKAGE__ . 
" must be loaded before $mod (or modules using $mod) at $frame[1] line $frame[2]\n"; - } - } -} - +use DBICTest::Util 'local_umask'; use DBICTest::Schema; use DBICTest::Util::LeakTracer qw/populate_weakregistry assert_empty_weakregistry/; -use DBICTest::Util 'local_umask'; use Carp; use Path::Class::File (); use File::Spec; diff --git a/t/lib/DBICTest/Util.pm b/t/lib/DBICTest/Util.pm index 847fba8d8..a6c8dfd61 100644 --- a/t/lib/DBICTest/Util.pm +++ b/t/lib/DBICTest/Util.pm @@ -3,6 +3,39 @@ package DBICTest::Util; use warnings; use strict; +# this noop trick initializes the STDOUT, so that the TAP::Harness +# issued IO::Select->can_read calls (which are blocking wtf wtf wtf) +# keep spinning and scheduling jobs +# This results in an overall much smoother job-queue drainage, since +# the Harness blocks less +# (ideally this needs to be addressed in T::H, but a quick patchjob +# broke everything so tabling it for now) +BEGIN { + if ($INC{'Test/Builder.pm'}) { + local $| = 1; + print "#\n"; + } +} + +use Module::Runtime 'module_notional_filename'; +BEGIN { + for my $mod (qw( SQL::Abstract::Test SQL::Abstract )) { + if ( $INC{ module_notional_filename($mod) } ) { + # FIXME this does not seem to work in BEGIN - why?! + #require Carp; + #$Carp::Internal{ (__PACKAGE__) }++; + #Carp::croak( __PACKAGE__ . " must be loaded before $mod" ); + + my ($fr, @frame) = 1; + while (@frame = caller($fr++)) { + last if $frame[1] !~ m|^t/lib/DBICTest|; + } + + die __PACKAGE__ . " must be loaded before $mod (or modules using $mod) at $frame[1] line $frame[2]\n"; + } + } +} + use Config; use Carp 'confess'; use Scalar::Util qw(blessed refaddr); From 609fa2156887e77233e0821fd589ed75c8394f86 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 14:53:42 +0200 Subject: [PATCH 096/548] (travis) Temporary hard-spec of the SQLA dep, lockstep release --- maint/travis-ci_scripts/30_before_script.bash | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index 26fca7aa4..eb642a8a5 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -107,7 +107,7 @@ else parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant - parallel_installdeps_notest 'SQL::Abstract~<1.99' Moose Module::Install JSON SQL::Translator File::Which + parallel_installdeps_notest 'SQL::Abstract@1.78_02' Moose Module::Install JSON SQL::Translator File::Which if [[ -n "$DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then # the official version is very much outdated and does not compile on 5.14+ @@ -127,6 +127,9 @@ if [[ "$CLEANTEST" = "true" ]]; then # we may need to prepend some stuff to that list HARD_DEPS="$(echo $(make listdeps))" + # temporary + HARD_DEPS="R/RI/RIBASUSHI/SQL-Abstract-1.78_02.tar.gz $HARD_DEPS" + ##### TEMPORARY WORKAROUNDS needed in case we will be using CPAN.pm if [[ "$DEVREL_DEPS" != "true" ]] && ! 
CPAN_is_sane ; then # combat dzillirium on harness-wide level, otherwise breakage happens weekly From 5f35ba0fbddfcfe22694f8deff22da4db4f01846 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 09:05:19 +0200 Subject: [PATCH 097/548] Better, consistent handling of -literal/-value in the cond collapser --- lib/DBIx/Class/Storage/DBIHacks.pm | 73 ++++++++++++++++++------------ lib/DBIx/Class/_Util.pm | 2 + t/72pg.t | 6 ++- t/sqlmaker/dbihacks_internals.t | 32 ++++++++++++- 4 files changed, 83 insertions(+), 30 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index ae04942fd..7d974cffd 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1120,25 +1120,36 @@ sub _collapse_cond_unroll_pairs { if (ref $rhs eq 'HASH' and ! keys %$rhs) { # FIXME - SQLA seems to be doing... nothing...? } + elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-ident}) { + push @conds, { $lhs => { '=', $rhs } }; + } + elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-value} and is_plain_value $rhs->{-value}) { + push @conds, { $lhs => $rhs->{-value} }; + } elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) { - for my $p ($self->_collapse_cond_unroll_pairs([ [ $lhs => $rhs->{'='} ] ])) { - - # extra sanity check - if (keys %$p > 1) { - require Data::Dumper::Concise; - local $Data::Dumper::Deepcopy = 1; - $self->throw_exception( - "Internal error: unexpected collapse unroll:" - . Data::Dumper::Concise::Dumper { in => { $lhs => $rhs }, out => $p } - ); - } + if( is_literal_value $rhs->{'='}) { + push @conds, { $lhs => $rhs }; + } + else { + for my $p ($self->_collapse_cond_unroll_pairs([ [ $lhs => $rhs->{'='} ] ])) { + + # extra sanity check + if (keys %$p > 1) { + require Data::Dumper::Concise; + local $Data::Dumper::Deepcopy = 1; + $self->throw_exception( + "Internal error: unexpected collapse unroll:" + . Data::Dumper::Concise::Dumper { in => { $lhs => $rhs }, out => $p } + ); + } - my ($l, $r) = %$p; + my ($l, $r) = %$p; - push @conds, ( ! length ref $r or is_plain_value($r) ) - ? { $l => $r } - : { $l => { '=' => $r } } - ; + push @conds, ( ! length ref $r or is_plain_value($r) ) + ? { $l => $r } + : { $l => { '=' => $r } } + ; + } } } elsif (ref $rhs eq 'ARRAY') { @@ -1207,24 +1218,30 @@ sub _extract_fixed_condition_columns { if (!defined ($v = $where_hash->{$c}) ) { $vals->{$undef_marker} = $v if $consider_nulls } - elsif ( - ! length ref $v - or - is_plain_value ($v) - ) { - $vals->{$v} = $v; - } elsif ( ref $v eq 'HASH' and keys %$v == 1 - and - ref $v->{'='} - and + ) { + if (exists $v->{-value}) { + if (defined $v->{-value}) { + $vals->{$v->{-value}} = $v->{-value} + } + elsif( $consider_nulls ) { + $vals->{$undef_marker} = $v->{-value}; + } + } # do not need to check for plain values - _collapse_cond did it for us - is_literal_value($v->{'='}) + elsif(ref $v->{'='} and is_literal_value($v->{'='}) ) { + $vals->{$v->{'='}} = $v->{'='}; + } + } + elsif ( + ! length ref $v + or + is_plain_value ($v) ) { - $vals->{$v->{'='}} = $v->{'='}; + $vals->{$v} = $v; } elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') { for ( @{$v}[1..$#$v] ) { diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index a7c1b5004..a77f8a31f 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -172,6 +172,8 @@ sub is_literal_value ($) { ( ref $_[0] eq 'SCALAR' or + ( ref $_[0] eq 'HASH' and keys %{$_[0]} == 1 and defined $_[0]->{-ident} and ! 
length ref $_[0]->{-ident} ) + or ( ref $_[0] eq 'REF' and ref ${$_[0]} eq 'ARRAY' ) ) ? 1 : 0; } diff --git a/t/72pg.t b/t/72pg.t index 6e1ca7dbf..c02a5e32f 100644 --- a/t/72pg.t +++ b/t/72pg.t @@ -7,6 +7,7 @@ use Sub::Name; use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; +use DBIx::Class::_Util 'is_literal_value'; plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg') unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg'); @@ -291,7 +292,10 @@ for my $use_insert_returning ($test_server_supports_insert_returning { -value => [3,4] }, \[ '= ?' => [arrayfield => [3, 4]] ], ) { - local $TODO = 'No introspection of complex conditions :('; + local $TODO = 'No introspection of complex literal conditions :(' + if is_literal_value $cond; + + my $arr_rs_cond = $arr_rs->search({ arrayfield => $cond }); my $row = $arr_rs_cond->create({}); diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 66f0148d5..84abaf173 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -5,7 +5,7 @@ use Test::Warn; use lib qw(t/lib); use DBICTest ':DiffSQL'; -use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; +use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION modver_gt_or_eq); use Data::Dumper; @@ -131,6 +131,36 @@ for my $t ( sql => 'WHERE ( _macro.to LIKE ? OR _wc_macros.to LIKE ? ) AND group.is_active = ? AND me.is_active = ?', efcc_result => { 'group.is_active' => 1, 'me.is_active' => 1 }, }, + + # need fixed SQLA to correctly work with this + # + ( modver_gt_or_eq('SQL::Abstract', '1.78_01') ? { + where => { -and => [ + artistid => { -value => [1] }, + charfield => { -ident => 'foo' }, + name => { '=' => { -value => undef } }, + rank => { '=' => { -ident => 'bar' } }, + ] }, + sql => 'WHERE artistid = ? 
AND charfield = foo AND name IS NULL AND rank = bar', + cc_result => { + artistid => { -value => [1] }, + name => undef, + charfield => { '=', { -ident => 'foo' } }, + rank => { '=' => { -ident => 'bar' } }, + }, + efcc_result => { + artistid => [1], + charfield => { -ident => 'foo' }, + rank => { -ident => 'bar' }, + }, + efcc_n_result => { + artistid => [1], + name => undef, + charfield => { -ident => 'foo' }, + rank => { -ident => 'bar' }, + }, + } : () ), + { where => { artistid => [] }, cc_result => { artistid => [] }, From b5ce6748f58040ca877fd05e8f004b14d46b2ba9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 17 Jul 2014 11:20:30 +0200 Subject: [PATCH 098/548] is_X_value functions introduced in 3705e3b28 migrated to SQLA with fixups --- Makefile.PL | 2 +- lib/DBIx/Class/FilterColumn.pm | 2 +- lib/DBIx/Class/InflateColumn.pm | 2 +- lib/DBIx/Class/Row.pm | 2 +- lib/DBIx/Class/Storage/DBI.pm | 2 +- lib/DBIx/Class/Storage/DBI/SQLite.pm | 3 +- lib/DBIx/Class/Storage/DBIHacks.pm | 3 +- lib/DBIx/Class/_Util.pm | 43 ------------------- t/53lean_startup.t | 2 +- t/55namespaces_cleaned.t | 15 +++++-- t/72pg.t | 2 +- t/sqlmaker/dbihacks_internals.t | 8 ++-- .../prefer_stringification.t} | 27 ------------ 13 files changed, 26 insertions(+), 87 deletions(-) rename t/{internals/is_plain_value.t => storage/prefer_stringification.t} (53%) diff --git a/Makefile.PL b/Makefile.PL index 531f3f835..5a952e29e 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -85,7 +85,7 @@ my $runtime_requires = { 'namespace::clean' => '0.24', 'Path::Class' => '0.18', 'Scope::Guard' => '0.03', - 'SQL::Abstract' => '1.78', + 'SQL::Abstract' => '1.78_02', # TEMPORARY 'Try::Tiny' => '0.07', # Technically this is not a core dependency - it is only required diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index eb4666409..222dabd37 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -3,7 +3,7 @@ use strict; use warnings; use base 'DBIx::Class::Row'; -use DBIx::Class::_Util 'is_literal_value'; +use SQL::Abstract 'is_literal_value'; use namespace::clean; sub filter_column { diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index 3236b1cb3..e10af303d 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -4,7 +4,7 @@ use strict; use warnings; use base 'DBIx::Class::Row'; -use DBIx::Class::_Util 'is_literal_value'; +use SQL::Abstract 'is_literal_value'; use namespace::clean; =head1 NAME diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 7277ee27e..d856ab51d 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -9,7 +9,7 @@ use Scalar::Util 'blessed'; use List::Util 'first'; use Try::Tiny; use DBIx::Class::Carp; -use DBIx::Class::_Util 'is_literal_value'; +use SQL::Abstract 'is_literal_value'; ### ### Internal method diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 0e579e95e..230a849fa 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -14,7 +14,7 @@ use Sub::Name 'subname'; use Context::Preserve 'preserve_context'; use Try::Tiny; use Data::Compare (); # no imports!!! 
guard against insane architecture -use DBIx::Class::_Util qw(is_plain_value is_literal_value); +use SQL::Abstract qw(is_plain_value is_literal_value); use namespace::clean; # default cursor class, overridable in connect_info attributes diff --git a/lib/DBIx/Class/Storage/DBI/SQLite.pm b/lib/DBIx/Class/Storage/DBI/SQLite.pm index 3024e890d..fa6f80688 100644 --- a/lib/DBIx/Class/Storage/DBI/SQLite.pm +++ b/lib/DBIx/Class/Storage/DBI/SQLite.pm @@ -6,7 +6,8 @@ use warnings; use base qw/DBIx::Class::Storage::DBI/; use mro 'c3'; -use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer is_plain_value); +use SQL::Abstract 'is_plain_value'; +use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer); use DBIx::Class::Carp; use Try::Tiny; use namespace::clean; diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 7d974cffd..ef890921d 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -16,7 +16,8 @@ use mro 'c3'; use List::Util 'first'; use Scalar::Util 'blessed'; use Sub::Name 'subname'; -use DBIx::Class::_Util qw(is_plain_value is_literal_value UNRESOLVABLE_CONDITION); +use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; +use SQL::Abstract qw(is_plain_value is_literal_value); use namespace::clean; # diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index a77f8a31f..02de9a6dd 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -52,13 +52,11 @@ use DBIx::Class::Carp '^DBIx::Class|^DBICTest'; use Carp 'croak'; use Scalar::Util qw(weaken blessed reftype); use List::Util qw(first); -use overload (); use base 'Exporter'; our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray refdesc refcount hrefaddr is_exception - is_plain_value is_literal_value UNRESOLVABLE_CONDITION ); @@ -168,47 +166,6 @@ sub modver_gt_or_eq ($$) { eval { $mod->VERSION($ver) } ? 1 : 0; } -sub is_literal_value ($) { - ( - ref $_[0] eq 'SCALAR' - or - ( ref $_[0] eq 'HASH' and keys %{$_[0]} == 1 and defined $_[0]->{-ident} and ! length ref $_[0]->{-ident} ) - or - ( ref $_[0] eq 'REF' and ref ${$_[0]} eq 'ARRAY' ) - ) ? 1 : 0; -} - -# FIXME XSify - this can be done so much more efficiently -sub is_plain_value ($) { - no strict 'refs'; - ( - # plain scalar - (! length ref $_[0]) - or - ( - blessed $_[0] - and - # deliberately not using Devel::OverloadInfo - the checks we are - # intersted in are much more limited than the fullblown thing, and - # this is a relatively hot piece of code - ( - # FIXME - DBI needs fixing to stringify regardless of DBD - # - # either has stringification which DBI SHOULD prefer out of the box - #first { *{$_ . '::(""'}{CODE} } @{ mro::get_linear_isa( ref $_[0] ) } - overload::Method($_[0], '""') - or - # has nummification and fallback is *not* disabled - ( - $_[1] = first { *{"${_}::(0+"}{CODE} } @{ mro::get_linear_isa( ref $_[0] ) } - and - ( ! defined ${"$_[1]::()"} or ${"$_[1]::()"} ) - ) - ) - ) - ) ? 
1 : 0; -} - { my $list_ctx_ok_stack_marker; diff --git a/t/53lean_startup.t b/t/53lean_startup.t index 311fa24a2..686819141 100644 --- a/t/53lean_startup.t +++ b/t/53lean_startup.t @@ -106,6 +106,7 @@ BEGIN { Class::Accessor::Grouped Class::C3::Componentised + SQL::Abstract )); require DBICTest::Schema; @@ -129,7 +130,6 @@ BEGIN { { register_lazy_loadable_requires(qw( DBI - SQL::Abstract Hash::Merge )); diff --git a/t/55namespaces_cleaned.t b/t/55namespaces_cleaned.t index 875a77d37..042806977 100644 --- a/t/55namespaces_cleaned.t +++ b/t/55namespaces_cleaned.t @@ -149,9 +149,18 @@ for my $mod (@modules) { last; } } - fail ("${mod}::${name} appears to have entered inheritance chain by import into " - . ($via || 'UNKNOWN') - ); + + # exception time + if ( + ( $name eq 'import' and $via = 'Exporter' ) + ) { + pass("${mod}::${name} is a valid uncleaned import from ${name}"); + } + else { + fail ("${mod}::${name} appears to have entered inheritance chain by import into " + . ($via || 'UNKNOWN') + ); + } } } diff --git a/t/72pg.t b/t/72pg.t index c02a5e32f..8e4b1420c 100644 --- a/t/72pg.t +++ b/t/72pg.t @@ -7,7 +7,7 @@ use Sub::Name; use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; -use DBIx::Class::_Util 'is_literal_value'; +use SQL::Abstract 'is_literal_value'; plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg') unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg'); diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 84abaf173..1f555fc4a 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -5,7 +5,7 @@ use Test::Warn; use lib qw(t/lib); use DBICTest ':DiffSQL'; -use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION modver_gt_or_eq); +use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use Data::Dumper; @@ -132,9 +132,7 @@ for my $t ( efcc_result => { 'group.is_active' => 1, 'me.is_active' => 1 }, }, - # need fixed SQLA to correctly work with this - # - ( modver_gt_or_eq('SQL::Abstract', '1.78_01') ? 
{ + { where => { -and => [ artistid => { -value => [1] }, charfield => { -ident => 'foo' }, @@ -159,7 +157,7 @@ for my $t ( charfield => { -ident => 'foo' }, rank => { -ident => 'bar' }, }, - } : () ), + }, { where => { artistid => [] }, diff --git a/t/internals/is_plain_value.t b/t/storage/prefer_stringification.t similarity index 53% rename from t/internals/is_plain_value.t rename to t/storage/prefer_stringification.t index 81fe902bd..ffb292a05 100644 --- a/t/internals/is_plain_value.t +++ b/t/storage/prefer_stringification.t @@ -2,30 +2,11 @@ use warnings; use strict; use Test::More; -use Test::Warn; use lib qw(t/lib); use DBICTest; -use DBIx::Class::_Util 'is_plain_value'; - { - package # hideee - DBICTest::SillyInt; - - use overload - # *DELIBERATELY* unspecified - #fallback => 1, - '0+' => sub { ${$_[0]} }, - ; - - - package # hideee - DBICTest::SillyInt::Subclass; - - our @ISA = 'DBICTest::SillyInt'; - - package # hideee DBICTest::CrazyInt; @@ -49,12 +30,4 @@ use DBIx::Class::_Util 'is_plain_value'; is( $schema->resultset('Artist')->next->name, 999, 'DBI preferred stringified version' ); } - -# make sure we recognize overloaded stuff properly -{ - my $num = bless( \do { my $foo = 69 }, 'DBICTest::SillyInt::Subclass' ); - ok( is_plain_value $num, 'parent-fallback-provided stringification detected' ); - is("$num", 69, 'test overloaded object stringifies, without specified fallback'); -} - done_testing; From e89c79683fccf5cb93e5215bba92927bf32ef02b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 18 Jul 2014 10:42:58 +0200 Subject: [PATCH 099/548] Simplify no wantarray assert (a9da9b6a) - fish out args from caller --- lib/DBIx/Class/Relationship/Accessor.pm | 2 +- lib/DBIx/Class/Relationship/ManyToMany.pm | 2 +- lib/DBIx/Class/ResultSet.pm | 2 +- lib/DBIx/Class/ResultSetColumn.pm | 2 +- lib/DBIx/Class/_Util.pm | 19 +++++++++++++++---- 5 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Accessor.pm b/lib/DBIx/Class/Relationship/Accessor.pm index 3a12f2836..fee162124 100644 --- a/lib/DBIx/Class/Relationship/Accessor.pm +++ b/lib/DBIx/Class/Relationship/Accessor.pm @@ -82,7 +82,7 @@ sub add_relationship_accessor { ); } elsif ($acc_type eq 'multi') { $meth{$rel} = sub { - DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and wantarray and my $sog = fail_on_internal_wantarray($_[0]); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; shift->search_related($rel, @_) }; $meth{"${rel}_rs"} = sub { shift->search_related_rs($rel, @_) }; diff --git a/lib/DBIx/Class/Relationship/ManyToMany.pm b/lib/DBIx/Class/Relationship/ManyToMany.pm index 8237602b9..36ec18d06 100644 --- a/lib/DBIx/Class/Relationship/ManyToMany.pm +++ b/lib/DBIx/Class/Relationship/ManyToMany.pm @@ -71,7 +71,7 @@ EOW my $meth_name = join '::', $class, $meth; *$meth_name = subname $meth_name, sub { - DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and wantarray and my $sog = fail_on_internal_wantarray($_[0]); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; my $self = shift; my $rs = $self->$rs_meth( @_ ); return (wantarray ? 
$rs->all : $rs); diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index f71bf3834..b21415ebb 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -389,7 +389,7 @@ sub search { my $rs = $self->search_rs( @_ ); if (wantarray) { - DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray($rs); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; return $rs->all; } elsif (defined wantarray) { diff --git a/lib/DBIx/Class/ResultSetColumn.pm b/lib/DBIx/Class/ResultSetColumn.pm index 3756dbf4a..74473e7fd 100644 --- a/lib/DBIx/Class/ResultSetColumn.pm +++ b/lib/DBIx/Class/ResultSetColumn.pm @@ -406,7 +406,7 @@ sub func { my $cursor = $self->func_rs($function)->cursor; if( wantarray ) { - DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray($self); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; return map { $_->[ 0 ] } $cursor->all; } diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 02de9a6dd..d43d836b3 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -169,7 +169,7 @@ sub modver_gt_or_eq ($$) { { my $list_ctx_ok_stack_marker; - sub fail_on_internal_wantarray { + sub fail_on_internal_wantarray () { return if $list_ctx_ok_stack_marker; if (! defined wantarray) { @@ -192,12 +192,23 @@ sub modver_gt_or_eq ($$) { $cf++; } + my ($fr, $want, $argdesc); + { + package DB; + $fr = [ caller($cf) ]; + $want = ( caller($cf-1) )[5]; + $argdesc = ref $DB::args[0] + ? DBIx::Class::_Util::refdesc($DB::args[0]) + : 'non ' + ; + }; + if ( - (caller($cf))[0] =~ /^(?:DBIx::Class|DBICx::)/ + $want and $fr->[0] =~ /^(?:DBIx::Class|DBICx::)/ ) { DBIx::Class::Exception->throw( sprintf ( - "Improper use of %s instance in list context at %s line %d\n\n\tStacktrace starts", - refdesc($_[0]), (caller($cf))[1,2] + "Improper use of %s instance in list context at %s line %d\n\n Stacktrace starts", + $argdesc, @{$fr}[1,2] ), 'with_stacktrace'); } From e84ae5d1931244d9a11cd58488aeaf34e33cf234 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 18 Jul 2014 11:49:49 +0200 Subject: [PATCH 100/548] Remove duplicate arg-check in create(), adjust exception text --- lib/DBIx/Class/ResultSet.pm | 8 +++----- t/cdbi/02-Film.t | 2 +- t/cdbi/09-has_many.t | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index b21415ebb..e81fc82c9 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -2446,7 +2446,7 @@ sub new_result { $self->throw_exception( "new_result takes only one argument - a hashref of values" ) if @_ > 2; - $self->throw_exception( "new_result expects a hashref" ) + $self->throw_exception( "Result object instantiation requires a hashref as argument" ) unless (ref $values eq 'HASH'); my ($merged_cond, $cols_from_relations) = $self->_merge_with_rscond($values); @@ -2747,10 +2747,8 @@ L. 
=cut sub create { - my ($self, $col_data) = @_; - $self->throw_exception( "create needs a hashref" ) - unless ref $col_data eq 'HASH'; - return $self->new_result($col_data)->insert; + #my ($self, $col_data) = @_; + return shift->new_result(shift)->insert; } =head2 find_or_create diff --git a/t/cdbi/02-Film.t b/t/cdbi/02-Film.t index fe4a6917b..5d0f8607e 100644 --- a/t/cdbi/02-Film.t +++ b/t/cdbi/02-Film.t @@ -32,7 +32,7 @@ is(Film->__driver, "SQLite", "Driver set correctly"); } eval { my $duh = Film->insert; }; -like $@, qr/create needs a hashref/, "needs a hashref"; +like $@, qr/Result object instantiation requires a hashref as argument/, "needs a hashref"; ok +Film->create_test_film; diff --git a/t/cdbi/09-has_many.t b/t/cdbi/09-has_many.t index 89a59a53c..0c751a09f 100644 --- a/t/cdbi/09-has_many.t +++ b/t/cdbi/09-has_many.t @@ -46,7 +46,7 @@ eval { my $pj = Film->add_to_actors(\%pj_data) }; like $@, qr/class/, "add_to_actors must be object method"; eval { my $pj = $btaste->add_to_actors(%pj_data) }; -like $@, qr/expects a hashref/, "add_to_actors takes hash"; +like $@, qr/Result object instantiation requires a hashref as argument/, "add_to_actors takes hash"; ok( my $pj = $btaste->add_to_actors( From 209a20649200c6885697ced98d8499022c2e9eeb Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 20 Jul 2014 14:07:28 +0200 Subject: [PATCH 101/548] Greatly improve diagnostic messages of _resolve_relationship_condition --- lib/DBIx/Class/ResultSource.pm | 24 +++++++++++++----------- t/relationship/custom.t | 4 ++-- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index c67963488..6a5bbc9b7 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1782,16 +1782,19 @@ sub _resolve_relationship_condition { my $args = { ref $_[0] eq 'HASH' ? %{ $_[0] } : @_ }; for ( qw( rel_name self_alias foreign_alias ) ) { - $self->throw_exception("Mandatory argument '$_' is not a plain string") + $self->throw_exception("Mandatory argument '$_' to _resolve_relationship_condition() is not a plain string") if !defined $args->{$_} or length ref $args->{$_}; } - $self->throw_exception('No practical way to resolve a relationship between two objects') - if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; - my $rel_info = $self->relationship_info($args->{rel_name}); # or $self->throw_exception( "No such relationship '$args->{rel_name}'" ); + my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'"; + + $self->throw_exception("No practical way to resolve $exception_rel_id between two objects") + if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; + + $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) if defined blessed $args->{foreign_resultobj} and ! 
$args->{foreign_resultobj}->isa($rel_info->{class}); @@ -1832,7 +1835,7 @@ sub _resolve_relationship_condition { if (my $jfc = $ret->{join_free_condition}) { $self->throw_exception ( - "The join-free condition returned for relationship '$args->{rel_name}' must be a hash reference" + "The join-free condition returned for $exception_rel_id must be a hash reference" ) unless ref $jfc eq 'HASH'; my ($joinfree_alias, $joinfree_source); @@ -1847,7 +1850,7 @@ sub _resolve_relationship_condition { # FIXME sanity check until things stabilize, remove at some point $self->throw_exception ( - "A join-free condition returned for relationship '$args->{rel_name}' without a result object to chain from" + "A join-free condition returned for $exception_rel_id without a result object to chain from" ) unless $joinfree_alias; my $fq_col_list = { map @@ -1856,7 +1859,7 @@ sub _resolve_relationship_condition { }; $fq_col_list->{$_} or $self->throw_exception ( - "The join-free condition returned for relationship '$args->{rel_name}' may only " + "The join-free condition returned for $exception_rel_id may only " . 'contain keys that are fully qualified column names of the corresponding source' ) for keys %$jfc; @@ -1956,10 +1959,10 @@ sub _resolve_relationship_condition { } } else { - $self->throw_exception ("Can't handle condition $args->{condition} for relationship '$args->{rel_name}' yet :("); + $self->throw_exception ("Can't handle condition $args->{condition} for $exception_rel_id yet :("); } - $self->throw_exception("Relationship '$args->{rel_name}' does not resolve to a join-free condition fragment") if ( + $self->throw_exception(ucfirst "$exception_rel_id does not resolve to a join-free condition fragment") if ( $args->{require_join_free_condition} and ( ! $ret->{join_free_condition} or $ret->{join_free_condition} eq UNRESOLVABLE_CONDITION ) @@ -1994,8 +1997,7 @@ sub _resolve_relationship_condition { if ($args->{infer_values_based_on}) { $self->throw_exception(sprintf ( - "Unable to complete value inferrence - custom relationship '%s' returns conditions instead of values for column(s): %s", - $args->{rel_name}, + "Unable to complete value inferrence - custom $exception_rel_id returns conditions instead of values for column(s): %s", map { "'$_'" } @nonvalues )) if @nonvalues; diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 10ab27c9c..899b244a1 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -159,7 +159,7 @@ lives_ok { # try to create_related a 80s cd throws_ok { $artist->create_related('cds_80s', { title => 'related creation 1' }); -} qr/\QUnable to complete value inferrence - custom relationship 'cds_80s' returns conditions instead of values for column(s): 'year'/, +} qr/\QUnable to complete value inferrence - custom relationship 'cds_80s' on source 'Artist' returns conditions instead of values for column(s): 'year'/, 'Create failed - complex cond'; # now supply an explicit arg overwriting the ambiguous cond @@ -182,7 +182,7 @@ is( # try a specific everything via a non-simplified rel throws_ok { $artist->create_related('cds_90s', { title => 'related_creation 4', year => '2038' }); -} qr/\QRelationship 'cds_90s' does not resolve to a join-free condition fragment/, +} qr/\QRelationship 'cds_90s' on source 'Artist' does not resolve to a join-free condition fragment/, 'Create failed - non-simplified rel'; # Do a self-join last-entry search From 77c3a5dca478801246ff728f80a0c5013e57f4a2 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 18 Jul 2014 18:11:15 +0200 
Subject: [PATCH 102/548] Add internal assertion guard for some indirect calls (for now only create/new) Modeled on the idea of a9da9b6a, this one blows up when a public proxy is called insted of the equivalent public chan of methods This particular set of changes to create() and new() is solely an optimisation (unlike a subsequent commit to ::ResultSourceProxy) --- lib/DBIx/Class/Admin.pm | 2 +- lib/DBIx/Class/ResultSet.pm | 17 +++++++++------ lib/DBIx/Class/Schema/Versioned.pm | 6 +++--- lib/DBIx/Class/_Util.pm | 34 +++++++++++++++++++++++++++++- 4 files changed, 48 insertions(+), 11 deletions(-) diff --git a/lib/DBIx/Class/Admin.pm b/lib/DBIx/Class/Admin.pm index 4c7c6bb13..003f4ae32 100644 --- a/lib/DBIx/Class/Admin.pm +++ b/lib/DBIx/Class/Admin.pm @@ -451,7 +451,7 @@ sub insert { $rs ||= $self->resultset(); $set ||= $self->set(); my $resultset = $self->schema->resultset($rs); - my $obj = $resultset->create( $set ); + my $obj = $resultset->new_result($set)->insert; print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n" if (!$self->quiet); } diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index e81fc82c9..02625379b 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -7,7 +7,7 @@ use DBIx::Class::Carp; use DBIx::Class::ResultSetColumn; use Scalar::Util qw/blessed weaken reftype/; use DBIx::Class::_Util qw( - fail_on_internal_wantarray UNRESOLVABLE_CONDITION + fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION ); use Try::Tiny; use Data::Compare (); # no imports!!! guard against insane architecture @@ -301,7 +301,11 @@ creation B. See also warning pertaining to L. sub new { my $class = shift; - return $class->new_result(@_) if ref $class; + + if (ref $class) { + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; + return $class->new_result(@_); + } my ($source, $attrs) = @_; $source = $source->resolve @@ -2236,7 +2240,7 @@ sub populate { return unless @$data; if(defined wantarray) { - my @created = map { $self->create($_) } @$data; + my @created = map { $self->new_result($_)->insert } @$data; return wantarray ? @created : \@created; } else { @@ -2272,7 +2276,7 @@ sub populate { foreach my $rel (@rels) { next unless ref $data->[$index]->{$rel} eq "HASH"; - my $result = $self->related_resultset($rel)->create($data->[$index]->{$rel}); + my $result = $self->related_resultset($rel)->new_result($data->[$index]->{$rel})->insert; my (undef, $reverse_relinfo) = %{$rsrc->reverse_relationship_info($rel)}; my $related = $result->result_source->_resolve_condition( $reverse_relinfo->{cond}, @@ -2748,6 +2752,7 @@ L. 
sub create { #my ($self, $col_data) = @_; + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call; return shift->new_result(shift)->insert; } @@ -2830,7 +2835,7 @@ sub find_or_create { if (keys %$hash and my $row = $self->find($hash, $attrs) ) { return $row; } - return $self->create($hash); + return $self->new_result($hash)->insert; } =head2 update_or_create @@ -2900,7 +2905,7 @@ sub update_or_create { return $row; } - return $self->create($cond); + return $self->new_result($cond)->insert; } =head2 update_or_new diff --git a/lib/DBIx/Class/Schema/Versioned.pm b/lib/DBIx/Class/Schema/Versioned.pm index 114064a10..95adc66b0 100644 --- a/lib/DBIx/Class/Schema/Versioned.pm +++ b/lib/DBIx/Class/Schema/Versioned.pm @@ -603,7 +603,7 @@ sub _on_connect my $vtable_compat = DBIx::Class::VersionCompat->connect(@$conn_info)->resultset('TableCompat'); if ($self->_source_exists($vtable_compat)) { $self->{vschema}->deploy; - map { $vtable->create({ installed => $_->Installed, version => $_->Version }) } $vtable_compat->all; + map { $vtable->new_result({ installed => $_->Installed, version => $_->Version })->insert } $vtable_compat->all; $self->storage->_get_dbh->do("DROP TABLE " . $vtable_compat->result_source->from); } } @@ -710,7 +710,7 @@ sub _set_db_version { # formatted by this new function will sort _after_ any existing 200... strings. my @tm = gettimeofday(); my @dt = gmtime ($tm[0]); - my $o = $vtable->create({ + my $o = $vtable->new_result({ version => $version, installed => sprintf("v%04d%02d%02d_%02d%02d%02d.%03.0f", $dt[5] + 1900, @@ -721,7 +721,7 @@ sub _set_db_version { $dt[0], int($tm[1] / 1000), # convert to millisecs ), - }); + })->insert; } sub _read_sql_file { diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index d43d836b3..5a35ab3f1 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -30,6 +30,8 @@ BEGIN { ASSERT_NO_INTERNAL_WANTARRAY => $ENV{DBIC_ASSERT_NO_INTERNAL_WANTARRAY} ? 1 : 0, + ASSERT_NO_INTERNAL_INDIRECT_CALLS => $ENV{DBIC_ASSERT_NO_INTERNAL_INDIRECT_CALLS} ? 1 : 0, + IV_SIZE => $Config{ivsize}, OS_NAME => $^O, @@ -55,7 +57,8 @@ use List::Util qw(first); use base 'Exporter'; our @EXPORT_OK = qw( - sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray + sigwarn_silencer modver_gt_or_eq + fail_on_internal_wantarray fail_on_internal_call refdesc refcount hrefaddr is_exception UNRESOLVABLE_CONDITION ); @@ -218,4 +221,33 @@ sub modver_gt_or_eq ($$) { } } +sub fail_on_internal_call { + my ($fr, $argdesc); + { + package DB; + $fr = [ caller(1) ]; + $argdesc = ref $DB::args[0] + ? 
DBIx::Class::_Util::refdesc($DB::args[0]) + : undef + ; + }; + + if ( + $argdesc + and + $fr->[0] =~ /^(?:DBIx::Class|DBICx::)/ + and + $fr->[1] !~ /\b(?:CDBICompat|ResultSetProxy)\b/ # no point touching there + ) { + DBIx::Class::Exception->throw( sprintf ( + "Illegal internal call of indirect proxy-method %s() with argument %s: examine the last lines of the proxy method deparse below to determine what to call directly instead at %s on line %d\n\n%s\n\n Stacktrace starts", + $fr->[3], $argdesc, @{$fr}[1,2], ( $fr->[6] || do { + require B::Deparse; + no strict 'refs'; + B::Deparse->new->coderef2text(\&{$fr->[3]}) + }), + ), 'with_stacktrace'); + } +} + 1; From 01b25f121c701218255c44a5037f1c57c8a00382 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 21 Jul 2014 09:57:24 +0200 Subject: [PATCH 103/548] Replace B::perlstring with our own implmentation thereof Haarg++ for pointing this out --- lib/DBIx/Class/ResultSource/RowParser/Util.pm | 2 +- lib/DBIx/Class/_Util.pm | 3 +++ t/resultset/rowparser_internals.t | 3 ++- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/DBIx/Class/ResultSource/RowParser/Util.pm b/lib/DBIx/Class/ResultSource/RowParser/Util.pm index 6203efa39..4a03a554d 100644 --- a/lib/DBIx/Class/ResultSource/RowParser/Util.pm +++ b/lib/DBIx/Class/ResultSource/RowParser/Util.pm @@ -5,7 +5,7 @@ use strict; use warnings; use List::Util 'first'; -use B 'perlstring'; +use DBIx::Class::_Util 'perlstring'; use constant HAS_DOR => ( $] < 5.010 ? 0 : 1 ); diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 5a35ab3f1..8c2ef12b2 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -60,6 +60,7 @@ our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray fail_on_internal_call refdesc refcount hrefaddr is_exception + perlstring UNRESOLVABLE_CONDITION ); @@ -75,6 +76,8 @@ sub sigwarn_silencer ($) { return sub { &$orig_sig_warn unless $_[0] =~ $pattern }; } +sub perlstring ($) { q{"}. quotemeta( shift ). 
q{"} }; + sub hrefaddr ($) { sprintf '0x%x', &Scalar::Util::refaddr||0 } sub refdesc ($) { diff --git a/t/resultset/rowparser_internals.t b/t/resultset/rowparser_internals.t index b089ecc00..dd89b4096 100644 --- a/t/resultset/rowparser_internals.t +++ b/t/resultset/rowparser_internals.t @@ -5,6 +5,7 @@ use Test::More; use lib qw(t/lib); use DBICTest; use B::Deparse; +use DBIx::Class::_Util 'perlstring'; # globally set for the rest of test # the rowparser maker does not order its hashes by default for the miniscule @@ -758,7 +759,7 @@ sub is_same_src { SKIP: { skip "Not testing equality of source containing defined-or operator on this perl $]", 1 if ($] < 5.010 and$expect =~ m!\Q//=!); - $expect =~ s/__NBC__/B::perlstring($DBIx::Class::ResultSource::RowParser::Util::null_branch_class)/ge; + $expect =~ s/__NBC__/perlstring($DBIx::Class::ResultSource::RowParser::Util::null_branch_class)/ge; $expect = " { use strict; use warnings FATAL => 'all';\n$expect\n }"; From 7f9a3f70074c5d4eb4e8260648f055b7556a7a4f Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 18 Jul 2014 18:21:20 +0200 Subject: [PATCH 104/548] Standardize the Moo import block, move quote_sub/qsub into ::_Util This way we will have less boilerplate in subsequent commits where we will use quote_sub standalone Add yet another test to make absolutely sure quote_sub won't leak any of the strictures insanity --- lib/DBIx/Class/Storage/BlockRunner.pm | 32 ++++++++++-------- lib/DBIx/Class/Storage/Statistics.pm | 7 ++-- lib/DBIx/Class/_Util.pm | 13 +++++++- t/53lean_startup.t | 2 +- xt/quote_sub.t | 48 +++++++++++++++++++++++++++ 5 files changed, 82 insertions(+), 20 deletions(-) create mode 100644 xt/quote_sub.t diff --git a/lib/DBIx/Class/Storage/BlockRunner.pm b/lib/DBIx/Class/Storage/BlockRunner.pm index 70ded7e18..d65595cb0 100644 --- a/lib/DBIx/Class/Storage/BlockRunner.pm +++ b/lib/DBIx/Class/Storage/BlockRunner.pm @@ -1,22 +1,26 @@ package # hide from pause until we figure it all out DBIx::Class::Storage::BlockRunner; +use warnings; use strict; +# DO NOT edit away without talking to riba first, he will just put it back +# BEGIN pre-Moo2 import block +BEGIN { + my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all}; + local $ENV{PERL_STRICTURES_EXTRA} = 0; + require Moo; Moo->import; + ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} ); +} +# END pre-Moo2 import block + use DBIx::Class::Exception; use DBIx::Class::Carp; use Context::Preserve 'preserve_context'; -use DBIx::Class::_Util 'is_exception'; +use DBIx::Class::_Util qw(is_exception qsub); use Scalar::Util qw(weaken blessed reftype); use Try::Tiny; -# DO NOT edit away without talking to riba first, he will just put it back -BEGIN { - local $ENV{PERL_STRICTURES_EXTRA} = 0; - require Moo; Moo->import; - require Sub::Quote; Sub::Quote->import('quote_sub'); -} -use warnings NONFATAL => 'all'; use namespace::clean; =head1 NAME @@ -43,16 +47,16 @@ has wrap_txn => ( has retry_handler => ( is => 'ro', required => 1, - isa => quote_sub( q{ + isa => qsub q{ (Scalar::Util::reftype($_[0])||'') eq 'CODE' or DBIx::Class::Exception->throw('retry_handler must be a CODE reference') - }), + }, ); has retry_debug => ( is => 'rw', # use a sub - to be evaluated on the spot lazily - default => quote_sub( '$ENV{DBIC_STORAGE_RETRY_DEBUG}' ), + default => qsub '$ENV{DBIC_STORAGE_RETRY_DEBUG}', lazy => 1, ); @@ -67,19 +71,19 @@ has failed_attempt_count => ( writer => '_set_failed_attempt_count', default => 0, lazy => 1, - trigger => quote_sub(q{ + 
trigger => qsub q{ $_[0]->throw_exception( sprintf ( 'Reached max_attempts amount of %d, latest exception: %s', $_[0]->max_attempts, $_[0]->last_exception )) if $_[0]->max_attempts <= ($_[1]||0); - }), + }, ); has exception_stack => ( is => 'ro', init_arg => undef, clearer => '_reset_exception_stack', - default => quote_sub(q{ [] }), + default => qsub q{ [] }, lazy => 1, ); diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index 0248936ec..e241ad429 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -1,21 +1,20 @@ package DBIx::Class::Storage::Statistics; + use strict; use warnings; # DO NOT edit away without talking to riba first, he will just put it back # BEGIN pre-Moo2 import block BEGIN { - require warnings; my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all}; local $ENV{PERL_STRICTURES_EXTRA} = 0; require Moo; Moo->import; - require Sub::Quote; Sub::Quote->import('quote_sub'); ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} ); } # END pre-Moo2 import block extends 'DBIx::Class'; -use DBIx::Class::_Util 'sigwarn_silencer'; +use DBIx::Class::_Util qw(sigwarn_silencer qsub); use namespace::clean; =head1 NAME @@ -64,7 +63,7 @@ sub debugfh { has _debugfh => ( is => 'rw', lazy => 1, - trigger => quote_sub( '$_[0]->_defaulted_to_stderr(undef)' ), + trigger => qsub '$_[0]->_defaulted_to_stderr(undef)', builder => '_build_debugfh', ); diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 8c2ef12b2..384d3e015 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -55,12 +55,23 @@ use Carp 'croak'; use Scalar::Util qw(weaken blessed reftype); use List::Util qw(first); +# DO NOT edit away without talking to riba first, he will just put it back +# BEGIN pre-Moo2 import block +BEGIN { + my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all}; + local $ENV{PERL_STRICTURES_EXTRA} = 0; + require Sub::Quote; Sub::Quote->import('quote_sub'); + ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} ); +} +sub qsub ($) { goto "e_sub } # no point depping on new Moo just for this +# END pre-Moo2 import block + use base 'Exporter'; our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray fail_on_internal_call refdesc refcount hrefaddr is_exception - perlstring + quote_sub qsub perlstring UNRESOLVABLE_CONDITION ); diff --git a/t/53lean_startup.t b/t/53lean_startup.t index 686819141..27a4dd486 100644 --- a/t/53lean_startup.t +++ b/t/53lean_startup.t @@ -99,6 +99,7 @@ BEGIN { namespace::clean Try::Tiny Sub::Name + Sub::Quote Scalar::Util List::Util @@ -117,7 +118,6 @@ BEGIN { { register_lazy_loadable_requires(qw( Moo - Sub::Quote Context::Preserve )); diff --git a/xt/quote_sub.t b/xt/quote_sub.t new file mode 100644 index 000000000..7918cc55a --- /dev/null +++ b/xt/quote_sub.t @@ -0,0 +1,48 @@ +use warnings; +use strict; + +use Test::More; +use Test::Warn; + +use DBIx::Class::_Util 'quote_sub'; + +my $q = do { + no strict 'vars'; + quote_sub '$x = $x . 
"buh"; $x += 42'; +}; + +warnings_exist { + is $q->(), 42, 'Expected result after uninit and string/num conversion' +} [ + qr/Use of uninitialized value/i, + qr/isn't numeric in addition/, +], 'Expected warnings, strict did not leak inside the qsub' + or do { + require B::Deparse; + diag( B::Deparse->new->coderef2text( Sub::Quote::unquote_sub($q) ) ) + } +; + +my $no_nothing_q = do { + no strict; + no warnings; + quote_sub <<'EOC'; + my $n = "Test::Warn::warnings_exist"; + warn "-->@{[ *{$n}{CODE} ]}<--\n"; + warn "-->@{[ ${^WARNING_BITS} || '' ]}<--\n"; +EOC +}; + +my $we_cref = Test::Warn->can('warnings_exist'); + +warnings_exist { $no_nothing_q->() } [ + qr/^\Q-->$we_cref<--\E$/m, + qr/^\-\-\>\0*\<\-\-$/m, # some perls have a string of nulls, some just an empty string +], 'Expected warnings, strict did not leak inside the qsub' + or do { + require B::Deparse; + diag( B::Deparse->new->coderef2text( Sub::Quote::unquote_sub($no_nothing_q) ) ) + } +; + +done_testing; From 6243d42b1ac9b614382e6126ca76a3a42953a7e9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 20 Jul 2014 13:47:09 +0200 Subject: [PATCH 105/548] Reformat add_relationship_accessor for future edit No functional changes, read diff under -w --- lib/DBIx/Class/Relationship/Accessor.pm | 74 ++++++++++++++----------- 1 file changed, 42 insertions(+), 32 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Accessor.pm b/lib/DBIx/Class/Relationship/Accessor.pm index fee162124..568e71fa8 100644 --- a/lib/DBIx/Class/Relationship/Accessor.pm +++ b/lib/DBIx/Class/Relationship/Accessor.pm @@ -23,17 +23,21 @@ sub register_relationship { sub add_relationship_accessor { my ($class, $rel, $acc_type) = @_; + my %meth; if ($acc_type eq 'single') { - my $rel_info = $class->relationship_info($rel); $meth{$rel} = sub { my $self = shift; + if (@_) { $self->set_from_related($rel, @_); return $self->{_relationship_data}{$rel} = $_[0]; - } elsif (exists $self->{_relationship_data}{$rel}) { + } + elsif (exists $self->{_relationship_data}{$rel}) { return $self->{_relationship_data}{$rel}; - } else { + } + else { + my $rel_info = $class->relationship_info($rel); my $cond = $self->result_source->_resolve_condition( $rel_info->{cond}, $rel, $self, $rel ); @@ -47,49 +51,55 @@ sub add_relationship_accessor { return $self->{_relationship_data}{$rel} = $val; } }; - } elsif ($acc_type eq 'filter') { + } + elsif ($acc_type eq 'filter') { $class->throw_exception("No such column '$rel' to filter") unless $class->has_column($rel); + my $f_class = $class->relationship_info($rel)->{class}; - $class->inflate_column($rel, - { inflate => sub { - my ($val, $self) = @_; - return $self->find_or_new_related($rel, {}, {}); - }, - deflate => sub { - my ($val, $self) = @_; - $self->throw_exception("'$val' isn't a $f_class") unless $val->isa($f_class); - # MASSIVE FIXME - this code assumes we pointed at the PK, but the belongs_to - # helper does not check any of this - # fixup the code a bit to make things saner, but ideally 'filter' needs to - # be deprecated ASAP and removed shortly after - # Not doing so before 0.08250 however, too many things in motion already - my ($pk_col, @rest) = $val->result_source->_pri_cols_or_die; - $self->throw_exception( - "Relationship '$rel' of type 'filter' can not work with a multicolumn primary key on source '$f_class'" - ) if @rest; + $class->inflate_column($rel, { + inflate => sub { + my ($val, $self) = @_; + return $self->find_or_new_related($rel, {}, {}); + }, + deflate => sub { + my ($val, $self) = @_; + 
$self->throw_exception("'$val' isn't a $f_class") unless $val->isa($f_class); - my $pk_val = $val->get_column($pk_col); - carp_unique ( - "Unable to deflate 'filter'-type relationship '$rel' (related object " - . "primary key not retrieved), assuming undef instead" - ) if ( ! defined $pk_val and $val->in_storage ); + # MASSIVE FIXME - this code assumes we pointed at the PK, but the belongs_to + # helper does not check any of this + # fixup the code a bit to make things saner, but ideally 'filter' needs to + # be deprecated ASAP and removed shortly after + # Not doing so before 0.08250 however, too many things in motion already + my ($pk_col, @rest) = $val->result_source->_pri_cols_or_die; + $self->throw_exception( + "Relationship '$rel' of type 'filter' can not work with a multicolumn primary key on source '$f_class'" + ) if @rest; + + my $pk_val = $val->get_column($pk_col); + carp_unique ( + "Unable to deflate 'filter'-type relationship '$rel' (related object " + . "primary key not retrieved), assuming undef instead" + ) if ( ! defined $pk_val and $val->in_storage ); + + return $pk_val; + }, + }); + } + elsif ($acc_type eq 'multi') { - return $pk_val; - } - } - ); - } elsif ($acc_type eq 'multi') { $meth{$rel} = sub { DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; shift->search_related($rel, @_) }; $meth{"${rel}_rs"} = sub { shift->search_related_rs($rel, @_) }; $meth{"add_to_${rel}"} = sub { shift->create_related($rel, @_); }; - } else { + } + else { $class->throw_exception("No such relationship accessor type '$acc_type'"); } + { no strict 'refs'; no warnings 'redefine'; From 8d73fcd44e0441f0252744be32bada6816c5ff6b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 18 Jul 2014 18:26:57 +0200 Subject: [PATCH 106/548] Replace many closure-based proxy methods with static qsubs Not only does this buy a quantum of performance, but it greatly enhances readability of these methods on deparse Deliberately not converting the ManyToMany helper - this code needs a thorough regut :( --- lib/DBIx/Class/CDBICompat/Constructor.pm | 24 +++++------ lib/DBIx/Class/CDBICompat/ImaDBI.pm | 36 +++++++--------- lib/DBIx/Class/CDBICompat/Relationship.pm | 16 +++---- lib/DBIx/Class/CDBICompat/Relationships.pm | 21 ++++------ lib/DBIx/Class/Relationship/Accessor.pm | 46 ++++++++------------- lib/DBIx/Class/Relationship/ProxyMethods.pm | 31 +++++++------- lib/DBIx/Class/ResultSourceProxy.pm | 15 +++---- lib/DBIx/Class/Schema.pm | 17 ++------ lib/DBIx/Class/Storage/DBI.pm | 39 ++++++++--------- lib/DBIx/Class/Storage/DBIHacks.pm | 1 - lib/DBIx/Class/_Util.pm | 2 + t/40compose_connection.t | 12 +----- t/55namespaces_cleaned.t | 2 + t/lib/DBICTest/Util/LeakTracer.pm | 29 +++++++++---- t/storage/error.t | 2 +- 15 files changed, 130 insertions(+), 163 deletions(-) diff --git a/lib/DBIx/Class/CDBICompat/Constructor.pm b/lib/DBIx/Class/CDBICompat/Constructor.pm index f2e78b925..65ce576f1 100644 --- a/lib/DBIx/Class/CDBICompat/Constructor.pm +++ b/lib/DBIx/Class/CDBICompat/Constructor.pm @@ -1,14 +1,13 @@ package # hide from PAUSE DBIx::Class::CDBICompat::Constructor; -use base qw(DBIx::Class::CDBICompat::ImaDBI); - -use Sub::Name(); - use strict; use warnings; +use base 'DBIx::Class::CDBICompat::ImaDBI'; + use Carp; +use DBIx::Class::_Util qw(quote_sub perlstring); __PACKAGE__->set_sql(Retrieve => <<''); SELECT __ESSENTIAL__ @@ -17,17 +16,16 @@ WHERE %s sub add_constructor { my ($class, $method, $fragment) = @_; - return croak("constructors needs a name") unless 
$method; - no strict 'refs'; - my $meth = "$class\::$method"; - return carp("$method already exists in $class") - if *$meth{CODE}; + croak("constructors needs a name") unless $method; + + carp("$method already exists in $class") && return + if $class->can($method); - *$meth = Sub::Name::subname $meth => sub { - my $self = shift; - $self->sth_to_objects($self->sql_Retrieve($fragment), \@_); - }; + quote_sub "${class}::${method}" => sprintf( <<'EOC', perlstring $fragment ); + my $self = shift; + $self->sth_to_objects($self->sql_Retrieve(%s), \@_); +EOC } 1; diff --git a/lib/DBIx/Class/CDBICompat/ImaDBI.pm b/lib/DBIx/Class/CDBICompat/ImaDBI.pm index aaa19a02e..10270b84f 100644 --- a/lib/DBIx/Class/CDBICompat/ImaDBI.pm +++ b/lib/DBIx/Class/CDBICompat/ImaDBI.pm @@ -4,7 +4,7 @@ package # hide from PAUSE use strict; use warnings; use DBIx::ContextualFetch; -use Sub::Name (); +use DBIx::Class::_Util qw(quote_sub perlstring); use base qw(Class::Data::Inheritable); @@ -81,26 +81,20 @@ sub __driver { sub set_sql { my ($class, $name, $sql) = @_; - no strict 'refs'; - my $sql_name = "sql_${name}"; - my $full_sql_name = join '::', $class, $sql_name; - *$full_sql_name = Sub::Name::subname $full_sql_name, - sub { - my $sql = $sql; - my $class = shift; - return $class->storage->dbh_do( - _prepare_sth => $class->transform_sql($sql, @_) - ); - }; - if ($sql =~ /select/i) { - my $search_name = "search_${name}"; - my $full_search_name = join '::', $class, $search_name; - *$full_search_name = Sub::Name::subname $full_search_name, - sub { - my ($class, @args) = @_; - my $sth = $class->$sql_name; - return $class->sth_to_objects($sth, \@args); - }; + + quote_sub "${class}::sql_${name}", sprintf( <<'EOC', perlstring $sql ); + my $class = shift; + return $class->storage->dbh_do( + _prepare_sth => $class->transform_sql(%s, @_) + ); +EOC + + + if ($sql =~ /select/i) { # FIXME - this should be anchore surely...? + quote_sub "${class}::search_${name}", sprintf( <<'EOC', "sql_$name" ); + my ($class, @args) = @_; + $class->sth_to_objects( $class->%s, \@args); +EOC } } diff --git a/lib/DBIx/Class/CDBICompat/Relationship.pm b/lib/DBIx/Class/CDBICompat/Relationship.pm index b0c10fae6..061469354 100644 --- a/lib/DBIx/Class/CDBICompat/Relationship.pm +++ b/lib/DBIx/Class/CDBICompat/Relationship.pm @@ -3,7 +3,8 @@ package use strict; use warnings; -use Sub::Name (); + +use DBIx::Class::_Util 'quote_sub'; =head1 NAME @@ -23,20 +24,13 @@ my %method2key = ( args => 'args', ); +quote_sub __PACKAGE__ . 
"::$_" => "\$_[0]->{$method2key{$_}}" + for keys %method2key; + sub new { my($class, $args) = @_; return bless $args, $class; } -for my $method (keys %method2key) { - my $key = $method2key{$method}; - my $code = sub { - $_[0]->{$key}; - }; - - no strict 'refs'; - *{$method} = Sub::Name::subname $method, $code; -} - 1; diff --git a/lib/DBIx/Class/CDBICompat/Relationships.pm b/lib/DBIx/Class/CDBICompat/Relationships.pm index 3ce3ef53d..66fe9734d 100644 --- a/lib/DBIx/Class/CDBICompat/Relationships.pm +++ b/lib/DBIx/Class/CDBICompat/Relationships.pm @@ -3,11 +3,11 @@ package # hide from PAUSE use strict; use warnings; -use Sub::Name (); -use base qw/Class::Data::Inheritable/; +use base 'Class::Data::Inheritable'; use Clone; use DBIx::Class::CDBICompat::Relationship; +use DBIx::Class::_Util qw(quote_sub perlstring); __PACKAGE__->mk_classdata('__meta_info' => {}); @@ -119,19 +119,14 @@ sub has_many { ); if (@f_method) { - no strict 'refs'; - no warnings 'redefine'; - my $post_proc = sub { my $o = shift; $o = $o->$_ for @f_method; $o; }; - my $name = join '::', $class, $rel; - *$name = Sub::Name::subname $name, - sub { - my $rs = shift->search_related($rel => @_); - $rs->{attrs}{record_filter} = $post_proc; - return (wantarray ? $rs->all : $rs); - }; + quote_sub "${class}::${rel}", sprintf( <<'EOC', perlstring $rel), { '$rf' => \sub { my $o = shift; $o = $o->$_ for @f_method; $o } }; + my $rs = shift->search_related( %s => @_); + $rs->{attrs}{record_filter} = $rf; + return (wantarray ? $rs->all : $rs); +EOC + return 1; } - } diff --git a/lib/DBIx/Class/Relationship/Accessor.pm b/lib/DBIx/Class/Relationship/Accessor.pm index 568e71fa8..aeefa84d7 100644 --- a/lib/DBIx/Class/Relationship/Accessor.pm +++ b/lib/DBIx/Class/Relationship/Accessor.pm @@ -3,9 +3,8 @@ package # hide from PAUSE use strict; use warnings; -use Sub::Name; use DBIx::Class::Carp; -use DBIx::Class::_Util 'fail_on_internal_wantarray'; +use DBIx::Class::_Util qw(quote_sub perlstring); use namespace::clean; our %_pod_inherit_config = @@ -24,33 +23,32 @@ sub register_relationship { sub add_relationship_accessor { my ($class, $rel, $acc_type) = @_; - my %meth; if ($acc_type eq 'single') { - $meth{$rel} = sub { + quote_sub "${class}::${rel}" => sprintf(<<'EOC', perlstring $rel); my $self = shift; if (@_) { - $self->set_from_related($rel, @_); - return $self->{_relationship_data}{$rel} = $_[0]; + $self->set_from_related( %1$s => @_ ); + return $self->{_relationship_data}{%1$s} = $_[0]; } - elsif (exists $self->{_relationship_data}{$rel}) { - return $self->{_relationship_data}{$rel}; + elsif (exists $self->{_relationship_data}{%1$s}) { + return $self->{_relationship_data}{%1$s}; } else { - my $rel_info = $class->relationship_info($rel); + my $rel_info = $self->result_source->relationship_info(%1$s); my $cond = $self->result_source->_resolve_condition( - $rel_info->{cond}, $rel, $self, $rel + $rel_info->{cond}, %1$s, $self, %1$s ); if ($rel_info->{attrs}->{undef_on_null_fk}){ return undef unless ref($cond) eq 'HASH'; - return undef if grep { not defined $_ } values %$cond; + return undef if grep { not defined $_ } values %%$cond; } - my $val = $self->find_related($rel, {}, {}); + my $val = $self->find_related( %1$s => {} ); return $val unless $val; # $val instead of undef so that null-objects can go through - return $self->{_relationship_data}{$rel} = $val; + return $self->{_relationship_data}{%1$s} = $val; } - }; +EOC } elsif ($acc_type eq 'filter') { $class->throw_exception("No such column '$rel' to filter") @@ -89,25 +87,17 @@ sub 
add_relationship_accessor { } elsif ($acc_type eq 'multi') { - $meth{$rel} = sub { - DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray; - shift->search_related($rel, @_) - }; - $meth{"${rel}_rs"} = sub { shift->search_related_rs($rel, @_) }; - $meth{"add_to_${rel}"} = sub { shift->create_related($rel, @_); }; + quote_sub "${class}::${rel}_rs", "shift->search_related_rs( $rel => \@_ )"; + quote_sub "${class}::add_to_${rel}", "shift->create_related( $rel => \@_ )"; + quote_sub "${class}::${rel}", sprintf( <<'EOC', perlstring $rel ); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = DBIx::Class::_Util::fail_on_internal_wantarray; + shift->search_related( %s => @_ ) +EOC } else { $class->throw_exception("No such relationship accessor type '$acc_type'"); } - { - no strict 'refs'; - no warnings 'redefine'; - foreach my $meth (keys %meth) { - my $name = join '::', $class, $meth; - *$name = subname($name, $meth{$meth}); - } - } } 1; diff --git a/lib/DBIx/Class/Relationship/ProxyMethods.pm b/lib/DBIx/Class/Relationship/ProxyMethods.pm index 6f204f617..0db5780da 100644 --- a/lib/DBIx/Class/Relationship/ProxyMethods.pm +++ b/lib/DBIx/Class/Relationship/ProxyMethods.pm @@ -3,8 +3,9 @@ package # hide from PAUSE use strict; use warnings; -use Sub::Name (); -use base qw/DBIx::Class/; +use base 'DBIx::Class'; +use DBIx::Class::_Util 'quote_sub'; +use namespace::clean; our %_pod_inherit_config = ( @@ -22,21 +23,17 @@ sub register_relationship { sub proxy_to_related { my ($class, $rel, $proxy_args) = @_; my %proxy_map = $class->_build_proxy_map_from($proxy_args); - no strict 'refs'; - no warnings 'redefine'; - foreach my $meth_name ( keys %proxy_map ) { - my $proxy_to_col = $proxy_map{$meth_name}; - my $name = join '::', $class, $meth_name; - *$name = Sub::Name::subname $name => sub { - my $self = shift; - my $relobj = $self->$rel; - if (@_ && !defined $relobj) { - $relobj = $self->create_related($rel, { $proxy_to_col => $_[0] }); - @_ = (); - } - return ($relobj ? $relobj->$proxy_to_col(@_) : undef); - } - } + + quote_sub "${class}::$_", sprintf( <<'EOC', $rel, $proxy_map{$_} ) + my $self = shift; + my $relobj = $self->%1$s; + if (@_ && !defined $relobj) { + $relobj = $self->create_related( %1$s => { %2$s => $_[0] } ); + @_ = (); + } + $relobj ? 
$relobj->%2$s(@_) : undef; +EOC + for keys %proxy_map } sub _build_proxy_map_from { diff --git a/lib/DBIx/Class/ResultSourceProxy.pm b/lib/DBIx/Class/ResultSourceProxy.pm index c3bef1511..db4337a95 100644 --- a/lib/DBIx/Class/ResultSourceProxy.pm +++ b/lib/DBIx/Class/ResultSourceProxy.pm @@ -4,9 +4,10 @@ package # hide from PAUSE use strict; use warnings; -use base qw/DBIx::Class/; -use Scalar::Util qw/blessed/; -use Sub::Name qw/subname/; +use base 'DBIx::Class'; + +use Scalar::Util 'blessed'; +use DBIx::Class::_Util 'quote_sub'; use namespace::clean; __PACKAGE__->mk_group_accessors('inherited_ro_instance' => 'source_name'); @@ -80,10 +81,10 @@ for my $method_to_proxy (qw/ relationship_info has_relationship /) { - no strict qw/refs/; - *{__PACKAGE__."::$method_to_proxy"} = subname $method_to_proxy => sub { - shift->result_source_instance->$method_to_proxy (@_); - }; + quote_sub + __PACKAGE__."::$method_to_proxy" + => "shift->result_source_instance->$method_to_proxy (\@_);" + ; } 1; diff --git a/lib/DBIx/Class/Schema.pm b/lib/DBIx/Class/Schema.pm index 4c3cce50e..c83dc87d0 100644 --- a/lib/DBIx/Class/Schema.pm +++ b/lib/DBIx/Class/Schema.pm @@ -8,8 +8,7 @@ use base 'DBIx::Class'; use DBIx::Class::Carp; use Try::Tiny; use Scalar::Util qw/weaken blessed/; -use DBIx::Class::_Util 'refcount'; -use Sub::Name 'subname'; +use DBIx::Class::_Util qw(refcount quote_sub); use Devel::GlobalDestruction; use namespace::clean; @@ -897,7 +896,6 @@ sub compose_namespace { local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO; use warnings qw/redefine/; - no strict qw/refs/; foreach my $source_name ($self->sources) { my $orig_source = $self->source($source_name); @@ -919,11 +917,8 @@ sub compose_namespace { } } - foreach my $meth (qw/class source resultset/) { - no warnings 'redefine'; - *{"${target}::${meth}"} = subname "${target}::${meth}" => - sub { shift->schema->$meth(@_) }; - } + quote_sub "${target}::${_}" => "shift->schema->$_(\@_)" + for qw(class source resultset); } Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO; @@ -1497,11 +1492,7 @@ sub compose_connection { } my $schema = $self->compose_namespace($target, $base); - { - no strict 'refs'; - my $name = join '::', $target, 'schema'; - *$name = subname $name, sub { $schema }; - } + quote_sub "${target}::schema", '$s', { '$s' => \$schema }; $schema->connection(@info); foreach my $source_name ($schema->sources) { diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 230a849fa..18dbbb994 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -10,11 +10,11 @@ use mro 'c3'; use DBIx::Class::Carp; use Scalar::Util qw/refaddr weaken reftype blessed/; use List::Util qw/first/; -use Sub::Name 'subname'; use Context::Preserve 'preserve_context'; use Try::Tiny; use Data::Compare (); # no imports!!! guard against insane architecture use SQL::Abstract qw(is_plain_value is_literal_value); +use DBIx::Class::_Util qw(quote_sub perlstring); use namespace::clean; # default cursor class, overridable in connect_info attributes @@ -119,9 +119,11 @@ for my $meth (keys %$storage_accessor_idx, qw( my $orig = __PACKAGE__->can ($meth) or die "$meth is not a ::Storage::DBI method!"; - no strict 'refs'; - no warnings 'redefine'; - *{__PACKAGE__ ."::$meth"} = subname $meth => sub { + my $is_getter = $storage_accessor_idx->{$meth} ? 
0 : 1; + + quote_sub + __PACKAGE__ ."::$meth", sprintf( <<'EOC', $is_getter, perlstring $meth ), { '$orig' => \$orig }; + if ( # only fire when invoked on an instance, a valid class-based invocation # would e.g. be setting a default for an inherited accessor @@ -133,7 +135,7 @@ for my $meth (keys %$storage_accessor_idx, qw( and # if this is a known *setter* - just set it, no need to connect # and determine the driver - ! ( $storage_accessor_idx->{$meth} and @_ > 1 ) + ( %1$s or @_ <= 1 ) and # Only try to determine stuff if we have *something* that either is or can # provide a DSN. Allows for bare $schema's generated with a plain ->connect() @@ -142,16 +144,15 @@ for my $meth (keys %$storage_accessor_idx, qw( ) { $_[0]->_determine_driver; - # This for some reason crashes and burns on perl 5.8.1 - # IFF the method ends up throwing an exception - #goto $_[0]->can ($meth); + # work around http://rt.perl.org/rt3/Public/Bug/Display.html?id=35878 + goto $_[0]->can(%2$s) unless DBIx::Class::_ENV_::BROKEN_GOTO; - my $cref = $_[0]->can ($meth); + my $cref = $_[0]->can(%2$s); goto $cref; } goto $orig; - }; +EOC } =head1 NAME @@ -1633,17 +1634,13 @@ sub _exec_txn_rollback { shift->_dbh->rollback; } -# generate some identical methods -for my $meth (qw/svp_begin svp_release svp_rollback/) { - no strict qw/refs/; - *{__PACKAGE__ ."::$meth"} = subname $meth => sub { - my $self = shift; - $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK; - $self->throw_exception("Unable to $meth() on a disconnected storage") - unless $self->_dbh; - $self->next::method(@_); - }; -} +# generate the DBI-specific stubs, which then fallback to ::Storage proper +quote_sub __PACKAGE__ . "::$_" => sprintf (<<'EOS', $_) for qw(svp_begin svp_release svp_rollback); + $_[0]->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK; + $_[0]->throw_exception('Unable to %s() on a disconnected storage') + unless $_[0]->_dbh; + shift->next::method(@_); +EOS # This used to be the top-half of _execute. It was split out to make it # easier to override in NoBindVars without duping the rest. It takes up diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index ef890921d..26f8dcac1 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -15,7 +15,6 @@ use mro 'c3'; use List::Util 'first'; use Scalar::Util 'blessed'; -use Sub::Name 'subname'; use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use SQL::Abstract qw(is_plain_value is_literal_value); use namespace::clean; diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 384d3e015..83bca471c 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -17,6 +17,8 @@ BEGIN { # but of course BROKEN_FORK => ($^O eq 'MSWin32') ? 1 : 0, + BROKEN_GOTO => ($] < '5.008003') ? 1 : 0, + HAS_ITHREADS => $Config{useithreads} ? 
1 : 0, # ::Runmode would only be loaded by DBICTest, which in turn implies t/ diff --git a/t/40compose_connection.t b/t/40compose_connection.t index 6cd62fff0..a68a2c26c 100644 --- a/t/40compose_connection.t +++ b/t/40compose_connection.t @@ -16,15 +16,7 @@ warnings_exist { DBICTest->init_schema( compose_connection => 1, sqlite_use_file cmp_ok(DBICTest->resultset('Artist')->count, '>', 0, 'count is valid'); -# cleanup globals so we do not trigger the leaktest -for ( map { DBICTest->schema->class($_) } DBICTest->schema->sources ) { - $_->class_resolver(undef); - $_->resultset_instance(undef); - $_->result_source_instance(undef); -} -{ - no warnings qw/redefine once/; - *DBICTest::schema = sub {}; -} +# cleanup globaly cached handle so we do not trigger the leaktest +DBICTest->schema->storage->disconnect; done_testing; diff --git a/t/55namespaces_cleaned.t b/t/55namespaces_cleaned.t index 042806977..a8a966df7 100644 --- a/t/55namespaces_cleaned.t +++ b/t/55namespaces_cleaned.t @@ -93,6 +93,8 @@ my $skip_idx = { map { $_ => 1 } ( my $has_moose = eval { require Moose::Util }; +Sub::Defer::undefer_all(); + # can't use Class::Inspector for the mundane parts as it does not # distinguish imports from anything else, what a crock of... # Moose is not always available either - hence just do it ourselves diff --git a/t/lib/DBICTest/Util/LeakTracer.pm b/t/lib/DBICTest/Util/LeakTracer.pm index d0c29eba8..1a56f415c 100644 --- a/t/lib/DBICTest/Util/LeakTracer.pm +++ b/t/lib/DBICTest/Util/LeakTracer.pm @@ -164,7 +164,7 @@ sub visit_namespaces { $visited += visit_namespaces({ %$args, package => $_ }) for map - { $_ =~ /(.+?)::$/ && "${base}::$1" } + { $_ =~ /(.+?)::$/ ? "${base}::$1" : () } grep { $_ =~ /(?{$addr}{weakref} and ! isweak( $weak_registry->{$addr}{weakref} ); } - # the walk is very expensive - if we are $quiet (running in an END block) - # we do not really need to be too thorough - unless ($quiet) { - delete $weak_registry->{$_} for keys %{ symtable_referenced_addresses() }; - } - + # the symtable walk is very expensive + # if we are $quiet (running in an END block) we do not really need to be + # that thorough - can get by with only %Sub::Quote::QUOTED + delete $weak_registry->{$_} for $quiet + ? do { + my $refs = {}; + visit_refs ( + # only look at the closed over stuffs + refs => [ grep { length ref $_ } map { values %{$_->[2]} } grep { ref $_ eq 'ARRAY' } values %Sub::Quote::QUOTED ], + seen_refs => $refs, + action => sub { 1 }, + ); + keys %$refs; + } + : ( + # full sumtable walk, starting from :: + keys %{ symtable_referenced_addresses() } + ) + ; for my $addr (sort { $weak_registry->{$a}{display_name} cmp $weak_registry->{$b}{display_name} } keys %$weak_registry) { diff --git a/t/storage/error.t b/t/storage/error.t index 61d678237..6c9b15cd9 100644 --- a/t/storage/error.t +++ b/t/storage/error.t @@ -38,7 +38,7 @@ throws_ok ( # exception fallback: SKIP: { - if (DBIx::Class::_ENV_::PEEPEENESS) { + if ( !!DBIx::Class::_ENV_::PEEPEENESS ) { skip "Your perl version $] appears to leak like a sieve - skipping garbage collected \$schema test", 1; } From 12b348d9331e09e2a7ab55bf57fbc33476cf174c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 21 Jul 2014 23:45:50 +0200 Subject: [PATCH 107/548] Rebase Oracle datetime inflation test on top of DBICTest::Schema::Event The butchering of Track made sense in 2007, not so much today The test seems to behave identically, hope I didn't fuck up anything... 
https://travis-ci.org/dbsrgits/dbix-class/builds/30513847#L398 --- t/inflate/datetime_oracle.t | 63 +++++++++++++++++++------------------ 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/t/inflate/datetime_oracle.t b/t/inflate/datetime_oracle.t index 26a535777..9182f2356 100644 --- a/t/inflate/datetime_oracle.t +++ b/t/inflate/datetime_oracle.t @@ -14,7 +14,7 @@ my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/}; if (not ($dsn && $user && $pass)) { plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' . - 'Warning: This test drops and creates a table called \'track\''; + 'Warning: This test drops and creates a table called \'event\''; } # DateTime::Format::Oracle needs this set @@ -32,21 +32,25 @@ my $timestamp_datatype = ($schema->storage->_server_info->{normalized_dbms_versi : 'TIMESTAMP' ; -# Need to redefine the last_updated_on column -my $col_metadata = $schema->class('Track')->column_info('last_updated_on'); -$schema->class('Track')->add_column( 'last_updated_on' => { - data_type => 'date' }); -$schema->class('Track')->add_column( 'last_updated_at' => { - data_type => $timestamp_datatype }); - my $dbh = $schema->storage->dbh; #$dbh->do("alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SSXFF'"); eval { - $dbh->do("DROP TABLE track"); + $dbh->do("DROP TABLE event"); }; -$dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at $timestamp_datatype)"); +$dbh->do(<create({ trackid => 1, cd => 1, position => 1, title => 'Track1', last_updated_on => '06-MAY-07', last_updated_at => '2009-05-03 21:17:18.5' }); -is($new->trackid, 1, "insert sucessful"); +my $new = $schema->resultset('Event')->create({ id => 1, starts_at => '06-MAY-07', created_on => '2009-05-03 21:17:18.5' }); +is($new->id, 1, "insert sucessful"); -my $track = $schema->resultset('Track')->find( 1 ); +my $event = $schema->resultset('Event')->find( 1 ); -is( ref($track->last_updated_on), 'DateTime', "last_updated_on inflated ok"); +is( ref($event->starts_at), 'DateTime', "starts_at inflated ok"); -is( $track->last_updated_on->month, 5, "DateTime methods work on inflated column"); +is( $event->starts_at->month, 5, "DateTime methods work on inflated column"); -#note '$track->last_updated_at => ', $track->last_updated_at; -is( ref($track->last_updated_at), 'DateTime', "last_updated_at inflated ok"); +is( ref($event->created_on), 'DateTime', "created_on inflated ok"); -is( $track->last_updated_at->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision"); +is( $event->created_on->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision"); my $dt = DateTime->now(); -$track->last_updated_on($dt); -$track->last_updated_at($dt); -$track->update; +$event->starts_at($dt); +$event->created_on($dt); +$event->update; -is( $track->last_updated_on->month, $dt->month, "deflate ok"); -is( int $track->last_updated_at->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision"); +is( $event->starts_at->month, $dt->month, "deflate ok"); +is( int $event->created_on->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision"); # test datetime_setup @@ -93,15 +96,15 @@ $dt = DateTime->now(); my $timestamp = $dt->clone; $timestamp->set_nanosecond( int 500_000_000 ); -$track = $schema->resultset('Track')->find( 1 ); -$track->update({ last_updated_on => $dt, last_updated_at => $timestamp }); +$event = 
$schema->resultset('Event')->find( 1 ); +$event->update({ starts_at => $dt, created_on => $timestamp }); -$track = $schema->resultset('Track')->find(1); +$event = $schema->resultset('Event')->find(1); -is( $track->last_updated_on, $dt, 'DateTime round-trip as DATE' ); -is( $track->last_updated_at, $timestamp, 'DateTime round-trip as TIMESTAMP' ); +is( $event->starts_at, $dt, 'DateTime round-trip as DATE' ); +is( $event->created_on, $timestamp, 'DateTime round-trip as TIMESTAMP' ); -is( int $track->last_updated_at->nanosecond, int 500_000_000, +is( int $event->created_on->nanosecond, int 500_000_000, 'TIMESTAMP nanoseconds survived' ); } 'dateteime operations executed correctly'; @@ -111,7 +114,7 @@ done_testing; # clean up our mess END { if($schema && (my $dbh = $schema->storage->dbh)) { - $dbh->do("DROP TABLE track"); + $dbh->do("DROP TABLE event"); } undef $schema; } From 4006691d207a6c257012c4b9a07d674b211349b0 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 21 Jul 2014 12:41:46 +0200 Subject: [PATCH 108/548] Avoid ResultSourceProxy calls whenever possible Along with efficiency gains this commit makes a very subtle but crucially important change: From here now on when we operate on an instance, we are guaranteed to query this instance's result source. The previous codepaths would nearly randomly switch between the current rsrc instance and the one registered with the corresponding result class. This will allow for proper synthetic result instance construction further on --- Changes | 2 ++ lib/DBIx/Class/FilterColumn.pm | 22 ++++++------- lib/DBIx/Class/InflateColumn.pm | 6 ++-- lib/DBIx/Class/InflateColumn/File.pm | 14 ++++++--- lib/DBIx/Class/Relationship/Base.pm | 6 ++-- lib/DBIx/Class/ResultSourceProxy.pm | 9 +++--- lib/DBIx/Class/Row.pm | 46 ++++++++++++++++++---------- 7 files changed, 62 insertions(+), 43 deletions(-) diff --git a/Changes b/Changes index 6cd1d0158..d69a07122 100644 --- a/Changes +++ b/Changes @@ -40,6 +40,8 @@ Revision history for DBIx::Class savepoints on DBD::SQLite < 1.39 * Misc + - Ensure source metadata calls always take place on the result source + instance registered with the caller - IFF DBIC_TRACE output defaults to STDERR we now silence the possible wide-char warnings if the trace happens to contain unicode - Remove ::ResultSource::resolve_condition - the underlying machinery diff --git a/lib/DBIx/Class/FilterColumn.pm b/lib/DBIx/Class/FilterColumn.pm index 222dabd37..2e8fbd527 100644 --- a/lib/DBIx/Class/FilterColumn.pm +++ b/lib/DBIx/Class/FilterColumn.pm @@ -34,7 +34,7 @@ sub _column_from_storage { return $value if is_literal_value($value); - my $info = $self->column_info($col) + my $info = $self->result_source->column_info($col) or $self->throw_exception("No column info for $col"); return $value unless exists $info->{_filter_info}; @@ -49,7 +49,7 @@ sub _column_to_storage { return $value if is_literal_value($value); - my $info = $self->column_info($col) or + my $info = $self->result_source->column_info($col) or $self->throw_exception("No column info for $col"); return $value unless exists $info->{_filter_info}; @@ -63,7 +63,7 @@ sub get_filtered_column { my ($self, $col) = @_; $self->throw_exception("$col is not a filtered column") - unless exists $self->column_info($col)->{_filter_info}; + unless exists $self->result_source->column_info($col)->{_filter_info}; return $self->{_filtered_column}{$col} if exists $self->{_filtered_column}{$col}; @@ -140,12 +140,10 @@ sub set_filtered_column { sub update { my ($self, $data, @rest) = @_; + my 
$colinfos = $self->result_source->columns_info; + foreach my $col (keys %{$data||{}}) { - if ( - $self->has_column($col) - && - exists $self->column_info($col)->{_filter_info} - ) { + if ( exists $colinfos->{$col}{_filter_info} ) { $self->set_filtered_column($col, delete $data->{$col}); # FIXME update() reaches directly into the object-hash @@ -160,14 +158,16 @@ sub update { sub new { my ($class, $data, @rest) = @_; - my $source = $data->{-result_source} + + my $rsrc = $data->{-result_source} or $class->throw_exception('Sourceless rows are not supported with DBIx::Class::FilterColumn'); my $obj = $class->next::method($data, @rest); + my $colinfos = $rsrc->columns_info; + foreach my $col (keys %{$data||{}}) { - if ($obj->has_column($col) && - exists $obj->column_info($col)->{_filter_info} ) { + if (exists $colinfos->{$col}{_filter_info} ) { $obj->set_filtered_column($col, $data->{$col}); } } diff --git a/lib/DBIx/Class/InflateColumn.pm b/lib/DBIx/Class/InflateColumn.pm index e10af303d..b235a4dd5 100644 --- a/lib/DBIx/Class/InflateColumn.pm +++ b/lib/DBIx/Class/InflateColumn.pm @@ -111,7 +111,7 @@ sub _inflated_column { is_literal_value($value) #that would be a not-yet-reloaded literal update ); - my $info = $self->column_info($col) + my $info = $self->result_source->column_info($col) or $self->throw_exception("No column info for $col"); return $value unless exists $info->{_inflate_info}; @@ -133,7 +133,7 @@ sub _deflated_column { is_literal_value($value) ); - my $info = $self->column_info($col) or + my $info = $self->result_source->column_info($col) or $self->throw_exception("No column info for $col"); return $value unless exists $info->{_inflate_info}; @@ -160,7 +160,7 @@ sub get_inflated_column { my ($self, $col) = @_; $self->throw_exception("$col is not an inflated column") - unless exists $self->column_info($col)->{_inflate_info}; + unless exists $self->result_source->column_info($col)->{_inflate_info}; # we take care of keeping things in sync return $self->{_inflated_column}{$col} diff --git a/lib/DBIx/Class/InflateColumn/File.pm b/lib/DBIx/Class/InflateColumn/File.pm index aa06dbc84..195e6efb5 100644 --- a/lib/DBIx/Class/InflateColumn/File.pm +++ b/lib/DBIx/Class/InflateColumn/File.pm @@ -43,7 +43,7 @@ sub register_column { sub _file_column_file { my ($self, $column, $filename) = @_; - my $column_info = $self->column_info($column); + my $column_info = $self->result_source->column_info($column); return unless $column_info->{is_file_column}; @@ -60,8 +60,10 @@ sub _file_column_file { sub delete { my ( $self, @rest ) = @_; - for ( $self->columns ) { - if ( $self->column_info($_)->{is_file_column} ) { + my $colinfos = $self->result_source->columns_info; + + for ( keys %$colinfos ) { + if ( $colinfos->{$_}{is_file_column} ) { rmtree( [$self->_file_column_file($_)->dir], 0, 0 ); last; # if we've deleted one, we've deleted them all } @@ -75,9 +77,11 @@ sub insert { # cache our file columns so we can write them to the fs # -after- we have a PK + my $colinfos = $self->result_source->columns_info; + my %file_column; - for ( $self->columns ) { - if ( $self->column_info($_)->{is_file_column} ) { + for ( keys %$colinfos ) { + if ( $colinfos->{$_}{is_file_column} ) { $file_column{$_} = $self->$_; $self->store_column($_ => $self->$_->{filename}); } diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index e0007ffee..ab7f33cd7 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -475,7 +475,9 @@ sub related_resultset { return 
$self->{related_resultsets}{$rel} = do { - my $rel_info = $self->relationship_info($rel) + my $rsrc = $self->result_source; + + my $rel_info = $rsrc->relationship_info($rel) or $self->throw_exception( "No such relationship '$rel'" ); my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {}); @@ -485,8 +487,6 @@ sub related_resultset { if (@_ > 1 && (@_ % 2 == 1)); my $query = ((@_ > 1) ? {@_} : shift); - my $rsrc = $self->result_source; - # condition resolution may fail if an incomplete master-object prefetch # is encountered - that is ok during prefetch construction (not yet in_storage) my ($cond, $is_crosstable) = try { diff --git a/lib/DBIx/Class/ResultSourceProxy.pm b/lib/DBIx/Class/ResultSourceProxy.pm index db4337a95..1e1f307d3 100644 --- a/lib/DBIx/Class/ResultSourceProxy.pm +++ b/lib/DBIx/Class/ResultSourceProxy.pm @@ -81,10 +81,11 @@ for my $method_to_proxy (qw/ relationship_info has_relationship /) { - quote_sub - __PACKAGE__."::$method_to_proxy" - => "shift->result_source_instance->$method_to_proxy (\@_);" - ; + quote_sub __PACKAGE__."::$method_to_proxy", sprintf( <<'EOC', $method_to_proxy ); + DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and DBIx::Class::_Util::fail_on_internal_call; + shift->result_source_instance->%s (@_); +EOC + } 1; diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index d856ab51d..d356218f5 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -257,8 +257,12 @@ sub new { } $inflated->{$key} = $rel_obj; next; - } elsif ($class->has_column($key) - && $class->column_info($key)->{_inflate_info}) { + } + elsif ( + $rsrc->has_column($key) + and + $rsrc->column_info($key)->{_inflate_info} + ) { $inflated->{$key} = $attrs->{$key}; next; } @@ -672,7 +676,7 @@ sub get_column { } $self->throw_exception( "No such column '${column}' on " . ref $self ) - unless $self->has_column($column); + unless $self->result_source->has_column($column); return undef; } @@ -800,7 +804,7 @@ sub make_column_dirty { my ($self, $column) = @_; $self->throw_exception( "No such column '${column}' on " . ref $self ) - unless exists $self->{_column_data}{$column} || $self->has_column($column); + unless exists $self->{_column_data}{$column} || $self->result_source->has_column($column); # the entire clean/dirty code relies on exists, not on true/false return 1 if exists $self->{_dirty_columns}{$column}; @@ -842,9 +846,9 @@ See L for how to setup inflation. sub get_inflated_columns { my $self = shift; - my $loaded_colinfo = $self->columns_info ([ - grep { $self->has_column_loaded($_) } $self->columns - ]); + my $loaded_colinfo = $self->result_source->columns_info; + $self->has_column_loaded($_) or delete $loaded_colinfo->{$_} + for keys %$loaded_colinfo; my %cols_to_return = ( %{$self->{_column_data}}, %$loaded_colinfo ); @@ -887,7 +891,7 @@ sub get_inflated_columns { sub _is_column_numeric { my ($self, $column) = @_; - my $colinfo = $self->column_info ($column); + my $colinfo = $self->result_source->column_info ($column); # cache for speed (the object may *not* have a resultsource instance) if ( @@ -1018,7 +1022,7 @@ sub _eq_column_values { # value tracked between column changes and commitment to storage sub _track_storage_value { my ($self, $col) = @_; - return defined first { $col eq $_ } ($self->primary_columns); + return defined first { $col eq $_ } ($self->result_source->primary_columns); } =head2 set_columns @@ -1080,9 +1084,11 @@ See also L. 
sub set_inflated_columns { my ( $self, $upd ) = @_; + my $rsrc; foreach my $key (keys %$upd) { if (ref $upd->{$key}) { - my $info = $self->relationship_info($key); + $rsrc ||= $self->result_source; + my $info = $rsrc->relationship_info($key); my $acc_type = $info->{attrs}{accessor} || ''; if ($acc_type eq 'single') { @@ -1095,7 +1101,11 @@ sub set_inflated_columns { "Recursive update is not supported over relationships of type '$acc_type' ($key)" ); } - elsif ($self->has_column($key) && exists $self->column_info($key)->{_inflate_info}) { + elsif ( + $rsrc->has_column($key) + and + exists $rsrc->column_info($key)->{_inflate_info} + ) { $self->set_inflated_column($key, delete $upd->{$key}); } } @@ -1135,7 +1145,9 @@ sub copy { $changes ||= {}; my $col_data = { %{$self->{_column_data}} }; - my $colinfo = $self->columns_info([ keys %$col_data ]); + my $rsrc = $self->result_source; + + my $colinfo = $rsrc->columns_info([ keys %$col_data ]); foreach my $col (keys %$col_data) { delete $col_data->{$col} if $colinfo->{$col}{is_auto_increment}; @@ -1144,7 +1156,7 @@ sub copy { my $new = { _column_data => $col_data }; bless $new, ref $self; - $new->result_source($self->result_source); + $new->result_source($rsrc); $new->set_inflated_columns($changes); $new->insert; @@ -1153,12 +1165,12 @@ sub copy { # constraints my $rel_names_copied = {}; - foreach my $rel_name ($self->result_source->relationships) { - my $rel_info = $self->result_source->relationship_info($rel_name); + foreach my $rel_name ($rsrc->relationships) { + my $rel_info = $rsrc->relationship_info($rel_name); next unless $rel_info->{attrs}{cascade_copy}; - my $resolved = $self->result_source->_resolve_condition( + my $resolved = $rsrc->_resolve_condition( $rel_info->{cond}, $rel_name, $new, $rel_name ); @@ -1198,7 +1210,7 @@ extend this method to catch all data setting methods. sub store_column { my ($self, $column, $value) = @_; $self->throw_exception( "No such column '${column}' on " . ref $self ) - unless exists $self->{_column_data}{$column} || $self->has_column($column); + unless exists $self->{_column_data}{$column} || $self->result_source->has_column($column); $self->throw_exception( "set_column called for ${column} without value" ) if @_ < 3; return $self->{_column_data}{$column} = $value; From 350e8d57bf21e4006e2a5e5c26648cb5ca4903ea Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 21 Jul 2014 18:45:06 +0200 Subject: [PATCH 109/548] Re-fix relcond resolver: revert 5592d633 (in turn partial revert of 03f6d1f7) I had the right hunch during 03f6d1f7 but could not substantiate it: the issue is that the custom coderef expects objects or nothing. Yet here and there internals pass around bare hashes of data (or sometimes even undef). Asking users to complicate their coderefs further is just not an option - it is mindbending enough as it is. So the only way to go forward is indeed to create "synthetic result objects" and pass them down the stack. This time however there is a twist - after the overhaul in 4006691d we now *can* indeed construct such objects on top of the bare DBIx::Class::Core - in other words mission fucking accomplished. This commit *may* need to be reverted in case it turns out that 4006691d is a no-go (check the test change to t/inflate/datetime_oracle.t in 12b348d9 for an example of what was taken for granted wrt direct $class-> calls) If this is the case - not all is lost. We should be able to use a hidden class with an actual source instance that we would ammend on the fly... 
But let's hope we will never get to this bridge :( --- lib/DBIx/Class/ResultSource.pm | 44 ++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 6a5bbc9b7..cc865b4eb 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1786,24 +1786,46 @@ sub _resolve_relationship_condition { if !defined $args->{$_} or length ref $args->{$_}; } - my $rel_info = $self->relationship_info($args->{rel_name}); - # or $self->throw_exception( "No such relationship '$args->{rel_name}'" ); - my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'"; + my $rel_info = $self->relationship_info($args->{rel_name}); + # or $self->throw_exception( "No such $exception_rel_id" ); + $self->throw_exception("No practical way to resolve $exception_rel_id between two objects") if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; + $self->throw_exception( "Argument to infer_values_based_on must be a hash" ) + if exists $args->{infer_values_based_on} and ref $args->{infer_values_based_on} ne 'HASH'; - $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) - if defined blessed $args->{foreign_resultobj} and ! $args->{foreign_resultobj}->isa($rel_info->{class}); + $args->{require_join_free_condition} ||= !!$args->{infer_values_based_on}; $args->{condition} ||= $rel_info->{cond}; - $self->throw_exception( "Argument to infer_values_based_on must be a hash" ) - if exists $args->{infer_values_based_on} and ref $args->{infer_values_based_on} ne 'HASH'; + if (exists $args->{self_resultobj}) { + if (defined blessed $args->{self_resultobj}) { +# $self->throw_exception( "Object '$args->{self_resultobj}' must be of class '@{[ $self->result_class ]}'" ) +# unless $args->{self_resultobj}->isa($self->result_class); + } + else { + $args->{self_resultobj} = DBIx::Class::Core->new({ + -result_source => $self, + %{ $args->{self_resultobj}||{} } + }); + } + } - $args->{require_join_free_condition} ||= !!$args->{infer_values_based_on}; + if (exists $args->{foreign_resultobj}) { + if (defined blessed $args->{foreign_resultobj}) { +# $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) +# unless $args->{foreign_resultobj}->isa($rel_info->{class}); + } + else { + $args->{foreign_resultobj} = DBIx::Class::Core->new({ + -result_source => $self->related_source($args->{rel_name}), + %{ $args->{foreign_resultobj}||{} } + }); + } + } my $ret; @@ -1900,11 +1922,7 @@ sub _resolve_relationship_condition { for my $i (0..$#$obj_cols) { - # FIXME - temp shim - if (! blessed $obj) { - $ret->{join_free_condition}{"$plain_alias.$plain_cols->[$i]"} = $obj->{$obj_cols->[$i]}; - } - elsif ( + if ( defined $args->{self_resultobj} and ! 
$obj->has_column_loaded($obj_cols->[$i]) From c19ca6e801814466346f4b0f3793303561438312 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 23 Jul 2014 20:56:54 +0200 Subject: [PATCH 110/548] One more output for the resolver - used in next commit Note that the column equivalency map is calculated for both types of rels, which is crucial down the road --- lib/DBIx/Class/ResultSource.pm | 52 ++++++++++++++++++++++++++++++---- 1 file changed, 46 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index cc865b4eb..c3a318ff3 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -10,6 +10,7 @@ use DBIx::Class::ResultSourceHandle; use DBIx::Class::Carp; use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; +use SQL::Abstract 'is_literal_value'; use Devel::GlobalDestruction; use Try::Tiny; use List::Util 'first'; @@ -1773,8 +1774,9 @@ Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); # ## returns a hash # condition -# join_free_condition (maybe undef) -# inferred_values (maybe undef, always complete or empty) +# identity_map +# join_free_condition (maybe unset) +# inferred_values (always either complete or unset) # sub _resolve_relationship_condition { my $self = shift; @@ -1907,10 +1909,10 @@ sub _resolve_relationship_condition { push @l_cols, $lc; } - # construct the crosstable condition - $ret->{condition} = { map - {( "$args->{foreign_alias}.$f_cols[$_]" => { -ident => "$args->{self_alias}.$l_cols[$_]" } )} - (0..$#f_cols) + # construct the crosstable condition and the identity map + for (0..$#f_cols) { + $ret->{condition}{"$args->{foreign_alias}.$f_cols[$_]"} = { -ident => "$args->{self_alias}.$l_cols[$_]" }; + $ret->{identity_map}{$l_cols[$_]} = $f_cols[$_]; }; if (exists $args->{self_resultobj} or exists $args->{foreign_resultobj}) { @@ -2026,6 +2028,44 @@ sub _resolve_relationship_condition { for keys %{$args->{infer_values_based_on}}; } + # add the identities based on the main condition + # (may already be there, since easy to calculate on the fly in the HASH case) + if ( ! $ret->{identity_map} ) { + + my $col_eqs = $self->schema->storage->_extract_fixed_condition_columns($ret->{condition}); + + my $colinfos; + for my $lhs (keys %$col_eqs) { + + next if $col_eqs->{$lhs} eq UNRESOLVABLE_CONDITION; + my ($rhs) = @{ is_literal_value( $ret->{condition}{$lhs} ) || next }; + + # there is no way to know who is right and who is left + # therefore the ugly scan below + $colinfos ||= $self->schema->storage->_resolve_column_info([ + { -alias => $args->{self_alias}, -rsrc => $self }, + { -alias => $args->{foreign_alias}, -rsrc => $self->related_source($args->{rel_name}) }, + ]); + + my ($l_col, $l_alias, $r_col, $r_alias) = map { + ( reverse $_ =~ / ^ (?: ([^\.]+) $ | ([^\.]+) \. (.+) ) /x )[0,1] + } ($lhs, $rhs); + + if ( + $colinfos->{$l_col} + and + $colinfos->{$r_col} + and + $colinfos->{$l_col}{-source_alias} ne $colinfos->{$r_col}{-source_alias} + ) { + ( $colinfos->{$l_col}{-source_alias} eq $args->{self_alias} ) + ? ( $ret->{identity_map}{$l_col} = $r_col ) + : ( $ret->{identity_map}{$r_col} = $l_col ) + ; + } + } + } + $ret } From d0cefd99a98e7fb2304fe6a5182d321fe7c551fc Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 10 Dec 2013 09:37:32 +0100 Subject: [PATCH 111/548] Entirely and utterly rewrite populate(), fixing the variable hash issue Yes, it took ~3 years to properly fix it. The changeset size alone should make it pretty clear why this happened, but this is not the entire story. 
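For reference, these are the two calling conventions the rewritten populate()
has to reconcile (a minimal sketch lifted from the Link fixtures added to
t/100populate.t further down - the ids and columns are purely illustrative):

  # array of hashrefs: each slice may carry a different set of keys,
  # which is exactly the "variable hash" gotcha addressed here
  $schema->populate('Link', [
    { id => 73 },
    { id => 74, title => 't74' },
    { id => 75, url   => 'u75' },
  ]);

  # array of arrayrefs: the first row is an explicit column list, the
  # remaining rows are positional and may be shorter than that list
  $schema->populate('Link', [
    [ qw( id title url ) ],
    [ 71 ],
    [ 72, undef, 'url72' ],
  ]);

In void context both forms now feed insert_bulk directly, warning and falling
back to regular create() only when related data rules out the fast path.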
At first the bug was deemed fixed back in a9bac98f. Due to miscommunication and lack of tests this issue did not come up again until last winter (the devs thought they nailed it, the users thought it's a wontfix). Then when the actual tests got written it became clear that... words fail me. In short - some folks had the bright idea that a fast-path insert got to be able to understand *MULTI-CREATE* semantics. There was a great deal of tests that did all the wrong things from a conceptual point of view but they were passing tests nonetheless. So the thing got tabled again... In the meantime a recent flurry of improvements to the relcond resolver took place, mainly centered around fixing the custom relconds call modes. A side effect was uncovering that populate() is invoking relationship cond resolution in an... insane for the lack of a better word way. Some shims were put in place and the only remaining bit were warnings, however with the improvements available it in fact possible to cleanly implement "subquery-based multicreate". So instead of punting - the entire codeflow of populate was reworked with the new toys in mind. The data mangler _normalize_populate_args is gone for good (no more mindless copy of passed in data). The amount of fallbacks to create() is reduced and each instance now properly warns of the problem. All in all this is another one of "if something changed - I fucked up" changes. As an added benefit - we now have a foothold into validating subquery-based multicreate implementations - soon (one would hope) the code will migrate to wider use within ::ResultSet. A notable part of this commit is the *undocumented* implementing part of the Swindon consensus - the is_depends_on flag is now added to all belongs_to relationship attrs. The actual implementation of the rest is subject to another battle. Now on to 0.082800 --- Changes | 2 + lib/DBIx/Class/Relationship/BelongsTo.pm | 1 + lib/DBIx/Class/ResultSet.pm | 311 ++++++++++++++++------- t/100populate.t | 90 ++++++- t/101populate_rs.t | 23 +- t/multi_create/standard.t | 10 +- 6 files changed, 337 insertions(+), 100 deletions(-) diff --git a/Changes b/Changes index d69a07122..6dcba2633 100644 --- a/Changes +++ b/Changes @@ -25,6 +25,8 @@ Revision history for DBIx::Class resultsets with no rows - Fix incorrect handling of custom relationship conditions returning SQLA literal expressions + - Fix long standing bug with populate() getting confused by hashrefs + with different sets of keys: http://is.gd/2011_dbic_populate_gotcha - Fix multi-value literal populate not working with simplified bind specifications - Massively improve the implied resultset condition parsing - now all diff --git a/lib/DBIx/Class/Relationship/BelongsTo.pm b/lib/DBIx/Class/Relationship/BelongsTo.pm index b594d3abe..4b1577c4f 100644 --- a/lib/DBIx/Class/Relationship/BelongsTo.pm +++ b/lib/DBIx/Class/Relationship/BelongsTo.pm @@ -89,6 +89,7 @@ sub belongs_to { $class->add_relationship($rel, $f_class, $cond, { + is_depends_on => 1, accessor => $acc_type, $fk_columns ? ( fk_columns => $fk_columns ) : (), %{$attrs || {}} diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 02625379b..c6b725b66 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -2234,127 +2234,266 @@ case there are obviously no benefits to using this method over L. 
sub populate { my $self = shift; - # cruft placed in standalone method - my $data = $self->_normalize_populate_args(@_); + my ($data, $guard); - return unless @$data; + # this is naive and just a quick check + # the types will need to be checked more thoroughly when the + # multi-source populate gets added + if (ref $_[0] eq 'ARRAY') { + return unless @{$_[0]}; - if(defined wantarray) { - my @created = map { $self->new_result($_)->insert } @$data; - return wantarray ? @created : \@created; + $data = $_[0] if (ref $_[0][0] eq 'HASH' or ref $_[0][0] eq 'ARRAY'); } - else { - my $first = $data->[0]; - # if a column is a registered relationship, and is a non-blessed hash/array, consider - # it relationship data - my (@rels, @columns); - my $rsrc = $self->result_source; - my $rels = { map { $_ => $rsrc->relationship_info($_) } $rsrc->relationships }; - for (keys %$first) { - my $ref = ref $first->{$_}; - $rels->{$_} && ($ref eq 'ARRAY' or $ref eq 'HASH') - ? push @rels, $_ - : push @columns, $_ + $self->throw_exception('Populate expects an arrayref of hashrefs or arrayref of arrayrefs') + unless $data; + + # FIXME - no cref handling + # At this point assume either hashes or arrays + + if(defined wantarray) { + my @results; + + $guard = $self->result_source->schema->storage->txn_scope_guard + if ( @$data > 2 or ( @$data == 2 and ref $data->[0] eq 'ARRAY' ) ); + + if (ref $data->[0] eq 'ARRAY') { + @results = map + { my $vals = $_; $self->new_result({ map { $data->[0][$_] => $vals->[$_] } 0..$#{$data->[0]} })->insert } + @{$data}[1 .. $#$data] ; } + else { + @results = map { $self->new_result($_)->insert } @$data; + } + + $guard->commit if $guard; + return wantarray ? @results : \@results; + } + + # we have to deal with *possibly incomplete* related data + # this means we have to walk the data structure twice + # whether we want this or not + # jnap, I hate you ;) + my $rsrc = $self->result_source; + my $rel_info = { map { $_ => $rsrc->relationship_info($_) } $rsrc->relationships }; + + my ($colinfo, $colnames, $slices_with_rels); + my $data_start = 0; + + DATA_SLICE: + for my $i (0 .. $#$data) { + + my $current_slice_seen_rel_infos; - my @pks = $rsrc->primary_columns; +### Determine/Supplement collists +### BEWARE - This is a hot piece of code, a lot of weird idioms were used + if( ref $data->[$i] eq 'ARRAY' ) { - ## do the belongs_to relationships - foreach my $index (0..$#$data) { + # positional(!) explicit column list + if ($i == 0) { - # delegate to create() for any dataset without primary keys with specified relationships - if (grep { !defined $data->[$index]->{$_} } @pks ) { - for my $r (@rels) { - if (grep { ref $data->[$index]{$r} eq $_ } qw/HASH ARRAY/) { # a related set must be a HASH or AoH - my @ret = $self->populate($data); - return; + $colinfo->{$data->[0][$_]} = { pos => $_, name => $data->[0][$_] } and push @$colnames, $data->[0][$_] + for 0 .. $#{$data->[0]}; + + $data_start = 1; + + next DATA_SLICE; + } + else { + for (values %$colinfo) { + if ($_->{is_rel} ||= ( + $rel_info->{$_->{name}} + and + ( + ref $data->[$i][$_->{pos}] eq 'ARRAY' + or + ref $data->[$i][$_->{pos}] eq 'HASH' + or + ( defined blessed $data->[$i][$_->{pos}] and $data->[$i][$_->{pos}]->isa('DBIx::Class::Row') ) + ) + and + 1 + )) { + + # moar sanity check... sigh + for ( ref $data->[$i][$_->{pos}] eq 'ARRAY' ? 
@{$data->[$i][$_->{pos}]} : $data->[$i][$_->{pos}] ) { + if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) { + carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()"); + return my $throwaway = $self->populate(@_); + } + } + + push @$current_slice_seen_rel_infos, $rel_info->{$_->{name}}; } } } - foreach my $rel (@rels) { - next unless ref $data->[$index]->{$rel} eq "HASH"; - my $result = $self->related_resultset($rel)->new_result($data->[$index]->{$rel})->insert; - my (undef, $reverse_relinfo) = %{$rsrc->reverse_relationship_info($rel)}; - my $related = $result->result_source->_resolve_condition( - $reverse_relinfo->{cond}, - $self, - $result, - $rel, - ); - - delete $data->[$index]->{$rel}; - $data->[$index] = {%{$data->[$index]}, %$related}; - - push @columns, keys %$related if $index == 0; + if ($current_slice_seen_rel_infos) { + push @$slices_with_rels, { map { $colnames->[$_] => $data->[$i][$_] } 0 .. $#$colnames }; + + # this is needed further down to decide whether or not to fallback to create() + $colinfo->{$colnames->[$_]}{seen_null} ||= ! defined $data->[$i][$_] + for 0 .. $#$colnames; } } + elsif( ref $data->[$i] eq 'HASH' ) { - ## inherit the data locked in the conditions of the resultset - my ($rs_data) = $self->_merge_with_rscond({}); - delete @{$rs_data}{@columns}; - - ## do bulk insert on current row - $rsrc->storage->insert_bulk( - $rsrc, - [@columns, keys %$rs_data], - [ map { [ @$_{@columns}, values %$rs_data ] } @$data ], - ); + for ( sort keys %{$data->[$i]} ) { - ## do the has_many relationships - foreach my $item (@$data) { + $colinfo->{$_} ||= do { - my $main_row; + $self->throw_exception("Column '$_' must be present in supplied explicit column list") + if $data_start; # it will be 0 on AoH, 1 on AoA - foreach my $rel (@rels) { - next unless ref $item->{$rel} eq "ARRAY" && @{ $item->{$rel} }; + push @$colnames, $_; - $main_row ||= $self->new_result({map { $_ => $item->{$_} } @pks}); + # RV + { pos => $#$colnames, name => $_ } + }; - my $child = $main_row->$rel; + if ($colinfo->{$_}{is_rel} ||= ( + $rel_info->{$_} + and + ( + ref $data->[$i]{$_} eq 'ARRAY' + or + ref $data->[$i]{$_} eq 'HASH' + or + ( defined blessed $data->[$i]{$_} and $data->[$i]{$_}->isa('DBIx::Class::Row') ) + ) + and + 1 + )) { + + # moar sanity check... sigh + for ( ref $data->[$i]{$_} eq 'ARRAY' ? @{$data->[$i]{$_}} : $data->[$i]{$_} ) { + if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) { + carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()"); + return my $throwaway = $self->populate(@_); + } + } - my $related = $child->result_source->_resolve_condition( - $rels->{$rel}{cond}, - $child, - $main_row, - $rel, - ); + push @$current_slice_seen_rel_infos, $rel_info->{$_}; + } + } - my @rows_to_add = ref $item->{$rel} eq 'ARRAY' ? @{$item->{$rel}} : ($item->{$rel}); - my @populate = map { {%$_, %$related} } @rows_to_add; + if ($current_slice_seen_rel_infos) { + push @$slices_with_rels, $data->[$i]; - $child->populate( \@populate ); + # this is needed further down to decide whether or not to fallback to create() + $colinfo->{$_}{seen_null} ||= ! defined $data->[$i]{$_} + for keys %{$data->[$i]}; } } + else { + $self->throw_exception('Unexpected populate() data structure member type: ' . 
ref $data->[$i] ); + } + + if ( grep + { $_->{attrs}{is_depends_on} } + @{ $current_slice_seen_rel_infos || [] } + ) { + carp_unique("Fast-path populate() of belongs_to relationship data is not possible - falling back to regular create()"); + return my $throwaway = $self->populate(@_); + } } -} + if( $slices_with_rels ) { -# populate() arguments went over several incarnations -# What we ultimately support is AoH -sub _normalize_populate_args { - my ($self, $arg) = @_; + # need to exclude the rel "columns" + $colnames = [ grep { ! $colinfo->{$_}{is_rel} } @$colnames ]; - if (ref $arg eq 'ARRAY') { - if (!@$arg) { - return []; - } - elsif (ref $arg->[0] eq 'HASH') { - return $arg; + # extra sanity check - ensure the main source is in fact identifiable + # the localizing of nullability is insane, but oh well... the use-case is legit + my $ci = $rsrc->columns_info($colnames); + + $ci->{$_} = { %{$ci->{$_}}, is_nullable => 0 } + for grep { ! $colinfo->{$_}{seen_null} } keys %$ci; + + unless( $rsrc->_identifying_column_set($ci) ) { + carp_unique("Fast-path populate() of non-uniquely identifiable rows with related data is not possible - falling back to regular create()"); + return my $throwaway = $self->populate(@_); } - elsif (ref $arg->[0] eq 'ARRAY') { - my @ret; - my @colnames = @{$arg->[0]}; - foreach my $values (@{$arg}[1 .. $#$arg]) { - push @ret, { map { $colnames[$_] => $values->[$_] } (0 .. $#colnames) }; + } + +### inherit the data locked in the conditions of the resultset + my ($rs_data) = $self->_merge_with_rscond({}); + delete @{$rs_data}{@$colnames}; # passed-in stuff takes precedence + + # if anything left - decompose rs_data + my $rs_data_vals; + if (keys %$rs_data) { + push @$rs_data_vals, $rs_data->{$_} + for sort keys %$rs_data; + } + +### start work + $guard = $rsrc->schema->storage->txn_scope_guard + if $slices_with_rels; + +### main source data + # FIXME - need to switch entirely to a coderef-based thing, + # so that large sets aren't copied several times... I think + $rsrc->storage->insert_bulk( + $rsrc, + [ @$colnames, sort keys %$rs_data ], + [ map { + ref $data->[$_] eq 'ARRAY' + ? ( + $slices_with_rels ? [ @{$data->[$_]}[0..$#$colnames], @{$rs_data_vals||[]} ] # the collist changed + : $rs_data_vals ? [ @{$data->[$_]}, @$rs_data_vals ] + : $data->[$_] + ) + : [ @{$data->[$_]}{@$colnames}, @{$rs_data_vals||[]} ] + } $data_start .. $#$data ], + ); + +### do the children relationships + if ( $slices_with_rels ) { + my @rels = grep { $colinfo->{$_}{is_rel} } keys %$colinfo + or die 'wtf... please report a bug with DBIC_TRACE=1 output (stacktrace)'; + + for my $sl (@$slices_with_rels) { + + my ($main_proto, $main_proto_rs); + for my $rel (@rels) { + next unless defined $sl->{$rel}; + + $main_proto ||= { + %$rs_data, + (map { $_ => $sl->{$_} } @$colnames), + }; + + unless (defined $colinfo->{$rel}{rs}) { + + $colinfo->{$rel}{rs} = $rsrc->related_source($rel)->resultset; + + $colinfo->{$rel}{fk_map} = { reverse %{ $rsrc->_resolve_relationship_condition( + rel_name => $rel, + self_alias => "\xFE", # irrelevant + foreign_alias => "\xFF", # irrelevant + )->{identity_map} || {} } }; + + } + + $colinfo->{$rel}{rs}->search({ map # only so that we inherit them values properly, no actual search + { + $_ => { '=' => + ( $main_proto_rs ||= $rsrc->resultset->search($main_proto) ) + ->get_column( $colinfo->{$rel}{fk_map}{$_} ) + ->as_query + } + } + keys %{$colinfo->{$rel}{fk_map}} + })->populate( ref $sl->{$rel} eq 'ARRAY' ? 
$sl->{$rel} : [ $sl->{$rel} ] ); + + 1; } - return \@ret; } } - $self->throw_exception('Populate expects an arrayref of hashrefs or arrayref of arrayrefs'); + $guard->commit if $guard; } =head2 pager diff --git a/t/100populate.t b/t/100populate.t index 4a3f0ac7a..16c1e6db4 100644 --- a/t/100populate.t +++ b/t/100populate.t @@ -88,6 +88,15 @@ is($link4->id, 4, 'Link 4 id'); is($link4->url, undef, 'Link 4 url'); is($link4->title, 'dtitle', 'Link 4 title'); +## variable size dataset +@links = $schema->populate('Link', [ +[ qw/id title url/ ], +[ 41 ], +[ 42, undef, 'url42' ], +]); +is(scalar @links, 2); +is($links[0]->url, undef); +is($links[1]->url, 'url42'); ## make sure populate -> insert_bulk honors fields/orders in void context ## schema order @@ -120,6 +129,63 @@ is($link7->id, 7, 'Link 7 id'); is($link7->url, undef, 'Link 7 url'); is($link7->title, 'gtitle', 'Link 7 title'); +## variable size dataset in void ctx +$schema->populate('Link', [ +[ qw/id title url/ ], +[ 71 ], +[ 72, undef, 'url72' ], +]); +@links = $schema->resultset('Link')->search({ id => [71, 72]}, { order_by => 'id' })->all; +is(scalar @links, 2); +is($links[0]->url, undef); +is($links[1]->url, 'url72'); + +## variable size dataset in void ctx, hash version +$schema->populate('Link', [ + { id => 73 }, + { id => 74, title => 't74' }, + { id => 75, url => 'u75' }, +]); +@links = $schema->resultset('Link')->search({ id => [73..75]}, { order_by => 'id' })->all; +is(scalar @links, 3); +is($links[0]->url, undef); +is($links[0]->title, undef); +is($links[1]->url, undef); +is($links[1]->title, 't74'); +is($links[2]->url, 'u75'); +is($links[2]->title, undef); + +## Make sure the void ctx trace is sane +{ + for ( + [ + [ qw/id title url/ ], + [ 81 ], + [ 82, 't82' ], + [ 83, undef, 'url83' ], + ], + [ + { id => 91 }, + { id => 92, title => 't92' }, + { id => 93, url => 'url93' }, + ] + ) { + $schema->is_executed_sql_bind( + sub { + $schema->populate('Link', $_); + }, + [ + [ 'BEGIN' ], + [ + 'INSERT INTO link( id, title, url ) VALUES( ?, ?, ? )', + "__BULK_INSERT__" + ], + [ 'COMMIT' ], + ] + ); + } +} + # populate with literals { my $rs = $schema->resultset('Link'); @@ -419,7 +485,8 @@ warnings_like { : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 2 ], 'Data integrity warnings as planned'; -lives_ok { +$schema->is_executed_sql_bind( + sub { $schema->resultset('TwoKeys')->populate([{ artist => 1, cd => 5, @@ -437,7 +504,26 @@ lives_ok { autopilot => 'b', }] }]) -} 'multicol-PK has_many populate works'; + }, + [ + [ 'BEGIN' ], + [ 'INSERT INTO twokeys ( artist, cd) + VALUES ( ?, ? )', + '__BULK_INSERT__' + ], + [ 'INSERT INTO fourkeys_to_twokeys ( autopilot, f_bar, f_foo, f_goodbye, f_hello, t_artist, t_cd) + VALUES ( + ?, ?, ?, ?, ?, + ( SELECT me.artist FROM twokeys me WHERE artist = ? AND cd = ? ), + ( SELECT me.cd FROM twokeys me WHERE artist = ? AND cd = ? 
) + ) + ', + '__BULK_INSERT__' + ], + [ 'COMMIT' ], + ], + 'multicol-PK has_many populate expected trace' +); lives_ok ( sub { $schema->populate('CD', [ diff --git a/t/101populate_rs.t b/t/101populate_rs.t index 8a0eea431..a592e56eb 100644 --- a/t/101populate_rs.t +++ b/t/101populate_rs.t @@ -12,6 +12,7 @@ use strict; use warnings; use Test::More; +use Test::Warn; use lib qw(t/lib); use DBICTest; @@ -37,10 +38,10 @@ ok( $cd_rs, 'Got Good CD Resultset'); SCHEMA_POPULATE1: { - ## Test to make sure that the old $schema->populate is using the new method - ## for $resultset->populate when in void context and with sub objects. + # throw a monkey wrench + my $post_jnap_monkeywrench = $schema->resultset('Artist')->find(1)->update({ name => undef }); - $schema->populate('Artist', [ + warnings_exist { $schema->populate('Artist', [ [qw/name cds/], ["001First Artist", [ @@ -55,13 +56,13 @@ SCHEMA_POPULATE1: { [undef, [ {title=>"004Title1", year=>2010} ]], - ]); + ]) } qr/\QFast-path populate() of non-uniquely identifiable rows with related data is not possible/; isa_ok $schema, 'DBIx::Class::Schema'; - my ($undef, $artist1, $artist2, $artist3 ) = $schema->resultset('Artist')->search({ + my ( $preexisting_undef, $artist1, $artist2, $artist3, $undef ) = $schema->resultset('Artist')->search({ name=>["001First Artist","002Second Artist","003Third Artist", undef]}, - {order_by=>'name ASC'})->all; + {order_by => { -asc => 'artistid' }})->all; isa_ok $artist1, 'DBICTest::Artist'; isa_ok $artist2, 'DBICTest::Artist'; @@ -78,6 +79,8 @@ SCHEMA_POPULATE1: { ok $artist3->cds->count eq 1, "Got Right number of CDs for Artist3"; ok $undef->cds->count eq 1, "Got Right number of CDs for Artist4"; + $post_jnap_monkeywrench->delete; + ARTIST1CDS: { my ($cd1, $cd2, $cd3) = $artist1->cds->search(undef, {order_by=>'year ASC'}); @@ -475,7 +478,9 @@ VOID_CONTEXT: { }, ]; - $cd_rs->populate($cds); + warnings_exist { + $cd_rs->populate($cds) + } qr/\QFast-path populate() of belongs_to relationship data is not possible/; my ($cdA, $cdB) = $cd_rs->search( {title=>[sort map {$_->{title}} @$cds]}, @@ -515,7 +520,9 @@ VOID_CONTEXT: { }, ]; - $cd_rs->populate($cds); + warnings_exist { + $cd_rs->populate($cds); + } qr/\QFast-path populate() of belongs_to relationship data is not possible/; my ($cdA, $cdB, $cdC) = $cd_rs->search( {title=>[sort map {$_->{title}} @$cds]}, diff --git a/t/multi_create/standard.t b/t/multi_create/standard.t index 6c1efd816..54cf04ee3 100644 --- a/t/multi_create/standard.t +++ b/t/multi_create/standard.t @@ -3,11 +3,10 @@ use warnings; use Test::More; use Test::Exception; +use Test::Warn; use lib qw(t/lib); use DBICTest; -plan tests => 91; - my $schema = DBICTest->init_schema(); lives_ok ( sub { @@ -403,8 +402,11 @@ lives_ok ( sub { $kurt_cobain->{cds} = [ $in_utero ]; + warnings_exist { + $schema->resultset('Artist')->populate([ $kurt_cobain ]); + } qr/\QFast-path populate() with supplied related objects is not possible/; + - $schema->resultset('Artist')->populate([ $kurt_cobain ]); # %) my $artist = $schema->resultset('Artist')->find({name => 'Kurt Cobain'}); is($artist->name, 'Kurt Cobain', 'Artist insertion ok'); @@ -468,4 +470,4 @@ lives_ok ( sub { is ($m2m_cd->first->producers->first->name, 'Cowboy Neal', 'Correct producer row created'); }, 'Test multi create over many_to_many'); -1; +done_testing; From 7cb7914bc08e12acaea711c94c8f547926b8f2b3 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 23 Jul 2014 22:24:27 +0200 Subject: [PATCH 112/548] Now that populate is rewritten: finalize the 
resolver fatal sanity checks --- lib/DBIx/Class/ResultSource.pm | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index c3a318ff3..72edcbd40 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1712,9 +1712,6 @@ sub _resolve_condition { } } - $self->throw_exception('No practical way to resolve a relationship between two structures') - if $is_objlike[0] and $is_objlike[1]; - my $args = { condition => $cond, @@ -1788,12 +1785,15 @@ sub _resolve_relationship_condition { if !defined $args->{$_} or length ref $args->{$_}; } + $self->throw_exception("Arguments 'self_alias' and 'foreign_alias' may not be identical") + if $args->{self_alias} eq $args->{foreign_alias}; + my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'"; - my $rel_info = $self->relationship_info($args->{rel_name}); - # or $self->throw_exception( "No such $exception_rel_id" ); + my $rel_info = $self->relationship_info($args->{rel_name}) + or $self->throw_exception( "No such $exception_rel_id" ); - $self->throw_exception("No practical way to resolve $exception_rel_id between two objects") + $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures") if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; $self->throw_exception( "Argument to infer_values_based_on must be a hash" ) @@ -1805,8 +1805,8 @@ sub _resolve_relationship_condition { if (exists $args->{self_resultobj}) { if (defined blessed $args->{self_resultobj}) { -# $self->throw_exception( "Object '$args->{self_resultobj}' must be of class '@{[ $self->result_class ]}'" ) -# unless $args->{self_resultobj}->isa($self->result_class); + $self->throw_exception( "Object '$args->{self_resultobj}' must be of class '@{[ $self->result_class ]}'" ) + unless $args->{self_resultobj}->isa($self->result_class); } else { $args->{self_resultobj} = DBIx::Class::Core->new({ @@ -1818,8 +1818,8 @@ sub _resolve_relationship_condition { if (exists $args->{foreign_resultobj}) { if (defined blessed $args->{foreign_resultobj}) { -# $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) -# unless $args->{foreign_resultobj}->isa($rel_info->{class}); + $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) + unless $args->{foreign_resultobj}->isa($rel_info->{class}); } else { $args->{foreign_resultobj} = DBIx::Class::Core->new({ From 98def3efbed614ff1514c79b9da7e03b5ceb06c0 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 24 Jul 2014 01:29:46 +0200 Subject: [PATCH 113/548] Change once more the signature of the custom cond coderef (augment a446d7f8) Since there hasn't yet been a stable release with the new naming, and given I've already mistyped the long-form twice, might as well rename things and stay consistent. 
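For illustration, a custom condition coderef under the new argument names
looks roughly like this (a sketch adapted from the Relationship::Base POD
updated below - the related class name is made up, the artist/year columns
are the stock documentation example):

  __PACKAGE__->has_many( cds_80s => 'My::Schema::Result::CD',
    sub {
      my $args = shift;

      return (
        # the join condition proper - always required
        {
          "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
          "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
        },

        # optional join-free condition, used only when the relationship is
        # invoked on an instance - note the new self_result_object key
        # (the long-deprecated self_rowobj slot is still filled for legacy code)
        $args->{self_result_object} && {
          "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid,
          "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
        },
      );
    }
  );

The same renaming applies in the reverse direction: set_from_related() now
hands the related object to the coderef as foreign_result_object.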
--- Changes | 4 +- lib/DBIx/Class/Relationship/Base.pm | 26 +++++------ lib/DBIx/Class/ResultSource.pm | 54 +++++++++++----------- t/lib/DBICTest/Schema/Artist.pm | 16 +++---- t/lib/DBICTest/Schema/Artwork.pm | 4 +- t/lib/DBICTest/Schema/Artwork_to_Artist.pm | 4 +- t/lib/DBICTest/Schema/Track.pm | 14 +++--- t/lib/DBICTest/Util.pm | 14 +++--- 8 files changed, 68 insertions(+), 68 deletions(-) diff --git a/Changes b/Changes index 6dcba2633..b05ea3d84 100644 --- a/Changes +++ b/Changes @@ -7,9 +7,9 @@ Revision history for DBIx::Class returned from storage - Custom condition relationships are now invoked with a slightly different signature (existing coderefs will continue to work) - - Add extra custom condition coderef attribute 'foreign_resultobj' + - Add extra custom condition coderef attribute 'foreign_result_object' to allow for proper reverse-relationship emulation - (i.e. $result->set_from_related($custom_cond, $foreign_resultobj) + (i.e. $result->set_from_related($custom_rel, $foreign_result_object) - When in a transaction, DBIC::Ordered now seamlesly handles result objects that went out of sync with the storage (RT#96499) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index ab7f33cd7..42d09553d 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -184,7 +184,7 @@ While every coderef-based condition must return a valid C clause, it may elect to additionally return a simplified join-free condition hashref when invoked as C<< $result->relationship >>, as opposed to C<< $rs->related_resultset('relationship') >>. In this case C<$result> is -passed to the coderef as C<< $args->{self_resultobj} >>, so a user can do the +passed to the coderef as C<< $args->{self_result_object} >>, so a user can do the following: sub { @@ -195,8 +195,8 @@ following: "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, + $args->{self_result_object} && { + "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, ); @@ -233,20 +233,20 @@ clause, the C<$args> hashref passed to the subroutine contains some extra metadata. Currently the supplied coderef is executed as: $relationship_info->{cond}->({ - self_resultsource => The resultsource instance on which rel_name is registered - rel_name => The relationship name (does *NOT* always match foreign_alias) + self_resultsource => The resultsource instance on which rel_name is registered + rel_name => The relationship name (does *NOT* always match foreign_alias) - self_alias => The alias of the invoking resultset - foreign_alias => The alias of the to-be-joined resultset (does *NOT* always match rel_name) + self_alias => The alias of the invoking resultset + foreign_alias => The alias of the to-be-joined resultset (does *NOT* always match rel_name) # only one of these (or none at all) will ever be supplied to aid in the # construction of a join-free condition - self_resultobj => The invocant object itself in case of a $resultobj->$rel_name() call - foreign_resultobj => The related object in case of $resultobj->set_from_related($rel_name, $foreign_resultobj) + self_result_object => The invocant object itself in case of a $result_object->$rel_name( ... 
) call + foreign_result_object => The related object in case of $result_object->set_from_related( $rel_name, $foreign_result_object ) # deprecated inconsistent names, will be forever available for legacy code - self_rowobj => Old deprecated slot for self_resultobj - foreign_relname => Old deprecated slot for rel_name + self_rowobj => Old deprecated slot for self_result_object + foreign_relname => Old deprecated slot for rel_name }); =head3 attributes @@ -636,7 +636,7 @@ sub new_related { return $self->search_related($rel)->new_result( $self->result_source->_resolve_relationship_condition ( infer_values_based_on => $data, rel_name => $rel, - self_resultobj => $self, + self_result_object => $self, foreign_alias => $rel, self_alias => 'me', )->{inferred_values} ); @@ -784,7 +784,7 @@ sub set_from_related { $self->set_columns( $self->result_source->_resolve_relationship_condition ( infer_values_based_on => {}, rel_name => $rel, - foreign_resultobj => $f_obj, + foreign_result_object => $f_obj, foreign_alias => $rel, self_alias => 'me', )->{inferred_values} ); diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 72edcbd40..4f04be8c9 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1717,8 +1717,8 @@ sub _resolve_condition { # where-is-waldo block guesses relname, then further down we override it if available ( - $is_objlike[1] ? ( rel_name => $res_args[0], self_alias => $res_args[0], foreign_alias => 'me', self_resultobj => $res_args[1] ) - : $is_objlike[0] ? ( rel_name => $res_args[1], self_alias => 'me', foreign_alias => $res_args[1], foreign_resultobj => $res_args[0] ) + $is_objlike[1] ? ( rel_name => $res_args[0], self_alias => $res_args[0], foreign_alias => 'me', self_result_object => $res_args[1] ) + : $is_objlike[0] ? 
( rel_name => $res_args[1], self_alias => 'me', foreign_alias => $res_args[1], foreign_result_object => $res_args[0] ) : ( rel_name => $res_args[0], self_alias => $res_args[1], foreign_alias => $res_args[0] ) ), @@ -1762,9 +1762,9 @@ Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); ## self-explanatory API, modeled on the custom cond coderef: # rel_name # foreign_alias -# foreign_resultobj +# foreign_result_object # self_alias -# self_resultobj +# self_result_object # require_join_free_condition # infer_values_based_on (optional, mandatory hashref argument) # condition (optional, derived from $self->rel_info(rel_name)) @@ -1794,7 +1794,7 @@ sub _resolve_relationship_condition { or $self->throw_exception( "No such $exception_rel_id" ); $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures") - if defined $args->{self_resultobj} and defined $args->{foreign_resultobj}; + if defined $args->{self_result_object} and defined $args->{foreign_result_object}; $self->throw_exception( "Argument to infer_values_based_on must be a hash" ) if exists $args->{infer_values_based_on} and ref $args->{infer_values_based_on} ne 'HASH'; @@ -1803,28 +1803,28 @@ sub _resolve_relationship_condition { $args->{condition} ||= $rel_info->{cond}; - if (exists $args->{self_resultobj}) { - if (defined blessed $args->{self_resultobj}) { - $self->throw_exception( "Object '$args->{self_resultobj}' must be of class '@{[ $self->result_class ]}'" ) - unless $args->{self_resultobj}->isa($self->result_class); + if (exists $args->{self_result_object}) { + if (defined blessed $args->{self_result_object}) { + $self->throw_exception( "Object '$args->{self_result_object}' must be of class '@{[ $self->result_class ]}'" ) + unless $args->{self_result_object}->isa($self->result_class); } else { - $args->{self_resultobj} = DBIx::Class::Core->new({ + $args->{self_result_object} = DBIx::Class::Core->new({ -result_source => $self, - %{ $args->{self_resultobj}||{} } + %{ $args->{self_result_object}||{} } }); } } - if (exists $args->{foreign_resultobj}) { - if (defined blessed $args->{foreign_resultobj}) { - $self->throw_exception( "Object '$args->{foreign_resultobj}' must be of class '$rel_info->{class}'" ) - unless $args->{foreign_resultobj}->isa($rel_info->{class}); + if (exists $args->{foreign_result_object}) { + if (defined blessed $args->{foreign_result_object}) { + $self->throw_exception( "Object '$args->{foreign_result_object}' must be of class '$rel_info->{class}'" ) + unless $args->{foreign_result_object}->isa($rel_info->{class}); } else { - $args->{foreign_resultobj} = DBIx::Class::Core->new({ + $args->{foreign_result_object} = DBIx::Class::Core->new({ -result_source => $self->related_source($args->{rel_name}), - %{ $args->{foreign_resultobj}||{} } + %{ $args->{foreign_result_object}||{} } }); } } @@ -1840,15 +1840,15 @@ sub _resolve_relationship_condition { foreign_alias => $args->{foreign_alias}, ( map { (exists $args->{$_}) ? ( $_ => $args->{$_} ) : () } - qw( self_resultobj foreign_resultobj ) + qw( self_result_object foreign_result_object ) ), }; # legacy - never remove these!!! 
$cref_args->{foreign_relname} = $cref_args->{rel_name}; - $cref_args->{self_rowobj} = $cref_args->{self_resultobj} - if exists $cref_args->{self_resultobj}; + $cref_args->{self_rowobj} = $cref_args->{self_result_object} + if exists $cref_args->{self_result_object}; ($ret->{condition}, $ret->{join_free_condition}, my @extra) = $args->{condition}->($cref_args); @@ -1863,11 +1863,11 @@ sub _resolve_relationship_condition { ) unless ref $jfc eq 'HASH'; my ($joinfree_alias, $joinfree_source); - if (defined $args->{self_resultobj}) { + if (defined $args->{self_result_object}) { $joinfree_alias = $args->{foreign_alias}; $joinfree_source = $self->related_source($args->{rel_name}); } - elsif (defined $args->{foreign_resultobj}) { + elsif (defined $args->{foreign_result_object}) { $joinfree_alias = $args->{self_alias}; $joinfree_source = $self; } @@ -1915,17 +1915,17 @@ sub _resolve_relationship_condition { $ret->{identity_map}{$l_cols[$_]} = $f_cols[$_]; }; - if (exists $args->{self_resultobj} or exists $args->{foreign_resultobj}) { + if (exists $args->{self_result_object} or exists $args->{foreign_result_object}) { - my ($obj, $obj_alias, $plain_alias, $obj_cols, $plain_cols) = defined $args->{self_resultobj} - ? ( @{$args}{qw( self_resultobj self_alias foreign_alias )}, \@l_cols, \@f_cols ) - : ( @{$args}{qw( foreign_resultobj foreign_alias self_alias )}, \@f_cols, \@l_cols ) + my ($obj, $obj_alias, $plain_alias, $obj_cols, $plain_cols) = defined $args->{self_result_object} + ? ( @{$args}{qw( self_result_object self_alias foreign_alias )}, \@l_cols, \@f_cols ) + : ( @{$args}{qw( foreign_result_object foreign_alias self_alias )}, \@f_cols, \@l_cols ) ; for my $i (0..$#$obj_cols) { if ( - defined $args->{self_resultobj} + defined $args->{self_result_object} and ! 
$obj->has_column_loaded($obj_cols->[$i]) ) { diff --git a/t/lib/DBICTest/Schema/Artist.pm b/t/lib/DBICTest/Schema/Artist.pm index 470796a21..80872926c 100644 --- a/t/lib/DBICTest/Schema/Artist.pm +++ b/t/lib/DBICTest/Schema/Artist.pm @@ -62,8 +62,8 @@ __PACKAGE__->has_many( return ( { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, + $args->{self_result_object} && { + "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid, # keep old rowobj syntax as a test } ); }, @@ -81,8 +81,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { '=' => \ "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artist" => { '=' => \[ '?', $args->{self_resultobj}->artistid ] }, + $args->{self_result_object} && { + "$args->{foreign_alias}.artist" => { '=' => \[ '?', $args->{self_result_object}->artistid ] }, "$args->{foreign_alias}.year" => { '>' => 1979, '<' => 1990 }, } ); @@ -102,8 +102,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => 1984, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, + $args->{self_result_object} && { + "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid, "$args->{foreign_alias}.year" => 1984, } ); @@ -161,8 +161,8 @@ __PACKAGE__->has_many( { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.genreid" => undef, - }, $args->{self_resultobj} && { - "$args->{foreign_alias}.artist" => $args->{self_resultobj}->artistid, + }, $args->{self_result_object} && { + "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid, "$args->{foreign_alias}.genreid" => undef, } ), diff --git a/t/lib/DBICTest/Schema/Artwork.pm b/t/lib/DBICTest/Schema/Artwork.pm index d9ddc332e..ddc87cdcd 100644 --- a/t/lib/DBICTest/Schema/Artwork.pm +++ b/t/lib/DBICTest/Schema/Artwork.pm @@ -36,8 +36,8 @@ __PACKAGE__->has_many('artwork_to_artist_test_m2m', 'DBICTest::Schema::Artwork_t return ( { "$args->{foreign_alias}.artwork_cd_id" => { -ident => "$args->{self_alias}.cd_id" }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artwork_cd_id" => $args->{self_resultobj}->cd_id, + $args->{self_result_object} && { + "$args->{foreign_alias}.artwork_cd_id" => $args->{self_result_object}->cd_id, } ); } diff --git a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm index e4c4cf2d4..8a33928ec 100644 --- a/t/lib/DBICTest/Schema/Artwork_to_Artist.pm +++ b/t/lib/DBICTest/Schema/Artwork_to_Artist.pm @@ -33,8 +33,8 @@ __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist', { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" }, "$args->{foreign_alias}.rank" => { '<' => 10 }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.artistid" => $args->{self_resultobj}->artist_id, + $args->{self_result_object} && { + "$args->{foreign_alias}.artistid" => $args->{self_result_object}->artist_id, "$args->{foreign_alias}.rank" => { '<' => 10 }, } ); diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index a1cb27a1b..a466c39fd 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -66,12 
+66,12 @@ sub { "$args->{foreign_alias}.cdid" => { -ident => "$args->{self_alias}.cd" }, }, - ( $args->{self_resultobj} ? { - "$args->{foreign_alias}.cdid" => $args->{self_resultobj}->cd + ( $args->{self_result_object} ? { + "$args->{foreign_alias}.cdid" => $args->{self_result_object}->cd } : () ), - ( $args->{foreign_resultobj} ? { - "$args->{self_alias}.cd" => $args->{foreign_resultobj}->cdid + ( $args->{foreign_result_object} ? { + "$args->{self_alias}.cd" => $args->{foreign_result_object}->cdid } : () ), ); } @@ -108,9 +108,9 @@ __PACKAGE__->has_many ( { "$args->{foreign_alias}.cd" => { -ident => "$args->{self_alias}.cd" }, "$args->{foreign_alias}.position" => { '>' => { -ident => "$args->{self_alias}.position" } }, }, - $args->{self_resultobj} && { - "$args->{foreign_alias}.cd" => $args->{self_resultobj}->get_column('cd'), - "$args->{foreign_alias}.position" => { '>' => $args->{self_resultobj}->pos }, + $args->{self_result_object} && { + "$args->{foreign_alias}.cd" => $args->{self_result_object}->get_column('cd'), + "$args->{foreign_alias}.position" => { '>' => $args->{self_result_object}->pos }, } ) } diff --git a/t/lib/DBICTest/Util.pm b/t/lib/DBICTest/Util.pm index a6c8dfd61..0588a9dd7 100644 --- a/t/lib/DBICTest/Util.pm +++ b/t/lib/DBICTest/Util.pm @@ -101,22 +101,22 @@ sub check_customcond_args ($) { my $rowobj_cnt = 0; - if (defined $args->{self_resultobj} or defined $args->{self_rowobj} ) { + if (defined $args->{self_result_object} or defined $args->{self_rowobj} ) { $rowobj_cnt++; - for (qw(self_resultobj self_rowobj)) { + for (qw(self_result_object self_rowobj)) { confess "Custom condition argument '$_' must be a result instance" unless defined blessed $args->{$_} and $args->{$_}->isa('DBIx::Class::Row'); } - confess "Current and legacy self_resultobj arguments do not match" - if refaddr($args->{self_resultobj}) != refaddr($args->{self_rowobj}); + confess "Current and legacy self_result_object arguments do not match" + if refaddr($args->{self_result_object}) != refaddr($args->{self_rowobj}); } - if (defined $args->{foreign_resultobj}) { + if (defined $args->{foreign_result_object}) { $rowobj_cnt++; - confess "Custom condition argument 'foreign_resultobj' must be a result instance" - unless defined blessed $args->{foreign_resultobj} and $args->{foreign_resultobj}->isa('DBIx::Class::Row'); + confess "Custom condition argument 'foreign_result_object' must be a result instance" + unless defined blessed $args->{foreign_result_object} and $args->{foreign_result_object}->isa('DBIx::Class::Row'); } confess "Result objects supplied on both ends of a relationship" From ef0845bad4b2945f8d5bb4157ba3aa9fe95ef790 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 24 Jul 2014 02:11:38 +0200 Subject: [PATCH 114/548] Document and add example of foreign_related_object --- lib/DBIx/Class/Relationship/Base.pm | 26 ++++++++++++++++++++------ t/lib/DBICTest/Schema/Track.pm | 8 ++++---- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 42d09553d..e2efc9987 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -181,11 +181,14 @@ L and the resulting SQL will be used verbatim as the C clause of the C statement associated with this relationship. 
While every coderef-based condition must return a valid C clause, it may -elect to additionally return a simplified join-free condition hashref when -invoked as C<< $result->relationship >>, as opposed to +elect to additionally return a simplified B join-free condition +hashref when invoked as C<< $result->$relationship >>, as opposed to C<< $rs->related_resultset('relationship') >>. In this case C<$result> is -passed to the coderef as C<< $args->{self_result_object} >>, so a user can do the -following: +passed to the coderef as C<< $args->{self_result_object} >>. Alternatively +the user-space could be calling C<< $result->set_from_related( $rel => +$foreign_related_object ) >>, in which case C<$foreign_related_object> will +be passed to the coderef as C<< $args->{foreign_result_object >>. In other +words if you define your condition coderef as: sub { my $args = shift; @@ -195,14 +198,17 @@ following: "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" }, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, - $args->{self_result_object} && { + ! $args->{self_result_object} ? () : { "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid, "$args->{foreign_alias}.year" => { '>', "1979", '<', "1990" }, }, + ! $args->{foreign_result_object} ? () : { + "$args->{self_alias}.artistid" => $args->{foreign_result_object}->artist, + } ); } -Now this code: +Then this code: my $artist = $schema->resultset("Artist")->find({ id => 4 }); $artist->cds_80s->all; @@ -219,6 +225,14 @@ With the bind values: '4', '1990', '1979' +While this code: + + my $cd = $schema->resultset("CD")->search({ artist => 1 }, { rows => 1 })->single; + my $artist = $schema->resultset("Artist")->new({}); + $artist->set_from_related('cds_80s'); + +Will properly set the C<< $artist->artistid >> field of this new object to C<1> + Note that in order to be able to use L<< $result->create_related|DBIx::Class::Relationship::Base/create_related >>, the coderef must not only return as its second such a "simple" condition diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index a466c39fd..3cfbc3122 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -66,13 +66,13 @@ sub { "$args->{foreign_alias}.cdid" => { -ident => "$args->{self_alias}.cd" }, }, - ( $args->{self_result_object} ? { + ! $args->{self_result_object} ? () : { "$args->{foreign_alias}.cdid" => $args->{self_result_object}->cd - } : () ), + }, - ( $args->{foreign_result_object} ? { + ! $args->{foreign_result_object} ? 
() : { "$args->{self_alias}.cd" => $args->{foreign_result_object}->cdid - } : () ), + }, ); } ); From c480ff4ac5ed304ec470a37516ee9c74eb843998 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 23 Jul 2014 20:15:10 +0200 Subject: [PATCH 115/548] Correct test of ${^WARNING_BITS} - as haarg++ noted: runtime != compile time Also bump Moo prereq for proper * presence of undefer_all * fixes of meory leaks in Sub::Quote/Sub::Defer --- Makefile.PL | 2 +- xt/quote_sub.t | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile.PL b/Makefile.PL index 5a952e29e..eb1cb5bc3 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -79,7 +79,7 @@ my $runtime_requires = { 'Data::Page' => '2.00', 'Devel::GlobalDestruction' => '0.09', 'Hash::Merge' => '0.12', - 'Moo' => '1.002', + 'Moo' => '1.004005', 'MRO::Compat' => '0.12', 'Module::Find' => '0.07', 'namespace::clean' => '0.24', diff --git a/xt/quote_sub.t b/xt/quote_sub.t index 7918cc55a..77b490507 100644 --- a/xt/quote_sub.t +++ b/xt/quote_sub.t @@ -27,17 +27,17 @@ my $no_nothing_q = do { no strict; no warnings; quote_sub <<'EOC'; + BEGIN { warn "-->${^WARNING_BITS}<--\n" }; my $n = "Test::Warn::warnings_exist"; warn "-->@{[ *{$n}{CODE} ]}<--\n"; - warn "-->@{[ ${^WARNING_BITS} || '' ]}<--\n"; EOC }; my $we_cref = Test::Warn->can('warnings_exist'); warnings_exist { $no_nothing_q->() } [ + qr/^\-\-\>\0+\<\-\-$/m, qr/^\Q-->$we_cref<--\E$/m, - qr/^\-\-\>\0*\<\-\-$/m, # some perls have a string of nulls, some just an empty string ], 'Expected warnings, strict did not leak inside the qsub' or do { require B::Deparse; From d0435d7535acac7611d188dbaabf63afe33f1dfb Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 23 Jul 2014 20:02:52 +0200 Subject: [PATCH 116/548] Bundle trial SQLA for the trial of DBIC (reuse dq technique f4a8b21e) Supersedes 609fa215, to be reverted before 0.082800 --- .gitignore | 2 ++ Makefile.PL | 36 ++++++++++++++++++- lib/DBIx/Class/_Util.pm | 28 +++++++++++++++ maint/careless_ssh.bash | 3 ++ maint/travis-ci_scripts/30_before_script.bash | 5 +-- t/53lean_startup.t | 3 ++ xt/podcoverage.t | 2 ++ 7 files changed, 74 insertions(+), 5 deletions(-) create mode 100755 maint/careless_ssh.bash diff --git a/.gitignore b/.gitignore index c8cda3ed8..81d2445c2 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,5 @@ t/var/ *~ maint/.Generated_Pod examples/Schema/db + +lib/DBIx/Class/_TempExtlib diff --git a/Makefile.PL b/Makefile.PL index eb1cb5bc3..e2de85ee1 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -15,6 +15,40 @@ BEGIN { $Module::Install::AUTHOR = 0 if (grep { $ENV{"PERL5_${_}_IS_RUNNING"} } (qw/CPANM CPANPLUS CPAN/) ); } +## +## TEMPORARY (and non-portable) +## Get trial SQLA +## +BEGIN { + my $target_libdir = 'lib/DBIx/Class/_TempExtlib'; + + if ( ($Module::Install::AUTHOR or $ENV{TRAVIS}) and ! 
$ENV{MAKELEVEL} ) { + + `rm -rf $target_libdir`; + `mkdir $target_libdir`; + for ( + [ 'SQL-Abstract' => 'master' ], + ) { + my $tdir = "/tmp/dbictemplib/$_->[0]/"; + + `rm -rf $tdir`; + + `GIT_SSH=maint/careless_ssh.bash git clone --bare --quiet --branch=$_->[1] --depth=1 git://git.shadowcat.co.uk/dbsrgits/$_->[0] $tdir`; + printf "\nIncluding %s git rev %s\n", + $_->[0], + scalar `GIT_DIR=$tdir git rev-parse $_->[1]`, + ; + `git archive --format=tar --remote=file://$tdir $_->[1] lib/ | tar --strip-components=1 -xC $target_libdir`; + + #`rm -rf $tdir`; + } + + unshift @INC, $target_libdir; + + no_index directory => $target_libdir; + } +} + homepage 'http://www.dbix-class.org/'; resources 'x_IRC' => 'irc://irc.perl.org/#dbix-class'; resources 'x_WebIRC' => 'https://chat.mibbit.com/#dbix-class@irc.perl.org'; @@ -85,7 +119,7 @@ my $runtime_requires = { 'namespace::clean' => '0.24', 'Path::Class' => '0.18', 'Scope::Guard' => '0.03', - 'SQL::Abstract' => '1.78_02', # TEMPORARY + 'SQL::Abstract' => '1.78', 'Try::Tiny' => '0.07', # Technically this is not a core dependency - it is only required diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 83bca471c..35f8ad06a 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -4,6 +4,34 @@ package # hide from PAUSE use warnings; use strict; +# Temporary - tempextlib +use namespace::clean; +BEGIN { + require Module::Runtime; + require File::Spec; + + # There can be only one of these, make sure we get the bundled part and + # *not* something off the site lib + for (qw( + DBIx::Class::SQLMaker + SQL::Abstract + SQL::Abstract::Test + )) { + if ($INC{Module::Runtime::module_notional_filename($_)}) { + die "\nUnable to continue - a part of the bundled templib contents " + . "was already loaded (likely an older version from CPAN). " + . "Make sure that @{[ __PACKAGE__ ]} is loaded before $_\n\n" + ; + } + } + + our ($HERE) = File::Spec->rel2abs( + File::Spec->catdir( (File::Spec->splitpath(__FILE__))[1], '_TempExtlib' ) + ) =~ /^(.*)$/; # screw you, taint mode + + unshift @INC, $HERE; +} + use constant SPURIOUS_VERSION_CHECK_WARNINGS => ($] < 5.010 ? 
1 : 0); BEGIN { diff --git a/maint/careless_ssh.bash b/maint/careless_ssh.bash new file mode 100755 index 000000000..1b9e0bcd8 --- /dev/null +++ b/maint/careless_ssh.bash @@ -0,0 +1,3 @@ +#!/bin/bash + +/usr/bin/ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "$@" diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index eb642a8a5..6aadca4b4 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -107,7 +107,7 @@ else parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant - parallel_installdeps_notest 'SQL::Abstract@1.78_02' Moose Module::Install JSON SQL::Translator File::Which + parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which if [[ -n "$DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then # the official version is very much outdated and does not compile on 5.14+ @@ -127,9 +127,6 @@ if [[ "$CLEANTEST" = "true" ]]; then # we may need to prepend some stuff to that list HARD_DEPS="$(echo $(make listdeps))" - # temporary - HARD_DEPS="R/RI/RIBASUSHI/SQL-Abstract-1.78_02.tar.gz $HARD_DEPS" - ##### TEMPORARY WORKAROUNDS needed in case we will be using CPAN.pm if [[ "$DEVREL_DEPS" != "true" ]] && ! CPAN_is_sane ; then # combat dzillirium on harness-wide level, otherwise breakage happens weekly diff --git a/t/53lean_startup.t b/t/53lean_startup.t index 27a4dd486..0745b3499 100644 --- a/t/53lean_startup.t +++ b/t/53lean_startup.t @@ -108,6 +108,9 @@ BEGIN { Class::Accessor::Grouped Class::C3::Componentised SQL::Abstract + + Module::Runtime + File::Spec )); require DBICTest::Schema; diff --git a/xt/podcoverage.t b/xt/podcoverage.t index a16a3653a..dff0acf7e 100644 --- a/xt/podcoverage.t +++ b/xt/podcoverage.t @@ -114,6 +114,8 @@ my $exceptions = { /] }, + 'DBIx::Class::_TempExtlib*' => { skip => 1 }, + 'DBIx::Class::Admin::*' => { skip => 1 }, 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 }, 'DBIx::Class::Componentised' => { skip => 1 }, From 0f0d19a4b85980b3fad8cdbc66ca74c11f092ac6 Mon Sep 17 00:00:00 2001 From: Karen Etheridge Date: Wed, 23 Jul 2014 12:25:55 -0700 Subject: [PATCH 117/548] The current_source_alias is prepended, not a literal "me." --- lib/DBIx/Class/ResultSet.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index c6b725b66..b703c8fc6 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -4072,7 +4072,7 @@ syntax as outlined above. Shortcut to request a particular set of columns to be retrieved. Each column spec may be a string (a table column name), or a hash (in which case the key is the C value, and the value is used as the C from that, then auto-populates C from C, to instruct DBIx::Class how to store the result of the column. -Also note that the L attribute has nothing to do with the SQL-side 'AS' -identifier aliasing. You can however alias a function, so you can use it in -e.g. an C clause. This is done via the C<-as> B supplied as shown in the example above. =head2 +select @@ -4189,8 +4190,10 @@ Indicates DBIC-side names for object inflation. 
That is L indicates the slot name in which the column value will be stored within the L object. The value will then be accessible via this identifier by the C method (or via the object accessor B) as shown below. The L attribute has -B with the SQL-side C. See L for details. +with the same name already exists>) as shown below. + +The L attribute has B with the SQL-side identifier +aliasing C. See L for details. $rs = $schema->resultset('Employee')->search(undef, { select => [ From 8848b5bd9ece2c0320b99ce616bd6f3ecd205159 Mon Sep 17 00:00:00 2001 From: "Stefan Hornburg (Racke)" Date: Thu, 14 Aug 2014 13:56:55 +0200 Subject: [PATCH 156/548] Fix "muse" typo in Relationship::Base's POD. --- lib/DBIx/Class/Relationship/Base.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 882d47bb8..c4d111186 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -406,7 +406,7 @@ the relationship attributes. The C relationship does not update across relationships by default, so if you have a 'proxy' attribute on a belongs_to and want to -use 'update' on it, you muse set C<< cascade_update => 1 >>. +use 'update' on it, you must set C<< cascade_update => 1 >>. This is not a RDMS style cascade update - it purely means that when an object has update called on it, all the related objects also From 6565d2c3170bbfd73fb9712b6b0d587f690bb976 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 27 Aug 2014 08:01:31 +0200 Subject: [PATCH 157/548] Remove a superfluous pair of []s in cond collapser --- lib/DBIx/Class/Storage/DBIHacks.pm | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index c7910be6f..71ddf7c1b 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -998,17 +998,17 @@ sub _collapse_cond { my $chunk = shift @pieces; if (ref $chunk eq 'HASH') { - push @pairs, map { [ $_ => $chunk->{$_} ] } sort keys %$chunk; + push @pairs, map { $_ => $chunk->{$_} } sort keys %$chunk; } elsif (ref $chunk eq 'ARRAY') { - push @pairs, [ -or => $chunk ] + push @pairs, -or => $chunk if @$chunk; } elsif ( ! 
ref $chunk) { - push @pairs, [ $chunk, shift @pieces ]; + push @pairs, $chunk, shift @pieces; } else { - push @pairs, [ '', $chunk ]; + push @pairs, '', $chunk; } } @@ -1103,7 +1103,7 @@ sub _collapse_cond_unroll_pairs { my @conds; while (@$pairs) { - my ($lhs, $rhs) = @{ shift @$pairs }; + my ($lhs, $rhs) = splice @$pairs, 0, 2; if ($lhs eq '') { push @conds, $self->_collapse_cond($rhs); @@ -1131,7 +1131,7 @@ sub _collapse_cond_unroll_pairs { push @conds, { $lhs => $rhs }; } else { - for my $p ($self->_collapse_cond_unroll_pairs([ [ $lhs => $rhs->{'='} ] ])) { + for my $p ($self->_collapse_cond_unroll_pairs([ $lhs => $rhs->{'='} ])) { # extra sanity check if (keys %$p > 1) { @@ -1163,18 +1163,18 @@ sub _collapse_cond_unroll_pairs { if @$rhs == 1; if( $rhs->[0] =~ /^\-and$/i ) { - unshift @$pairs, map { [ $lhs => $_ ] } @{$rhs}[1..$#$rhs]; + unshift @$pairs, map { $lhs => $_ } @{$rhs}[1..$#$rhs]; } # if not an AND then it's an OR elsif(@$rhs == 2) { - unshift @$pairs, [ $lhs => $rhs->[1] ]; + unshift @$pairs, $lhs => $rhs->[1]; } else { push @conds, { $lhs => $rhs }; } } elsif (@$rhs == 1) { - unshift @$pairs, [ $lhs => $rhs->[0] ]; + unshift @$pairs, $lhs => $rhs->[0]; } else { push @conds, { $lhs => $rhs }; From b34d93310401fd6f4fd68dc965b4aec592913eb1 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 22 Aug 2014 10:53:46 +0200 Subject: [PATCH 158/548] Switch to a sane deduplication system The original naive approach from 8e40a627f was utterly braindead and prone to false positives. Fix this for good --- lib/DBIx/Class/Storage/DBIHacks.pm | 30 +++++++++++++++++------------- lib/DBIx/Class/_Util.pm | 8 +++++++- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 71ddf7c1b..f21759c8a 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -15,7 +15,7 @@ use mro 'c3'; use List::Util 'first'; use Scalar::Util 'blessed'; -use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; +use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize); use SQL::Abstract qw(is_plain_value is_literal_value); use namespace::clean; @@ -1004,7 +1004,7 @@ sub _collapse_cond { push @pairs, -or => $chunk if @$chunk; } - elsif ( ! ref $chunk) { + elsif ( ! length ref $chunk) { push @pairs, $chunk, shift @pieces; } else { @@ -1045,6 +1045,7 @@ sub _collapse_cond { } } + # unroll single-element -and nodes if ( ref $fin->{-and} eq 'ARRAY' and @{$fin->{-and}} == 1 ) { my $piece = (delete $fin->{-and})->[0]; if (ref $piece eq 'ARRAY') { @@ -1069,12 +1070,12 @@ sub _collapse_cond { return unless @w; if ( @w == 1 ) { - return ( ref $w[0] ) + return ( length ref $w[0] ) ? $self->_collapse_cond($w[0]) : { $w[0] => undef } ; } - elsif ( @w == 2 and ! ref $w[0]) { + elsif ( @w == 2 and ! length ref $w[0]) { if ( ( $w[0]||'' ) =~ /^\-and$/i ) { return (ref $w[1] eq 'HASH' or ref $w[1] eq 'ARRAY') ? $self->_collapse_cond($w[1], (ref $w[1] eq 'ARRAY') ) @@ -1205,7 +1206,6 @@ sub _collapse_cond_unroll_pairs { # is instead used to infer inambiguous values from conditions # (e.g. 
the inheritance of resultset conditions on new_result) # -my $undef_marker = \ do{ my $x = 'undef' }; sub _extract_fixed_condition_columns { my ($self, $where, $consider_nulls) = @_; my $where_hash = $self->_collapse_cond($_[1]); @@ -1216,7 +1216,7 @@ sub _extract_fixed_condition_columns { my $vals; if (!defined ($v = $where_hash->{$c}) ) { - $vals->{$undef_marker} = $v if $consider_nulls + $vals->{UNDEF} = $v if $consider_nulls } elsif ( ref $v eq 'HASH' @@ -1225,15 +1225,15 @@ sub _extract_fixed_condition_columns { ) { if (exists $v->{-value}) { if (defined $v->{-value}) { - $vals->{$v->{-value}} = $v->{-value} + $vals->{"VAL_$v->{-value}"} = $v->{-value} } elsif( $consider_nulls ) { - $vals->{$undef_marker} = $v->{-value}; + $vals->{UNDEF} = $v->{-value}; } } # do not need to check for plain values - _collapse_cond did it for us - elsif(ref $v->{'='} and is_literal_value($v->{'='}) ) { - $vals->{$v->{'='}} = $v->{'='}; + elsif(length ref $v->{'='} and is_literal_value($v->{'='}) ) { + $vals->{ 'SER_' . serialize $v->{'='} } = $v->{'='}; } } elsif ( @@ -1241,19 +1241,23 @@ sub _extract_fixed_condition_columns { or is_plain_value ($v) ) { - $vals->{$v} = $v; + $vals->{"VAL_$v"} = $v; } elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') { for ( @{$v}[1..$#$v] ) { my $subval = $self->_extract_fixed_condition_columns({ $c => $_ }, 'consider nulls'); # always fish nulls out on recursion next unless exists $subval->{$c}; # didn't find anything - $vals->{defined $subval->{$c} ? $subval->{$c} : $undef_marker} = $subval->{$c}; + $vals->{ + ! defined $subval->{$c} ? 'UNDEF' + : ( ! length ref $subval->{$c} or is_plain_value $subval->{$c} ) ? "VAL_$subval->{$c}" + : ( 'SER_' . serialize $subval->{$c} ) + } = $subval->{$c}; } } if (keys %$vals == 1) { ($res->{$c}) = (values %$vals) - unless !$consider_nulls and exists $vals->{$undef_marker}; + unless !$consider_nulls and exists $vals->{UNDEF}; } elsif (keys %$vals > 1) { $res->{$c} = UNRESOLVABLE_CONDITION; diff --git a/lib/DBIx/Class/_Util.pm b/lib/DBIx/Class/_Util.pm index 9f984d297..37dddfcb8 100644 --- a/lib/DBIx/Class/_Util.pm +++ b/lib/DBIx/Class/_Util.pm @@ -104,7 +104,7 @@ our @EXPORT_OK = qw( sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray fail_on_internal_call refdesc refcount hrefaddr is_exception - quote_sub qsub perlstring + quote_sub qsub perlstring serialize UNRESOLVABLE_CONDITION ); @@ -145,6 +145,12 @@ sub refcount ($) { B::svref_2object($_[0])->REFCNT; } +sub serialize ($) { + require Storable; + local $Storable::canonical = 1; + Storable::nfreeze($_[0]); +} + sub is_exception ($) { my $e = $_[0]; From 5268b1da661134493695d0c8f364b2d094da616e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 22 Aug 2014 13:40:14 +0200 Subject: [PATCH 159/548] Deduplicate (and stabilize) the result of _collapse_cond Among other things set the stage to a fix of RT#98161 (later commit) --- lib/DBIx/Class/ResultSet.pm | 26 +++------- lib/DBIx/Class/Storage/DBIHacks.pm | 17 +++++++ t/53lean_startup.t | 2 +- t/search/stack_cond.t | 79 ++++++++++++++++++++++++++++++ t/sqlmaker/dbihacks_internals.t | 21 ++++---- 5 files changed, 117 insertions(+), 28 deletions(-) create mode 100644 t/search/stack_cond.t diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 9a478e4a1..97417fa2d 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -10,7 +10,6 @@ use DBIx::Class::_Util qw( fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION ); use Try::Tiny; -use Data::Compare (); # no 
imports!!! guard against insane architecture # not importing first() as it will clash with our own method use List::Util (); @@ -656,26 +655,17 @@ sub _stack_cond { (ref $_ eq 'HASH' and ! keys %$_) ) and $_ = undef for ($left, $right); - # either on of the two undef or both undef - if ( ( (defined $left) xor (defined $right) ) or ! defined $left ) { + # either one of the two undef + if ( (defined $left) xor (defined $right) ) { return defined $left ? $left : $right; } - - my $cond = $self->result_source->schema->storage->_collapse_cond({ -and => [$left, $right] }); - - for my $c (grep { ref $cond->{$_} eq 'ARRAY' and ($cond->{$_}[0]||'') eq '-and' } keys %$cond) { - - my @vals = sort @{$cond->{$c}}[ 1..$#{$cond->{$c}} ]; - my @fin = shift @vals; - - for my $v (@vals) { - push @fin, $v unless Data::Compare::Compare( $fin[-1], $v ); - } - - $cond->{$c} = (@fin == 1) ? $fin[0] : [-and => @fin ]; + # both undef + elsif ( ! defined $left ) { + return undef + } + else { + return $self->result_source->schema->storage->_collapse_cond({ -and => [$left, $right] }); } - - $cond; } =head2 search_literal diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index f21759c8a..d2d8f63dc 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1056,6 +1056,23 @@ sub _collapse_cond { } } + # compress same-column conds found in $fin + for my $col ( keys %$fin ) { + next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') eq '-and'; + my $val_bag = { map { + (! defined $_ ) ? ( UNDEF => undef ) + : ( ! ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) + : ( ( 'SER_' . serialize $_ ) => $_ ) + } @{$fin->{$col}}[1 .. $#{$fin->{$col}}] }; + + if (keys %$val_bag == 1 ) { + ($fin->{$col}) = values %$val_bag; + } + else { + $fin->{$col} = [ -and => map { $val_bag->{$_} } sort keys %$val_bag ]; + } + } + return $fin; } elsif (ref $where eq 'ARRAY') { diff --git a/t/53lean_startup.t b/t/53lean_startup.t index 6dd37f793..2943507c3 100644 --- a/t/53lean_startup.t +++ b/t/53lean_startup.t @@ -104,7 +104,6 @@ BEGIN { Scalar::Util List::Util - Data::Compare Class::Accessor::Grouped Class::C3::Componentised @@ -123,6 +122,7 @@ BEGIN { register_lazy_loadable_requires(qw( Moo Context::Preserve + Data::Compare )); my $s = DBICTest::Schema->connect('dbi:SQLite::memory:'); diff --git a/t/search/stack_cond.t b/t/search/stack_cond.t new file mode 100644 index 000000000..a68f69226 --- /dev/null +++ b/t/search/stack_cond.t @@ -0,0 +1,79 @@ +use strict; +use warnings; + +use Test::More; +use lib qw(t/lib); +use DBICTest ':DiffSQL'; +use SQL::Abstract qw(is_plain_value is_literal_value); +use List::Util 'shuffle'; +use Data::Dumper; +$Data::Dumper::Terse = 1; +$Data::Dumper::Useqq = 1; +$Data::Dumper::Indent = 0; + +my $schema = DBICTest->init_schema(); + +for my $c ( + { cond => undef, sql => 'IS NULL' }, + { cond => { -value => undef }, sql => 'IS NULL' }, + { cond => \'foo', sql => '= foo' }, + { cond => 'foo', sql => '= ?', bind => [ + [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ], + [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ], + ]}, + { cond => { -value => 'foo' }, sql => '= ?', bind => [ + [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ], + [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ], + ]}, + { cond => \[ '?', "foo" ], sql => '= ?', bind => [ + [ {} => 'foo' ], + [ {} => 'foo' ], + ]}, +) { 
+ my $rs = $schema->resultset('CD')->search({}, { columns => 'title' }); + + my $bare_cond = is_literal_value($c->{cond}) ? { '=', $c->{cond} } : $c->{cond}; + + my @query_steps = ( + # this is a monkey-wrench, always there + { title => { '!=', [ -and => \'bar' ] }, year => { '!=', [ -and => 'bar' ] } }, + + { title => $bare_cond, year => { '=', $c->{cond} } }, + { -and => [ year => $bare_cond, { title => { '=', $c->{cond} } } ] }, + [ year => $bare_cond ], + [ title => $bare_cond ], + { -and => [ { year => { '=', $c->{cond} } }, { title => { '=', $c->{cond} } } ] }, + { -and => { -or => { year => { '=', $c->{cond} } } }, -or => { title => $bare_cond } }, + ); + + if (my $v = is_plain_value($c->{cond})) { + push @query_steps, + { year => $v->[0] }, + { title => $v->[0] }, + { -and => [ year => $v->[0], title => $v->[0] ] }, + ; + } + + @query_steps = shuffle @query_steps; + + $rs = $rs->search($_) for @query_steps; + + my @bind = @{$c->{bind} || []}; + { + no warnings 'misc'; + splice @bind, 1, 0, [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'bar' ]; + } + + is_same_sql_bind ( + $rs->as_query, + "( + SELECT me.title + FROM cd me + WHERE title != bar AND title $c->{sql} AND year != ? AND year $c->{sql} + )", + \@bind, + 'Double condition correctly collapsed for steps' . Dumper \@query_steps, + ); +} + +done_testing; diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 1f555fc4a..b15dc428b 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -95,14 +95,16 @@ for my $t ( }, { where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ $num, $num ] } }, - cc_result => { artistid => 1, charfield => [-and => { '=' => \'1' }, { '=' => \['?',2] } ], rank => { '=' => [$num, $num] } }, + cc_result => { artistid => 1, charfield => [ -and => { '=' => \['?',2] }, { '=' => \'1' } ], rank => { '=' => [$num, $num] } }, sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? )', + collapsed_sql => 'WHERE artistid = ? AND charfield = ? AND charfield = 1 AND ( rank = ? OR rank = ? )', efcc_result => { artistid => 1, charfield => UNRESOLVABLE_CONDITION }, }, { where => { -and => [ artistid => 1, artistid => 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => [ -or => { '=', 2 } ], rank => [-and => undef, { '=', undef }, { '!=', 2 } ] }, - cc_result => { artistid => [ -and => 1, 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => 2, rank => [ -and => undef, undef, { '!=', 2 } ] }, + cc_result => { artistid => [ -and => 1, 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => 2, rank => [ -and => { '!=', 2 }, undef ] }, sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank IS NULL AND rank IS NULL AND rank != ?', + collapsed_sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank != ? 
AND rank IS NULL', efcc_result => { artistid => UNRESOLVABLE_CONDITION, name => 2, @@ -194,8 +196,9 @@ for my $t ( # batshit insanity, just to be thorough { where => { -and => [ [ 'artistid' ], [ -and => [ artistid => { '!=', 69 }, artistid => undef, artistid => { '=' => 200 } ]], artistid => [], { -or => [] }, { -and => [] }, [ 'charfield' ], { name => [] }, 'rank' ] }, - cc_result => { artistid => [ -and => undef, { '!=', 69 }, undef, 200, [] ], charfield => undef, name => [], rank => undef }, + cc_result => { artistid => [ -and => [], { '!=', 69 }, undef, 200 ], charfield => undef, name => [], rank => undef }, sql => 'WHERE artistid IS NULL AND artistid != ? AND artistid IS NULL AND artistid = ? AND 0=1 AND charfield IS NULL AND 0=1 AND rank IS NULL', + collapsed_sql => 'WHERE 0=1 AND artistid != ? AND artistid IS NULL AND artistid = ? AND charfield IS NULL AND 0=1 AND rank IS NULL', efcc_result => { artistid => UNRESOLVABLE_CONDITION }, efcc_n_result => { artistid => UNRESOLVABLE_CONDITION, charfield => undef, rank => undef }, }, @@ -236,17 +239,17 @@ for my $t ( ) { my $name = do { local ($Data::Dumper::Indent, $Data::Dumper::Terse, $Data::Dumper::Sortkeys) = (0, 1, 1); Dumper $w }; - my @orig_sql_bind = $sm->where($w); + my ($generated_sql) = $sm->where($w); - is_same_sql ( $orig_sql_bind[0], $t->{sql}, "Expected SQL from $name" ) + is_same_sql ( $generated_sql, $t->{sql}, "Expected SQL from $name" ) if exists $t->{sql}; my $collapsed_cond = $schema->storage->_collapse_cond($w); - is_same_sql_bind( - \[ $sm->where($collapsed_cond) ], - \\@orig_sql_bind, - "Collapse did not alter final SQL based on $name", + is_same_sql( + ($sm->where($collapsed_cond))[0], + ( $t->{collapsed_sql} || $t->{sql} || $generated_sql ), + "Collapse did not alter *the semantics* of the final SQL based on $name", ); is_deeply( From 953d5b7d978136fb5f43339f1b7b41d140b3e4a5 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 27 Aug 2014 08:32:52 +0200 Subject: [PATCH 160/548] Make sure cond collapser works case insensitively --- lib/DBIx/Class/Storage/DBIHacks.pm | 4 ++-- t/sqlmaker/dbihacks_internals.t | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index d2d8f63dc..90bade8f8 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1031,7 +1031,7 @@ sub _collapse_cond { (ref $_ ne 'ARRAY' or !@$_) and $_ = [ -and => $_ ] for ($l, $r); - if (@$l and @$r and $l->[0] eq $r->[0] and $l->[0] eq '-and') { + if (@$l and @$r and $l->[0] eq $r->[0] and $l->[0] =~ /^\-and$/i) { $fin->{$col} = [ -and => map { @$_[1..$#$_] } ($l, $r) ]; } else { @@ -1188,7 +1188,7 @@ sub _collapse_cond_unroll_pairs { unshift @$pairs, $lhs => $rhs->[1]; } else { - push @conds, { $lhs => $rhs }; + push @conds, { $lhs => [ @{$rhs}[1..$#$rhs] ] }; } } elsif (@$rhs == 1) { diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index b15dc428b..121463873 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -94,8 +94,8 @@ for my $t ( efcc_n_result => { artistid => 1, charfield => undef }, }, { - where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [-and => \'1', \['?',2] ] }, rank => { '=' => [ $num, $num ] } }, - cc_result => { artistid => 1, charfield => [ -and => { '=' => \['?',2] }, { '=' => \'1' } ], rank => { '=' => [$num, $num] } }, + where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [ -AND => \'1', \['?',2] ] }, rank => 
{ '=' => [ -OR => $num, $num ] } }, + cc_result => { artistid => 1, charfield => [-and => { '=' => \['?',2] }, { '=' => \'1' } ], rank => { '=' => [$num, $num] } }, sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? )', collapsed_sql => 'WHERE artistid = ? AND charfield = ? AND charfield = 1 AND ( rank = ? OR rank = ? )', efcc_result => { artistid => 1, charfield => UNRESOLVABLE_CONDITION }, @@ -227,7 +227,10 @@ for my $t ( for my $w ( $t->{where}, + $t->{where}, # do it twice, make sure we didn't destory the condition [ -and => $t->{where} ], + [ -AND => $t->{where} ], + { -OR => [ -AND => $t->{where} ] }, ( keys %{$t->{where}} <= 1 ? [ %{$t->{where}} ] : () ), ( (keys %{$t->{where}} == 1 and $t->{where}{-or}) ? ( ref $t->{where}{-or} eq 'HASH' From 22485a7ea98d355acc7dc5432550d10dba0dee5e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 2 Sep 2014 04:20:26 +0200 Subject: [PATCH 161/548] Make sure empty cond collapser works on all positions Adds another round of sql stabilization (akin to 5268b1da6) --- lib/DBIx/Class/Storage/DBIHacks.pm | 51 ++++++++++++++++-------------- t/sqlmaker/dbihacks_internals.t | 19 +++++++++++ 2 files changed, 47 insertions(+), 23 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 90bade8f8..e1722991e 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1076,36 +1076,41 @@ sub _collapse_cond { return $fin; } elsif (ref $where eq 'ARRAY') { - my @w = @$where; - while ( @w and ( - (ref $w[0] eq 'ARRAY' and ! @{$w[0]} ) - or - (ref $w[0] eq 'HASH' and ! keys %{$w[0]}) - )) { shift @w }; + # we are always at top-level here, it is safe to dump empty *standalone* pieces + my $fin_idx; - return unless @w; + for (my $i = 0; $i <= $#$where; $i++ ) { - if ( @w == 1 ) { - return ( length ref $w[0] ) - ? $self->_collapse_cond($w[0]) - : { $w[0] => undef } - ; - } - elsif ( @w == 2 and ! length ref $w[0]) { - if ( ( $w[0]||'' ) =~ /^\-and$/i ) { - return (ref $w[1] eq 'HASH' or ref $w[1] eq 'ARRAY') - ? $self->_collapse_cond($w[1], (ref $w[1] eq 'ARRAY') ) - : $self->throw_exception("Unsupported top-level op/arg pair: [ $w[0] => $w[1] ]") - ; + my $logic_mod = lc ( ($where->[$i] =~ /^(\-(?:and|or))$/i)[0] || '' ); + + if ($logic_mod) { + $i++; + $self->throw_exception("Unsupported top-level op/arg pair: [ $logic_mod => $where->[$i] ]") + unless ref $where->[$i] eq 'HASH' or ref $where->[$i] eq 'ARRAY'; + + my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] }) + or next; + + $fin_idx->{ serialize $sub_elt } = $sub_elt; + } + elsif (! length ref $where->[$i] ) { + $fin_idx->{"$where->[$i]_$i"} = $self->_collapse_cond({ @{$where}[$i, $i+1] }) || next; + $i++; } else { - return $self->_collapse_cond({ @w }); + $fin_idx->{ serialize $where->[$i] } = $self->_collapse_cond( $where->[$i] ) || next; } } - else { - return { -or => \@w }; - } + + return unless $fin_idx; + + return ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { + -or => [ map + { ref $fin_idx->{$_} eq 'HASH' ? 
%{$fin_idx->{$_}} : $fin_idx->{$_} } + sort keys %$fin_idx + ] + }; } else { # not a hash not an array diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 121463873..1ad550ad4 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -179,6 +179,25 @@ for my $t ( efcc_result => {}, sql => '', }, + { + where => { -or => [ foo => 1, $_ ] }, + cc_result => { foo => 1 }, + efcc_result => { foo => 1 }, + sql => 'WHERE foo = ?', + }, + { + where => { -or => [ $_, foo => 1 ] }, + cc_result => { foo => 1 }, + efcc_result => { foo => 1 }, + sql => 'WHERE foo = ?', + }, + { + where => { -and => [ fuu => 2, $_, foo => 1 ] }, + sql => 'WHERE fuu = ? AND foo = ?', + collapsed_sql => 'WHERE foo = ? AND fuu = ?', + cc_result => { foo => 1, fuu => 2 }, + efcc_result => { foo => 1, fuu => 2 }, + }, } ( # bare [], {}, From 135ac69ddafd158cbfa4082871599ee104bbd205 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 3 Sep 2014 12:44:46 +0200 Subject: [PATCH 162/548] Fix condition collapser corrupting -X operators This is (fingercross) the complete fix for RT#98161 --- lib/DBIx/Class/Storage/DBIHacks.pm | 116 +++++++++++++++++------------ t/search/stack_cond.t | 17 ++++- t/sqlmaker/dbihacks_internals.t | 99 ++++++++++++++++++++++++ 3 files changed, 181 insertions(+), 51 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index e1722991e..da09d12a0 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -986,6 +986,8 @@ sub _extract_colinfo_of_stable_main_source_order_by_portion { sub _collapse_cond { my ($self, $where, $where_is_anded_array) = @_; + my $fin; + if (! $where) { return; } @@ -1018,25 +1020,31 @@ sub _collapse_cond { or return; # Consolidate various @conds back into something more compact - my $fin; - for my $c (@conds) { if (ref $c ne 'HASH') { push @{$fin->{-and}}, $c; } else { for my $col (sort keys %$c) { - if (exists $fin->{$col}) { - my ($l, $r) = ($fin->{$col}, $c->{$col}); - - (ref $_ ne 'ARRAY' or !@$_) and $_ = [ -and => $_ ] for ($l, $r); - if (@$l and @$r and $l->[0] eq $r->[0] and $l->[0] =~ /^\-and$/i) { - $fin->{$col} = [ -and => map { @$_[1..$#$_] } ($l, $r) ]; - } - else { - $fin->{$col} = [ -and => $fin->{$col}, $c->{$col} ]; - } + # consolidate all -and nodes + if ($col =~ /^\-and$/i) { + push @{$fin->{-and}}, + ref $c->{$col} eq 'ARRAY' ? @{$c->{$col}} + : ref $c->{$col} eq 'HASH' ? %{$c->{$col}} + : { $col => $c->{$col} } + ; + } + elsif ($col =~ /^\-/) { + push @{$fin->{-and}}, { $col => $c->{$col} }; + } + elsif (exists $fin->{$col}) { + $fin->{$col} = [ -and => map { + (ref $_ eq 'ARRAY' and ($_->[0]||'') =~ /^\-and$/i ) + ? @{$_}[1..$#$_] + : $_ + ; + } ($fin->{$col}, $c->{$col}) ]; } else { $fin->{$col} = $c->{$col}; @@ -1044,39 +1052,8 @@ sub _collapse_cond { } } } - - # unroll single-element -and nodes - if ( ref $fin->{-and} eq 'ARRAY' and @{$fin->{-and}} == 1 ) { - my $piece = (delete $fin->{-and})->[0]; - if (ref $piece eq 'ARRAY') { - $fin->{-or} = $fin->{-or} ? [ $piece, $fin->{-or} ] : $piece; - } - elsif (! exists $fin->{''}) { - $fin->{''} = $piece; - } - } - - # compress same-column conds found in $fin - for my $col ( keys %$fin ) { - next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') eq '-and'; - my $val_bag = { map { - (! defined $_ ) ? ( UNDEF => undef ) - : ( ! ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) - : ( ( 'SER_' . serialize $_ ) => $_ ) - } @{$fin->{$col}}[1 .. 
$#{$fin->{$col}}] }; - - if (keys %$val_bag == 1 ) { - ($fin->{$col}) = values %$val_bag; - } - else { - $fin->{$col} = [ -and => map { $val_bag->{$_} } sort keys %$val_bag ]; - } - } - - return $fin; } elsif (ref $where eq 'ARRAY') { - # we are always at top-level here, it is safe to dump empty *standalone* pieces my $fin_idx; @@ -1092,20 +1069,23 @@ sub _collapse_cond { my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] }) or next; - $fin_idx->{ serialize $sub_elt } = $sub_elt; + $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt; } elsif (! length ref $where->[$i] ) { - $fin_idx->{"$where->[$i]_$i"} = $self->_collapse_cond({ @{$where}[$i, $i+1] }) || next; + my $sub_elt = $self->_collapse_cond({ @{$where}[$i, $i+1] }) + or next; + + $fin_idx->{ "COL_$where->[$i]_" . serialize $sub_elt } = $sub_elt; $i++; } else { - $fin_idx->{ serialize $where->[$i] } = $self->_collapse_cond( $where->[$i] ) || next; + $fin_idx->{ "SER_" . serialize $where->[$i] } = $self->_collapse_cond( $where->[$i] ) || next; } } return unless $fin_idx; - return ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { + $fin = ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { -or => [ map { ref $fin_idx->{$_} eq 'HASH' ? %{$fin_idx->{$_}} : $fin_idx->{$_} } sort keys %$fin_idx @@ -1114,10 +1094,48 @@ sub _collapse_cond { } else { # not a hash not an array - return { '' => $where }; + $fin = { '' => $where }; + } + + # unroll single-element -and's + while ( + $fin->{-and} + and + @{$fin->{-and}} < 2 + ) { + my $and = delete $fin->{-and}; + last if @$and == 0; + + # at this point we have @$and == 1 + if ( + ref $and->[0] eq 'HASH' + and + ! grep { exists $fin->{$_} } keys %{$and->[0]} + ) { + $fin = { + %$fin, %{$and->[0]} + }; + } + } + + # compress same-column conds found in $fin + for my $col ( grep { $_ !~ /^\-/ } keys %$fin ) { + next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') =~ /^\-and$/i; + my $val_bag = { map { + (! defined $_ ) ? ( UNDEF => undef ) + : ( ! ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) + : ( ( 'SER_' . serialize $_ ) => $_ ) + } @{$fin->{$col}}[1 .. $#{$fin->{$col}}] }; + + if (keys %$val_bag == 1 ) { + ($fin->{$col}) = values %$val_bag; + } + else { + $fin->{$col} = [ -and => map { $val_bag->{$_} } sort keys %$val_bag ]; + } } - die 'should not get here'; + return keys %$fin ? $fin : (); } sub _collapse_cond_unroll_pairs { diff --git a/t/search/stack_cond.t b/t/search/stack_cond.t index a68f69226..d43f274c9 100644 --- a/t/search/stack_cond.t +++ b/t/search/stack_cond.t @@ -35,8 +35,10 @@ for my $c ( my $bare_cond = is_literal_value($c->{cond}) ? { '=', $c->{cond} } : $c->{cond}; my @query_steps = ( - # this is a monkey-wrench, always there + # these are monkey-wrenches, always there { title => { '!=', [ -and => \'bar' ] }, year => { '!=', [ -and => 'bar' ] } }, + { -or => [ genreid => undef, genreid => { '!=' => \42 } ] }, + { -or => [ genreid => undef, genreid => { '!=' => \42 } ] }, { title => $bare_cond, year => { '=', $c->{cond} } }, { -and => [ year => $bare_cond, { title => { '=', $c->{cond} } } ] }, @@ -69,7 +71,18 @@ for my $c ( "( SELECT me.title FROM cd me - WHERE title != bar AND title $c->{sql} AND year != ? AND year $c->{sql} + WHERE + ( genreid != 42 OR genreid IS NULL ) + AND + ( genreid != 42 OR genreid IS NULL ) + AND + title != bar + AND + title $c->{sql} + AND + year != ? + AND + year $c->{sql} )", \@bind, 'Double condition correctly collapsed for steps' . 
Dumper \@query_steps, diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 1ad550ad4..ced331f48 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -117,6 +117,105 @@ for my $t ( rank => undef, }, }, + (map { { + where => $_, + sql => 'WHERE (rank = 13 OR charfield IS NULL OR artistid = ?) AND (artistid = ? OR charfield IS NULL OR rank != 42)', + collapsed_sql => 'WHERE (artistid = ? OR charfield IS NULL OR rank = 13) AND (artistid = ? OR charfield IS NULL OR rank != 42)', + cc_result => { -and => [ + { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] }, + { -or => [ artistid => 1, charfield => undef, rank => { '!=' => \42 } ] }, + ] }, + efcc_result => {}, + efcc_n_result => {}, + } } ( + { -and => [ + -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], + -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } }, + ] }, + + { + -OR => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], + -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } }, + }, + ) ), + { + where => { -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => { '=' => 1 }, genreid => { '=' => \['?', 2] } ] }, + sql => 'WHERE rank = 13 OR charfield IS NULL OR artistid = ? OR genreid = ?', + collapsed_sql => 'WHERE artistid = ? OR charfield IS NULL OR genreid = ? OR rank = 13', + cc_result => { -or => [ artistid => 1, charfield => undef, genreid => { '=' => \['?', 2] }, rank => { '=' => \13 } ] }, + efcc_result => {}, + efcc_n_result => {}, + }, + { + where => { -and => [ + -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], + -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '=' => \13 } }, + ] }, + cc_result => { -and => [ + { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] }, + { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] }, + ] }, + sql => 'WHERE (rank = 13 OR charfield IS NULL OR artistid = ?) AND (artistid = ? OR charfield IS NULL OR rank = 13)', + collapsed_sql => 'WHERE (artistid = ? OR charfield IS NULL OR rank = 13) AND (artistid = ? OR charfield IS NULL OR rank = 13)', + efcc_result => {}, + efcc_n_result => {}, + }, + { + where => { -and => [ + -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], + -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } }, + -and => [ foo => { '=' => \1 }, bar => 2 ], + -and => [ foo => 3, bar => { '=' => \4 } ], + -exists => \'(SELECT 1)', + -exists => \'(SELECT 2)', + -not => { foo => 69 }, + -not => { foo => 42 }, + ]}, + sql => 'WHERE + ( rank = 13 OR charfield IS NULL OR artistid = ? ) + AND ( artistid = ? OR charfield IS NULL OR rank != 42 ) + AND foo = 1 + AND bar = ? + AND foo = ? + AND bar = 4 + AND (EXISTS (SELECT 1)) + AND (EXISTS (SELECT 2)) + AND NOT foo = ? + AND NOT foo = ? + ', + collapsed_sql => 'WHERE + ( artistid = ? OR charfield IS NULL OR rank = 13 ) + AND ( artistid = ? OR charfield IS NULL OR rank != 42 ) + AND (EXISTS (SELECT 1)) + AND (EXISTS (SELECT 2)) + AND NOT foo = ? + AND NOT foo = ? + AND bar = 4 + AND bar = ? + AND foo = 1 + AND foo = ? 
+ ', + cc_result => { + -and => [ + { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] }, + { -or => [ artistid => 1, charfield => undef, rank => { '!=' => \42 } ] }, + { -exists => \'(SELECT 1)' }, + { -exists => \'(SELECT 2)' }, + { -not => { foo => 69 } }, + { -not => { foo => 42 } }, + ], + foo => [ -and => { '=' => \1 }, 3 ], + bar => [ -and => { '=' => \4 }, 2 ], + }, + efcc_result => { + foo => UNRESOLVABLE_CONDITION, + bar => UNRESOLVABLE_CONDITION, + }, + efcc_n_result => { + foo => UNRESOLVABLE_CONDITION, + bar => UNRESOLVABLE_CONDITION, + }, + }, { where => { -and => [ [ '_macro.to' => { -like => '%correct%' }, '_wc_macros.to' => { -like => '%correct%' } ], From ff7d03e645231bc360b686d7344f618ce8f5ac73 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 4 Sep 2014 12:17:17 +0200 Subject: [PATCH 163/548] Better optional diag in t/sqlmaker/dbihacks_internals.t --- t/sqlmaker/dbihacks_internals.t | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index ced331f48..32ec846d4 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -8,6 +8,12 @@ use DBICTest ':DiffSQL'; use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; use Data::Dumper; +BEGIN { + if ( eval { require Test::Differences } ) { + no warnings 'redefine'; + *is_deeply = \&Test::Differences::eq_or_diff; + } +} my $schema = DBICTest->init_schema( no_deploy => 1); my $sm = $schema->storage->sql_maker; @@ -128,6 +134,7 @@ for my $t ( efcc_result => {}, efcc_n_result => {}, } } ( + { -and => [ -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } }, @@ -137,6 +144,7 @@ for my $t ( -OR => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ], -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } }, }, + ) ), { where => { -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => { '=' => 1 }, genreid => { '=' => \['?', 2] } ] }, @@ -390,6 +398,8 @@ for my $t ( $t->{efcc_n_result}, "Expected fixed_condition including NULLs produced on $name", ) if $t->{efcc_n_result}; + + die unless Test::Builder->new->is_passing; } } From 95da0f23897e2dc2292462546c06ff604bebeefd Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 4 Sep 2014 12:19:48 +0200 Subject: [PATCH 164/548] One more fail-case missed in 135ac69dd Now the test attached to RT#98161 actually passes... le sigh At this point I am very wary of the entire codepath: yes, it is clearly the right thing to do, and the logic is sound, but odd edge cases keep popping up like this... OTOH there is no way to properly do equality inferrence without this entire dance, in other words: rock&hardplace. Hopefully the test suites will shake this out, apologoies to all affected :( --- lib/DBIx/Class/Storage/DBIHacks.pm | 10 ++++++---- t/sqlmaker/dbihacks_internals.t | 10 ++++++++++ 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index da09d12a0..f13be432f 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1086,10 +1086,12 @@ sub _collapse_cond { return unless $fin_idx; $fin = ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { - -or => [ map - { ref $fin_idx->{$_} eq 'HASH' ? 
%{$fin_idx->{$_}} : $fin_idx->{$_} } - sort keys %$fin_idx - ] + -or => [ map { + # unroll single-element hashes + ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) + ? %{$fin_idx->{$_}} + : $fin_idx->{$_} + } sort keys %$fin_idx ] }; } else { diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index 32ec846d4..a225cdcf6 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -146,6 +146,16 @@ for my $t ( }, ) ), + { + where => { -or => [ -and => [ foo => { '!=', undef }, bar => { -in => [ 69, 42 ] } ], foo => { '=', { -value => undef } } ] }, + sql => 'WHERE ( foo IS NOT NULL AND bar IN ( ?, ? ) ) OR foo IS NULL', + collapsed_sql => 'WHERE foo IS NULL OR ( bar IN ( ?, ? ) AND foo IS NOT NULL )', + cc_result => { -or => [ + foo => undef, + { bar => { -in => [ 69, 42 ] }, foo => { '!=', undef } } + ] }, + efcc_result => {}, + }, { where => { -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => { '=' => 1 }, genreid => { '=' => \['?', 2] } ] }, sql => 'WHERE rank = 13 OR charfield IS NULL OR artistid = ? OR genreid = ?', From c1f3f2e8a79c6a4081c9949fa30da09d18d64d3b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 4 Sep 2014 12:50:11 +0200 Subject: [PATCH 165/548] Add one more spot of unroll --- lib/DBIx/Class/Storage/DBIHacks.pm | 12 ++++++++++++ t/sqlmaker/dbihacks_internals.t | 13 ++++++++++--- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index f13be432f..4c07f5e28 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1223,6 +1223,18 @@ sub _collapse_cond_unroll_pairs { push @conds, { $lhs => $rhs }; } } + # unroll func + { -value => ... } + elsif ( + ref $rhs eq 'HASH' + and + ( my ($subop) = keys %$rhs ) == 1 + and + length ref ((values %$rhs)[0]) + and + my $vref = is_plain_value( (values %$rhs)[0] ) + ) { + push @conds, { $lhs => { $subop => @$vref } } + } else { push @conds, { $lhs => $rhs }; } diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index a225cdcf6..b635d6fb5 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -147,10 +147,17 @@ for my $t ( ) ), { - where => { -or => [ -and => [ foo => { '!=', undef }, bar => { -in => [ 69, 42 ] } ], foo => { '=', { -value => undef } } ] }, - sql => 'WHERE ( foo IS NOT NULL AND bar IN ( ?, ? ) ) OR foo IS NULL', - collapsed_sql => 'WHERE foo IS NULL OR ( bar IN ( ?, ? ) AND foo IS NOT NULL )', + where => { -or => [ + -and => [ foo => { '!=', { -value => undef } }, bar => { -in => [ 69, 42 ] } ], + foo => { '=', { -value => undef } }, + baz => { '!=' => { -ident => 'bozz' } }, + baz => { -ident => 'buzz' }, + ] }, + sql => 'WHERE ( foo IS NOT NULL AND bar IN ( ?, ? ) ) OR foo IS NULL OR baz != bozz OR baz = buzz', + collapsed_sql => 'WHERE baz != bozz OR baz = buzz OR foo IS NULL OR ( bar IN ( ?, ? 
) AND foo IS NOT NULL )', cc_result => { -or => [ + baz => { '!=' => { -ident => 'bozz' } }, + baz => { '=' => { -ident => 'buzz' } }, foo => undef, { bar => { -in => [ 69, 42 ] }, foo => { '!=', undef } } ] }, From 8d54fafa1ac137ce9b59beca9780b8feebce55dd Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 4 Sep 2014 14:28:25 +0200 Subject: [PATCH 166/548] Better describe the _resolve_relationship_condition API --- lib/DBIx/Class/ResultSource.pm | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 996beff54..682054262 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1833,20 +1833,23 @@ Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); # metadata # ## self-explanatory API, modeled on the custom cond coderef: -# rel_name -# foreign_alias -# foreign_values -# self_alias -# self_result_object -# require_join_free_condition -# infer_values_based_on (either not supplied or a hashref, implies require_join_free_condition) -# condition (optional, derived from $self->rel_info(rel_name)) +# rel_name => (scalar) +# foreign_alias => (scalar) +# foreign_values => (either not supplied or a hashref) +# self_alias => (scalar) +# self_result_object => (either not supplied or a result object) +# require_join_free_condition => (boolean, throws on failure to construct a JF-cond) +# infer_values_based_on => (either not supplied or a hashref, implies require_join_free_condition) +# condition => (sqla cond struct, optional, defeaults to from $self->rel_info(rel_name)->{cond}) # ## returns a hash -# condition -# identity_map -# join_free_condition (maybe unset) -# inferred_values (always either complete or unset) +# condition => (a valid *likely fully qualified* sqla cond structure) +# identity_map => (a hashref of foreign-to-self *unqualified* column equality names) +# join_free_condition => (a valid *fully qualified* sqla cond structure, maybe unset) +# inferred_values => (in case of an available join_free condition, this is a hashref of +# *unqualified* column/value *EQUALITY* pairs, representing an amalgamation +# of the JF-cond parse and infer_values_based_on +# always either complete or unset) # sub _resolve_relationship_condition { my $self = shift; From cab1f708913e06219cda8a1f48a044619014c2ec Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 4 Sep 2014 17:53:32 +0200 Subject: [PATCH 167/548] Changelog for RT#98161 Breakage introduced in 8d005ad9, fixed for good by a combination of 5268b1da 135ac69d and 95da0f23 --- Changes | 2 ++ lib/DBIx/Class.pm | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Changes b/Changes index e73b27f9f..eace9ac9f 100644 --- a/Changes +++ b/Changes @@ -58,6 +58,8 @@ Revision history for DBIx::Class blind new) (GH#51) * Misc + - A bunch of fixes for the botched condition parsing in the previous + devrels (082700_01 ~ 082700_05) (RT#98161) - Ensure source metadata calls always take place on the result source instance registered with the caller - IFF DBIC_TRACE output defaults to STDERR we now silence the possible diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 4d4eddab7..f2229f063 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -11,7 +11,7 @@ our $VERSION; # $VERSION declaration must stay up here, ahead of any other package # declarations, as to not confuse various modules attempting to determine # this ones version, whether that be s.c.o. 
or Module::Metadata, etc -$VERSION = '0.082700_01'; +$VERSION = '0.082700_06'; $VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases From 86370cc7412011ba5638c471c9ab5707ca759265 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 5 Sep 2014 11:16:09 +0200 Subject: [PATCH 168/548] (travis) Sorry metacpan, your mirror is simply shit >:( --- maint/travis-ci_scripts/20_install.bash | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/maint/travis-ci_scripts/20_install.bash b/maint/travis-ci_scripts/20_install.bash index 79e75cd2c..4fda3bd48 100755 --- a/maint/travis-ci_scripts/20_install.bash +++ b/maint/travis-ci_scripts/20_install.bash @@ -7,12 +7,13 @@ CPAN_MIRROR=$(echo "$PERL_CPANM_OPT" | grep -oP -- '--mirror\s+\S+' | head -n 1 if ! [[ "$CPAN_MIRROR" =~ "http://" ]] ; then echo_err "Unable to extract primary cpan mirror from PERL_CPANM_OPT - something is wrong" echo_err "PERL_CPANM_OPT: $PERL_CPANM_OPT" - CPAN_MIRROR="https://cpan.metacpan.org/" + CPAN_MIRROR="http://cpan.shadowcatprojects.net/" PERL_CPANM_OPT="$PERL_CPANM_OPT --mirror $CPAN_MIRROR" echo_err "Using $CPAN_MIRROR for the time being" fi -export PERL_MM_USE_DEFAULT=1 PERL_MM_NONINTERACTIVE=1 PERL_AUTOINSTALL_PREFER_CPAN=1 PERLBREW_CPAN_MIRROR="$CPAN_MIRROR" HARNESS_TIMER=1 MAKEFLAGS="-j$NUMTHREADS" +# do not set PERLBREW_CPAN_MIRROR - the canonical backpan.perl.org does not have the perl tarballs +export PERL_MM_USE_DEFAULT=1 PERL_MM_NONINTERACTIVE=1 PERL_AUTOINSTALL_PREFER_CPAN=1 HARNESS_TIMER=1 MAKEFLAGS="-j$NUMTHREADS" # try CPAN's latest offering if requested if [[ "$DEVREL_DEPS" == "true" ]] ; then From e65228c03ac1a399f32a9c527d7036353360ee91 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 5 Sep 2014 00:59:01 +0200 Subject: [PATCH 169/548] Make sure the resolve_cond shim attempts to lint the right thing This only matters in compat mode, so sod testing --- lib/DBIx/Class/ResultSource.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 682054262..bf2a0d43a 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1809,7 +1809,7 @@ sub _resolve_condition { # _resolve_relationship_condition always returns qualified cols even in the # case of join_free_condition, but nothing downstream expects this - if (ref $res[0] eq 'HASH' and ($is_objlike[0] or $is_objlike[1]) ) { + if ($rc->{join_free_condition} and ref $res[0] eq 'HASH') { $res[0] = { map { ($_ =~ /\.(.+)/) => $res[0]{$_} } keys %{$res[0]} From 4d93345c2f06a03076dcb43cb256de5c973c203b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 5 Sep 2014 10:33:40 +0200 Subject: [PATCH 170/548] Make sure IO::Handle is loaded - missing stubs on older perls Also adjust the documentation in the wake of the e9f71ab2a change --- lib/DBIx/Class/Storage.pm | 8 ++++---- lib/DBIx/Class/Storage/Statistics.pm | 11 +++++++---- t/storage/replicated.t | 1 - 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/lib/DBIx/Class/Storage.pm b/lib/DBIx/Class/Storage.pm index ad1770eb9..572f87ed2 100644 --- a/lib/DBIx/Class/Storage.pm +++ b/lib/DBIx/Class/Storage.pm @@ -436,10 +436,10 @@ shell environment. =head2 debugfh -Set or retrieve the filehandle used for trace/debug output. This should be -an IO::Handle compatible object (only the C method is used). Initially -set to be STDERR - although see information on the -L environment variable. 
+An opportunistic proxy to L<< ->debugobj->debugfh(@_) +|DBIx::Class::Storage::Statistics/debugfh >> +If the currently set L does not have a L method, caling +this is a no-op. =cut diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index e241ad429..4894cf7c2 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -15,6 +15,7 @@ BEGIN { extends 'DBIx::Class'; use DBIx::Class::_Util qw(sigwarn_silencer qsub); +use IO::Handle (); use namespace::clean; =head1 NAME @@ -42,11 +43,13 @@ Returns a new L object. =head2 debugfh Sets or retrieves the filehandle used for trace/debug output. This should -be an IO::Handle compatible object (only the C method is used). Initially -should be set to STDERR - although see information on the -L environment variable. +be an L compatible object (only the +L<< printflush|IO::Handle/$io->printflush_(_ARGS_) >> method is used). By +default it is initially set to STDERR - although see discussion of the +L environment variable. -As getter it will lazily open a filehandle for you if one is not already set. +Invoked as a getter it will lazily open a filehandle for you if one is not +already set. =cut diff --git a/t/storage/replicated.t b/t/storage/replicated.t index 509b3e688..b735e0fa6 100644 --- a/t/storage/replicated.t +++ b/t/storage/replicated.t @@ -24,7 +24,6 @@ use Test::Exception; use List::Util 'first'; use Scalar::Util 'reftype'; use File::Spec; -use IO::Handle; use Moose(); use MooseX::Types(); note "Using Moose version $Moose::VERSION and MooseX::Types version $MooseX::Types::VERSION"; From 2f077f920ee12a9d4decc8a6653517f2a276151b Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 5 Sep 2014 11:05:46 +0200 Subject: [PATCH 171/548] Move and simplify a chunk of parser code No functional changes --- lib/SQL/Translator/Parser/DBIx/Class.pm | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 4fd03f2ec..5a846c035 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -186,9 +186,6 @@ sub parse { # support quoting properly to be signaled about this $rel_table = $$rel_table if ref $rel_table eq 'SCALAR'; - my $reverse_rels = $source->reverse_relationship_info($rel); - my ($otherrelname, $otherrelationship) = each %{$reverse_rels}; - # Force the order of @cond to match the order of ->add_columns my $idx; my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns; @@ -217,6 +214,8 @@ sub parse { $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary); } + my ($otherrelname, $otherrelationship) = %{ $source->reverse_relationship_info($rel) }; + my $cascade; for my $c (qw/delete update/) { if (exists $rel_info->{attrs}{"on_$c"}) { From e089c417ce04bb60a7d0644cb858f83049e64684 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 5 Sep 2014 11:14:36 +0200 Subject: [PATCH 172/548] Skip malformed relationships in SQLT parser instead of vague warnings --- lib/SQL/Translator/Parser/DBIx/Class.pm | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 5a846c035..c84f73ee9 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -163,8 +163,8 @@ sub parse { # global add_fk_index set in parser_args my $add_fk_index = (exists $args->{add_fk_index} 
&& ! $args->{add_fk_index}) ? 0 : 1; - foreach my $rel (sort @rels) - { + REL: + foreach my $rel (sort @rels) { my $rel_info = $source->relationship_info($rel); @@ -189,7 +189,15 @@ sub parse { # Force the order of @cond to match the order of ->add_columns my $idx; my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns; - my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); + + for ( keys %{$rel_info->{cond}} ) { + unless (exists $other_columns_idx{$_}) { + carp "Ignoring relationship '$rel' - related resultsource does not contain one of the specified columns: '$_'\n"; + next REL; + } + } + + my @cond = sort { $other_columns_idx{$a} <=> $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); # Get the key information, mapping off the foreign/self markers my @refkeys = map {/^\w+\.(\w+)$/} @cond; From 9b96f3d4188ca4b01f8df6fbfc479755e6f9a46a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 7 Sep 2014 13:04:04 +0200 Subject: [PATCH 173/548] Minor relcond resolver refactor, no func. changes --- lib/DBIx/Class/ResultSource.pm | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index bf2a0d43a..0636f2ede 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1881,9 +1881,6 @@ sub _resolve_relationship_condition { $args->{condition} ||= $rel_info->{cond}; -# TEMP -# my $rel_rsrc = $self->related_source($args->{rel_name}); - if (exists $args->{self_result_object}) { $self->throw_exception( "Argument 'self_result_object' must be an object of class '@{[ $self->result_class ]}'" ) unless defined blessed $args->{self_result_object}; @@ -1892,6 +1889,8 @@ sub _resolve_relationship_condition { unless $args->{self_result_object}->isa($self->result_class); } + my $rel_rsrc = $self->related_source($args->{rel_name}); + if (exists $args->{foreign_values}) { if (defined blessed $args->{foreign_values}) { $self->throw_exception( "Object supplied as 'foreign_values' ($args->{foreign_values}) must be of class '$rel_info->{class}'" ) @@ -1900,8 +1899,6 @@ sub _resolve_relationship_condition { $args->{foreign_values} = { $args->{foreign_values}->get_columns }; } elsif (! defined $args->{foreign_values} or ref $args->{foreign_values} eq 'HASH') { - # TEMP - my $rel_rsrc = $self->related_source($args->{rel_name}); my $ci = $rel_rsrc->columns_info; ! 
exists $ci->{$_} and $self->throw_exception( "Key '$_' supplied as 'foreign_values' is not a column on related source '@{[ $rel_rsrc->source_name ]}'" @@ -1947,8 +1944,6 @@ sub _resolve_relationship_condition { my ($joinfree_alias, $joinfree_source); if (defined $args->{self_result_object}) { - # TEMP - my $rel_rsrc = $self->related_source($args->{rel_name}); $joinfree_alias = $args->{foreign_alias}; $joinfree_source = $rel_rsrc; } @@ -2122,8 +2117,6 @@ sub _resolve_relationship_condition { # there is no way to know who is right and who is left in a cref # therefore a full blown resolution call - # TEMP - my $rel_rsrc = $self->related_source($args->{rel_name}); $colinfos ||= $storage->_resolve_column_info([ { -alias => $args->{self_alias}, -rsrc => $self }, { -alias => $args->{foreign_alias}, -rsrc => $rel_rsrc }, From d758a250031618a2b0f50aaf5edf8dfb81efd918 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 7 Sep 2014 13:11:30 +0200 Subject: [PATCH 174/548] Simplify self_result_object argument check in cond resolver --- lib/DBIx/Class/ResultSource.pm | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 0636f2ede..d04687542 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1881,13 +1881,13 @@ sub _resolve_relationship_condition { $args->{condition} ||= $rel_info->{cond}; - if (exists $args->{self_result_object}) { - $self->throw_exception( "Argument 'self_result_object' must be an object of class '@{[ $self->result_class ]}'" ) - unless defined blessed $args->{self_result_object}; - - $self->throw_exception( "Object '$args->{self_result_object}' must be of class '@{[ $self->result_class ]}'" ) - unless $args->{self_result_object}->isa($self->result_class); - } + $self->throw_exception( "Argument 'self_result_object' must be an object of class '@{[ $self->result_class ]}'" ) + if ( + exists $args->{self_result_object} + and + ( ! defined blessed $args->{self_result_object} or ! $args->{self_result_object}->isa($self->result_class) ) + ) + ; my $rel_rsrc = $self->related_source($args->{rel_name}); From 7e5a0e7c25474567b7f0b0daadba3f9b07297073 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 7 Sep 2014 13:20:18 +0200 Subject: [PATCH 175/548] No longer use rel_info($rel)->{class} in the cond resolver It turns out there are a lot of codebases there containing garbage in the rel definition. Punt for after 0.082800 to lean up that mess. 
Also be less strict on checking the foreign_values contents - downgrade mismatches to a warning (but still hard-require ::Row ancestry) --- lib/DBIx/Class/ResultSource.pm | 18 ++++++++++++++---- t/cdbi/06-hasa.t | 8 +++++--- t/cdbi/18-has_a.t | 21 ++++++++++++--------- 3 files changed, 31 insertions(+), 16 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index d04687542..6baff16e9 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1835,7 +1835,7 @@ Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1); ## self-explanatory API, modeled on the custom cond coderef: # rel_name => (scalar) # foreign_alias => (scalar) -# foreign_values => (either not supplied or a hashref) +# foreign_values => (either not supplied, or a hashref, or a foreign ResultObject (to be ->get_columns()ed), or plain undef ) # self_alias => (scalar) # self_result_object => (either not supplied or a result object) # require_join_free_condition => (boolean, throws on failure to construct a JF-cond) @@ -1893,8 +1893,15 @@ sub _resolve_relationship_condition { if (exists $args->{foreign_values}) { if (defined blessed $args->{foreign_values}) { - $self->throw_exception( "Object supplied as 'foreign_values' ($args->{foreign_values}) must be of class '$rel_info->{class}'" ) - unless $args->{foreign_values}->isa($rel_info->{class}); + + $self->throw_exception( "Objects supplied as 'foreign_values' ($args->{foreign_values}) must inherit from DBIx::Class::Row" ) + unless $args->{foreign_values}->isa('DBIx::Class::Row'); + + carp_unique( + "Objects supplied as 'foreign_values' ($args->{foreign_values}) " + . "usually should inherit from the related ResultClass ('@{[ $rel_rsrc->result_class ]}'), " + . "perhaps you've made a mistake invoking the condition resolver?" + ) unless $args->{foreign_values}->isa($rel_rsrc->result_class); $args->{foreign_values} = { $args->{foreign_values}->get_columns }; } @@ -1905,7 +1912,10 @@ sub _resolve_relationship_condition { ) for keys %{ $args->{foreign_values} ||= {} }; } else { - $self->throw_exception( "Argument 'foreign_values' must be either an object inheriting from '$rel_info->{class}' or a hash reference or undef" ); + $self->throw_exception( + "Argument 'foreign_values' must be either an object inheriting from '@{[ $rel_rsrc->result_class ]}', " + . 
"or a hash reference, or undef" + ); } } diff --git a/t/cdbi/06-hasa.t b/t/cdbi/06-hasa.t index 255383be6..02933cd56 100644 --- a/t/cdbi/06-hasa.t +++ b/t/cdbi/06-hasa.t @@ -1,6 +1,8 @@ use strict; use warnings; use Test::More; +use Test::Exception; +use DBIx::Class::_Util 'sigwarn_silencer'; @YA::Film::ISA = 'Film'; @@ -105,7 +107,8 @@ sub taste_bad { sub fail_with_bad_object { my ($dir, $codir) = @_; - eval { + throws_ok { + local $SIG{__WARN__} = sigwarn_silencer( qr/\Qusually should inherit from the related ResultClass ('Director')/ ); YA::Film->create( { Title => 'Tastes Bad', @@ -115,8 +118,7 @@ sub fail_with_bad_object { NumExplodingSheep => 23 } ); - }; - ok $@, $@; + } qr/isn't a Director/; } package Foo; diff --git a/t/cdbi/18-has_a.t b/t/cdbi/18-has_a.t index 1dacd6c8a..e1deb0908 100644 --- a/t/cdbi/18-has_a.t +++ b/t/cdbi/18-has_a.t @@ -1,6 +1,8 @@ use strict; use warnings; use Test::More; +use Test::Exception; +use DBIx::Class::_Util 'sigwarn_silencer'; use lib 't/cdbi/testlib'; use Film; @@ -45,8 +47,8 @@ my $sj = Director->create({ }); { - eval { $btaste->Director($btaste) }; - like $@, qr/Director/, "Can't set film as director"; + throws_ok { $btaste->Director($btaste) } + qr/isn't a Director/, "Can't set film as director"; is $btaste->Director->id, $pj->id, "PJ still the director"; # drop from cache so that next retrieve() is from db @@ -69,8 +71,7 @@ my $sj = Director->create({ is $sj->id, 'Skippy Jackson', 'Create new director - Skippy'; Film->has_a('CoDirector' => 'Director'); { - eval { $btaste->CoDirector("Skippy Jackson") }; - is $@, "", "Auto inflates"; + lives_ok { $btaste->CoDirector("Skippy Jackson") }; isa_ok $btaste->CoDirector, "Director"; is $btaste->CoDirector->id, $sj->id, "To skippy"; } @@ -96,7 +97,8 @@ is( $pj = Director->retrieve('Peter Jackson'); my $fail; - eval { + throws_ok { + local $SIG{__WARN__} = sigwarn_silencer( qr/\Qusually should inherit from the related ResultClass ('Director')/ ); $fail = YA::Film->create({ Title => 'Tastes Bad', Director => $sj, @@ -104,8 +106,7 @@ is( Rating => 'R', NumExplodingSheep => 23 }); - }; - ok $@, "Can't have film as codirector: $@"; + } qr/isn't a Director/, "Can't have film as codirector"; is $fail, undef, "We didn't get anything"; my $tastes_bad = YA::Film->create({ @@ -226,8 +227,10 @@ SKIP: { } { # Broken has_a declaration - eval { Film->has_a(driector => "Director") }; - like $@, qr/driector/, "Sensible error from has_a with incorrect column: $@"; + throws_ok{ Film->has_a(driector => "Director") } + qr/No such column driector/, + "Sensible error from has_a with incorrect column" + ; } done_testing; From 2a7875330aa2f812eaa325ce69f0fa1c860771ac Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 8 Sep 2014 11:14:29 +0200 Subject: [PATCH 176/548] Fix for gut-reaching travesty as reported by gbjk --- lib/DBIx/Class/ResultSource.pm | 2 +- t/row/filter_column.t | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 6baff16e9..4550af8a9 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1580,7 +1580,7 @@ sub _minimal_valueset_satisfying_constraint { $cols->{fc}{$col} = 1 if ( ! 
( $cols->{missing} || {})->{$col} and - $args->{columns_info}{$col}{_filter_info} + keys %{ $args->{columns_info}{$col}{_filter_info} || {} } ); } diff --git a/t/row/filter_column.t b/t/row/filter_column.t index 10ac1a4d7..7823fa53e 100644 --- a/t/row/filter_column.t +++ b/t/row/filter_column.t @@ -260,6 +260,18 @@ throws_ok { DBICTest::Schema::Artist->filter_column( charfield => {} ) } ; FC_ON_PK_TEST: { + # there are cases in the wild that autovivify stuff deep in the + # colinfo guts. While this is insane, there is no alternative + # so at leats make sure it keeps working... + + $schema->source('Artist')->column_info('artistid')->{_filter_info} ||= {}; + + for my $key ('', 'primary') { + lives_ok { + $schema->resultset('Artist')->find_or_create({ artistid => 42 }, { $key ? ( key => $key ) : () }); + }; + } + DBICTest::Schema::Artist->filter_column(artistid => { filter_to_storage => sub { $_[1] * 100 }, From bb96193692f7c0f53de7fe15e157492b0af51988 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 8 Sep 2014 13:06:59 +0200 Subject: [PATCH 177/548] Stop using Sys::SigAction in pg tests --- lib/DBIx/Class/Optional/Dependencies.pm | 1 - t/72pg.t | 30 ++++++++++++++++--------- xt/optional_deps.t | 1 - 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/lib/DBIx/Class/Optional/Dependencies.pm b/lib/DBIx/Class/Optional/Dependencies.pm index 28061598d..76229c237 100644 --- a/lib/DBIx/Class/Optional/Dependencies.pm +++ b/lib/DBIx/Class/Optional/Dependencies.pm @@ -453,7 +453,6 @@ my $reqs = { ? ( # when changing this list make sure to adjust xt/optional_deps.t %$rdbms_pg, - ($^O ne 'MSWin32' ? ('Sys::SigAction' => '0') : ()), 'DBD::Pg' => '2.009002', ) : () }, diff --git a/t/72pg.t b/t/72pg.t index 8e4b1420c..1e7ed0a48 100644 --- a/t/72pg.t +++ b/t/72pg.t @@ -4,10 +4,12 @@ use warnings; use Test::More; use Test::Exception; use Sub::Name; +use Config; use DBIx::Class::Optional::Dependencies (); use lib qw(t/lib); use DBICTest; use SQL::Abstract 'is_literal_value'; +use DBIx::Class::_Util 'is_exception'; plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg') unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg'); @@ -339,14 +341,9 @@ my $cds = $artist->cds_unordered->search({ lives_ok { $cds->update({ year => '2010' }) } 'Update on prefetched rs'; ## Test SELECT ... 
FOR UPDATE - SKIP: { - if(eval { require Sys::SigAction }) { - Sys::SigAction->import( 'set_sig_handler' ); - } - else { - skip "Sys::SigAction is not available", 6; - } + skip "Your system does not support unsafe signals (d_sigaction) - unable to run deadlock test", 1 + unless eval { $Config{d_sigaction} and require POSIX }; my ($timed_out, $artist2); @@ -385,15 +382,28 @@ lives_ok { $cds->update({ year => '2010' }) } 'Update on prefetched rs'; is($artist->artistid, 1, "select returns artistid = 1"); $timed_out = 0; + eval { - my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } ); + # can not use %SIG assignment directly - we need sigaction below + # localization to a block still works however + local $SIG{ALRM}; + + POSIX::sigaction( POSIX::SIGALRM() => POSIX::SigAction->new( + sub { die "DBICTestTimeout" }, + )); + alarm(2); $artist2 = $schema2->resultset('Artist')->find(1); $artist2->name('fooey'); $artist2->update; - alarm(0); }; - $timed_out = $@ =~ /DBICTestTimeout/; + + alarm(0); + + if (is_exception($@)) { + $timed_out = $@ =~ /DBICTestTimeout/ + or die $@; + } }); $t->{test_sub}->(); diff --git a/xt/optional_deps.t b/xt/optional_deps.t index 0ae8023b7..781273c5c 100644 --- a/xt/optional_deps.t +++ b/xt/optional_deps.t @@ -119,7 +119,6 @@ is_deeply( is_deeply( DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_pg'), { - $^O ne 'MSWin32' ? ('Sys::SigAction' => '0') : (), 'DBD::Pg' => '2.009002', }, 'optional dependencies for testing Postgres with ENV var ok'); From 81023d83ad94dc8e6601d7c8aad598673f90ad18 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 10 Sep 2014 01:48:35 +0200 Subject: [PATCH 178/548] Ensure ::Schema::Versioned connects only once by reusing the main connection --- Changes | 2 ++ lib/DBIx/Class.pm | 2 ++ lib/DBIx/Class/Schema/Versioned.pm | 10 ++++++---- t/94versioning.t | 11 +++++++++++ 4 files changed, 21 insertions(+), 4 deletions(-) diff --git a/Changes b/Changes index eace9ac9f..4a830a2cc 100644 --- a/Changes +++ b/Changes @@ -51,6 +51,8 @@ Revision history for DBIx::Class to SQL::Translator (partially RT#87731) - Fix SQLT constraint naming when DBIC table names are fully qualified (PR#48) + - Ensure ::Schema::Versioned connects only once by reusing the main + connection (GH#57) - Fix inability to handle multiple consecutive transactions with savepoints on DBD::SQLite < 1.39 - Fix CDBICompat to match Class::DBI behavior handling non-result diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index f2229f063..88b6bf470 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -416,6 +416,8 @@ jasonmay: Jason May jegade: Jens Gassmann +jeneric: Eric A. 
Miller + jesper: Jesper Krogh jgoulah: John Goulah diff --git a/lib/DBIx/Class/Schema/Versioned.pm b/lib/DBIx/Class/Schema/Versioned.pm index 95adc66b0..e92f356b9 100644 --- a/lib/DBIx/Class/Schema/Versioned.pm +++ b/lib/DBIx/Class/Schema/Versioned.pm @@ -204,6 +204,7 @@ use base 'DBIx::Class::Schema'; use DBIx::Class::Carp; use Time::HiRes qw/gettimeofday/; use Try::Tiny; +use Scalar::Util 'weaken'; use namespace::clean; __PACKAGE__->mk_classdata('_filedata'); @@ -589,9 +590,10 @@ sub _on_connect { my ($self) = @_; - my $conn_info = $self->storage->connect_info; - $self->{vschema} = DBIx::Class::Version->connect(@$conn_info); - my $conn_attrs = $self->{vschema}->storage->_dbic_connect_attributes || {}; + weaken (my $w_self = $self ); + + $self->{vschema} = DBIx::Class::Version->connect(sub { $w_self->storage->dbh }); + my $conn_attrs = $self->storage->_dbic_connect_attributes || {}; my $vtable = $self->{vschema}->resultset('Table'); @@ -600,7 +602,7 @@ sub _on_connect # check for legacy versions table and move to new if exists unless ($self->_source_exists($vtable)) { - my $vtable_compat = DBIx::Class::VersionCompat->connect(@$conn_info)->resultset('TableCompat'); + my $vtable_compat = DBIx::Class::VersionCompat->connect(sub { $w_self->storage->dbh })->resultset('TableCompat'); if ($self->_source_exists($vtable_compat)) { $self->{vschema}->deploy; map { $vtable->new_result({ installed => $_->Installed, version => $_->Version })->insert } $vtable_compat->all; diff --git a/t/94versioning.t b/t/94versioning.t index 93fcca7fa..a154d8f96 100644 --- a/t/94versioning.t +++ b/t/94versioning.t @@ -285,6 +285,17 @@ my $schema_v3 = DBICVersion::Schema->connect($dsn, $user, $pass, { ignore_versio ok($get_db_version_run == 0, "attributes pulled from list connect_info"); } +# at this point we have v1, v2 and v3 still connected +# make sure they are the only connections and everything else is gone +is + scalar( grep + { defined $_ and $_->{Active} } + map + { @{$_->{ChildHandles}} } + values %{ { DBI->installed_drivers } } + ), 3, "Expected number of connections at end of script" +; + END { unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) { $ddl_dir->rmtree; From 76cc4546dfc51e7d83add9a90af1eb2d5a7d156c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 10 Sep 2014 01:38:43 +0200 Subject: [PATCH 179/548] Fix incorrect cond construction in _minimal_valueset_satisfying_constraint The function in question introduced in d681f1bb (which builds upon work in 8e40a627) correctly uses _extract_fixed_condition_columns, but then fails to account for all literals having their leading { '=' => ... } being stripped, nor does it consider UNRESOLVABLE_CONDITION as a valid return value. Tests and fixes to get this rolling, thanks go to Lianna Eeftinck for testing and reporting \o/ --- lib/DBIx/Class/ResultSource.pm | 18 +++++++-------- t/60core.t | 7 ++++++ t/resultset/find_on_subquery_cond.t | 34 +++++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 9 deletions(-) create mode 100644 t/resultset/find_on_subquery_cond.t diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 4550af8a9..fde2ac5ed 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1567,18 +1567,21 @@ sub _minimal_valueset_satisfying_constraint { my $cols; for my $col ($self->unique_constraint_columns($args->{constraint_name}) ) { - if( ! exists $vals->{$col} ) { - $cols->{missing}{$col} = 1; + if( ! 
exists $vals->{$col} or ( $vals->{$col}||'' ) eq UNRESOLVABLE_CONDITION ) { + $cols->{missing}{$col} = undef; } elsif( ! defined $vals->{$col} ) { - $cols->{$args->{carp_on_nulls} ? 'undefined' : 'missing'}{$col} = 1; + $cols->{$args->{carp_on_nulls} ? 'undefined' : 'missing'}{$col} = undef; } else { - $cols->{present}{$col} = 1; + # we need to inject back the '=' as _extract_fixed_condition_columns + # will strip it from literals and values alike, resulting in an invalid + # condition in the end + $cols->{present}{$col} = { '=' => $vals->{$col} }; } $cols->{fc}{$col} = 1 if ( - ! ( $cols->{missing} || {})->{$col} + ( ! $cols->{missing} or ! exists $cols->{missing}{$col} ) and keys %{ $args->{columns_info}{$col}{_filter_info} || {} } ); @@ -1609,10 +1612,7 @@ sub _minimal_valueset_satisfying_constraint { )); } - return { map - { $_ => $vals->{$_} } - ( keys %{$cols->{present}}, keys %{$cols->{undefined}} ) - }; + return { map { %{ $cols->{$_}||{} } } qw(present undefined) }; } # Returns the {from} structure used to express JOIN conditions diff --git a/t/60core.t b/t/60core.t index 62299c322..f92159b29 100644 --- a/t/60core.t +++ b/t/60core.t @@ -130,6 +130,13 @@ throws_ok { is($schema->resultset("Artist")->count, 4, 'count ok'); +# test find on an unresolvable condition +is( + $schema->resultset('Artist')->find({ artistid => [ -and => 1, 2 ]}), + undef +); + + # test find_or_new { my $existing_obj = $schema->resultset('Artist')->find_or_new({ diff --git a/t/resultset/find_on_subquery_cond.t b/t/resultset/find_on_subquery_cond.t new file mode 100644 index 000000000..af2ca51aa --- /dev/null +++ b/t/resultset/find_on_subquery_cond.t @@ -0,0 +1,34 @@ +use strict; +use warnings; + +use Test::More; +use Test::Exception; + +use lib qw(t/lib); +use DBICTest; + +my $schema = DBICTest->init_schema(); +my $rs = $schema->resultset('Artist'); + +for my $id ( + 2, + \' = 2 ', + \[ '= ?', 2 ], +) { + lives_ok { + is( $rs->find({ artistid => $id })->id, 2 ) + } "Correctly found artist with id of @{[ explain $id ]}"; +} + +for my $id ( + 2, + \'2', + \[ '?', 2 ], +) { + my $cond = { artistid => { '=', $id } }; + lives_ok { + is( $rs->find($cond)->id, 2 ) + } "Correctly found artist with id of @{[ explain $cond ]}"; +} + +done_testing; From 8096b12d3edbb78a0040224f84c0b4c17ab6f73a Mon Sep 17 00:00:00 2001 From: Tommy Butler Date: Wed, 10 Sep 2014 03:51:27 -0500 Subject: [PATCH 180/548] Fix syntax error in ::Manual::Joining example fixed syntax error on line 245 (missing closing curly bracket) Closes: #60 --- lib/DBIx/Class/Manual/Joining.pod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/Manual/Joining.pod b/lib/DBIx/Class/Manual/Joining.pod index 625a4d90e..ff02475b9 100644 --- a/lib/DBIx/Class/Manual/Joining.pod +++ b/lib/DBIx/Class/Manual/Joining.pod @@ -242,7 +242,7 @@ To join two relations at the same level, use an arrayref instead: Or combine the two: - join => { room => [ 'chair', { table => 'leg' } ] + join => { room => [ 'chair', { table => 'leg' } ] } =head2 Table aliases From bd15e62e568b5b8574f79bd799e3eb72ffc21757 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 13:58:48 +0200 Subject: [PATCH 181/548] (travis) Stop auto-upgrading everything on non-clean installs It adds little value currently and makes version-specific installs difficult (see next commit) --- maint/travis-ci_scripts/30_before_script.bash | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/maint/travis-ci_scripts/30_before_script.bash 
b/maint/travis-ci_scripts/30_before_script.bash index 6aadca4b4..7d247c804 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -206,18 +206,7 @@ while (@chunks) { fi else - - # listalldeps is deliberate - will upgrade everything it can find - # we exclude SQLA specifically, since we do not want to pull - # in 1.99_xx on bleadcpan runs - deplist="$(make listalldeps | grep -vP '^(SQL::Abstract)$')" - - # assume MDV on POISON_ENV, do not touch DBI/SQLite - if [[ "$POISON_ENV" = "true" ]] ; then - deplist="$(grep -vP '^(DBI|DBD::SQLite)$' <<< "$deplist")" - fi - - parallel_installdeps_notest "$deplist" + parallel_installdeps_notest "$(make listdeps)" fi echo_err "$(tstamp) Dependency installation finished" From 9ac3347b9d86da4e8d2490d72ec4da05ee346bb4 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 13:42:00 +0200 Subject: [PATCH 182/548] (travis) Accommodate a slow mirror grrrrr --- maint/travis-ci_scripts/30_before_script.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index 7d247c804..8fd295e91 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -107,7 +107,7 @@ else parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant - parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which + parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which Path::Class@0.34 if [[ -n "$DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then # the official version is very much outdated and does not compile on 5.14+ From 5379386ef2b88e002a778e02132b1f58adf31152 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 14 Sep 2014 20:00:40 +0200 Subject: [PATCH 183/548] Adjust for SQLA's API change 966200cc8 --- lib/DBIx/Class/Storage/DBIHacks.pm | 6 +++--- t/search/stack_cond.t | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 4c07f5e28..17fdbfbe4 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1124,8 +1124,8 @@ sub _collapse_cond { for my $col ( grep { $_ !~ /^\-/ } keys %$fin ) { next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') =~ /^\-and$/i; my $val_bag = { map { - (! defined $_ ) ? ( UNDEF => undef ) - : ( ! ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) + (! defined $_ ) ? ( UNDEF => undef ) + : ( ! length ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ ) : ( ( 'SER_' . serialize $_ ) => $_ ) } @{$fin->{$col}}[1 .. 
$#{$fin->{$col}}] }; @@ -1233,7 +1233,7 @@ sub _collapse_cond_unroll_pairs { and my $vref = is_plain_value( (values %$rhs)[0] ) ) { - push @conds, { $lhs => { $subop => @$vref } } + push @conds, { $lhs => { $subop => $$vref } } } else { push @conds, { $lhs => $rhs }; diff --git a/t/search/stack_cond.t b/t/search/stack_cond.t index d43f274c9..9a0e8062b 100644 --- a/t/search/stack_cond.t +++ b/t/search/stack_cond.t @@ -50,9 +50,9 @@ for my $c ( if (my $v = is_plain_value($c->{cond})) { push @query_steps, - { year => $v->[0] }, - { title => $v->[0] }, - { -and => [ year => $v->[0], title => $v->[0] ] }, + { year => $$v }, + { title => $$v }, + { -and => [ year => $$v, title => $$v ] }, ; } From cc506f8b4fdf305b5f4483c7fdad19bc84ea68c8 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 11:55:53 +0200 Subject: [PATCH 184/548] Fix copy() assuming all columns are native --- Changes | 1 + lib/DBIx/Class/Row.pm | 6 +++--- t/row/copy_with_extra_selection.t | 31 +++++++++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 t/row/copy_with_extra_selection.t diff --git a/Changes b/Changes index 4a830a2cc..56396695e 100644 --- a/Changes +++ b/Changes @@ -45,6 +45,7 @@ Revision history for DBIx::Class - Fix set_column on non-native (+columns) selections (RT#86685) - Fix set_inflated_column incorrectly handling \[] literals (GH#44) - Ensure that setting a column to a literal invariably marks it dirty + - Fix copy() not working correctly with extra selections present - Work around exception objects with broken string overloading in one additional codepath (missed in 0.08260) - Fix more inconsistencies of the quote_names attribute propagating diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index f785773b1..630d2bc84 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -1147,14 +1147,14 @@ is set by default on C relationships and unset on all others. sub copy { my ($self, $changes) = @_; $changes ||= {}; - my $col_data = { %{$self->{_column_data}} }; + my $col_data = { $self->get_columns }; my $rsrc = $self->result_source; - my $colinfo = $rsrc->columns_info([ keys %$col_data ]); + my $colinfo = $rsrc->columns_info; foreach my $col (keys %$col_data) { delete $col_data->{$col} - if $colinfo->{$col}{is_auto_increment}; + if ( ! 
$colinfo->{$col} or $colinfo->{$col}{is_auto_increment} ); } my $new = { _column_data => $col_data }; diff --git a/t/row/copy_with_extra_selection.t b/t/row/copy_with_extra_selection.t new file mode 100644 index 000000000..c1e3df4d2 --- /dev/null +++ b/t/row/copy_with_extra_selection.t @@ -0,0 +1,31 @@ +use strict; +use warnings; + +use Test::More; + +use lib qw(t/lib); +use DBICTest; + +my $schema = DBICTest->init_schema(); + +my $cd = $schema->resultset('CD')->search({}, { + '+columns' => { avg_year => $schema->resultset('CD')->get_column('year')->func_rs('avg')->as_query }, + order_by => 'cdid', +})->next; + +my $ccd = $cd->copy({ cdid => 5_000_000, artist => 2 }); + +cmp_ok( + $ccd->id, + '!=', + $cd->id, + 'IDs differ' +); + +is( + $ccd->title, + $cd->title, + 'Title same on copied object', +); + +done_testing; From bad0b73b680b041ca8ce677365ca1f6387232e61 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 14 Sep 2014 20:55:55 +0200 Subject: [PATCH 185/548] Make sure the me-relname-flip does not affect any error text --- lib/DBIx/Class/ResultSource.pm | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index fde2ac5ed..57d833eae 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1331,6 +1331,7 @@ sub add_relationship { my %rels = %{ $self->_relationships }; $rels{$rel} = { class => $f_source_name, source => $f_source_name, + _original_name => $rel, cond => $cond, attrs => $attrs }; $self->_relationships(\%rels); @@ -1864,12 +1865,12 @@ sub _resolve_relationship_condition { $self->throw_exception("Arguments 'self_alias' and 'foreign_alias' may not be identical") if $args->{self_alias} eq $args->{foreign_alias}; - my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'"; - my $rel_info = $self->relationship_info($args->{rel_name}) # TEMP # or $self->throw_exception( "No such $exception_rel_id" ); - or carp_unique("Requesting resolution on non-existent $exception_rel_id: fix your code *soon*, as it will break with the next major version"); + or carp_unique("Requesting resolution on non-existent relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}': fix your code *soon*, as it will break with the next major version"); + + my $exception_rel_id = "relationship '$rel_info->{_original_name}' on source '@{[ $self->source_name ]}'"; $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures") if exists $args->{self_result_object} and exists $args->{foreign_values}; From 7df2b5df3d6c48b35c73a9e840d8e8ef395b11f6 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 10:40:39 +0200 Subject: [PATCH 186/548] Yet another missed sanity check in the relcond resolver rel/col duality is just hateful --- lib/DBIx/Class/ResultSource.pm | 19 +++++++++++++++++-- t/lib/DBICTest/Schema/Track.pm | 2 +- t/relationship/custom.t | 6 ++++++ 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 57d833eae..9e8641aa6 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1973,11 +1973,26 @@ sub _resolve_relationship_condition { $joinfree_source->columns }; - $fq_col_list->{$_} or $self->throw_exception ( + exists $fq_col_list->{$_} or $self->throw_exception ( "The join-free condition returned for $exception_rel_id may only " - . 
'contain keys that are fully qualified column names of the corresponding source' + . 'contain keys that are fully qualified column names of the corresponding source ' + . "(it returned '$_')" ) for keys %$jfc; + ( + length ref $_ + and + defined blessed($_) + and + $_->isa('DBIx::Class::Row') + and + $self->throw_exception ( + "The join-free condition returned for $exception_rel_id may not " + . 'contain result objects as values - perhaps instead of invoking ' + . '->$something you meant to return ->get_column($something)' + ) + ) for values %$jfc; + } } elsif (ref $args->{condition} eq 'HASH') { diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index 5b0811eeb..c43591a2b 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -67,7 +67,7 @@ sub { }, ! $args->{self_result_object} ? () : { - "$args->{foreign_alias}.cdid" => $args->{self_result_object}->cd + "$args->{foreign_alias}.cdid" => $args->{self_result_object}->get_column('cd') }, ! $args->{foreign_values} ? () : { diff --git a/t/relationship/custom.t b/t/relationship/custom.t index 5bc52d4fe..fb4859797 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -285,6 +285,12 @@ my $cd_single_track = $schema->resultset('CD')->create({ my $single_track = $cd_single_track->tracks->next; +is( + $single_track->cd_cref_cond->title, + $cd_single_track->title, + 'Got back the expected single-track cd title', +); + is_deeply { $schema->resultset('Track')->find({ cd_cref_cond => { cdid => $cd_single_track->id } })->get_columns }, { $single_track->get_columns }, From 139e7991dd0542b926ad9cac8de3711e4c716e13 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 11:39:12 +0200 Subject: [PATCH 187/548] Ensure undef_on_null_fk does not affect non-introspectable custom conds --- lib/DBIx/Class/Relationship/Accessor.pm | 25 ++++++++---- t/lib/DBICTest/Schema/CD.pm | 8 ++++ t/relationship/custom_opaque.t | 51 +++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 8 deletions(-) create mode 100644 t/relationship/custom_opaque.t diff --git a/lib/DBIx/Class/Relationship/Accessor.pm b/lib/DBIx/Class/Relationship/Accessor.pm index aeefa84d7..40deeafa4 100644 --- a/lib/DBIx/Class/Relationship/Accessor.pm +++ b/lib/DBIx/Class/Relationship/Accessor.pm @@ -35,15 +35,24 @@ sub add_relationship_accessor { return $self->{_relationship_data}{%1$s}; } else { - my $rel_info = $self->result_source->relationship_info(%1$s); - my $cond = $self->result_source->_resolve_condition( - $rel_info->{cond}, %1$s, $self, %1$s + my $relcond = $self->result_source->_resolve_relationship_condition( + rel_name => %1$s, + foreign_alias => %1$s, + self_alias => 'me', + self_result_object => $self, ); - if ($rel_info->{attrs}->{undef_on_null_fk}){ - return undef unless ref($cond) eq 'HASH'; - return undef if grep { not defined $_ } values %%$cond; - } - my $val = $self->find_related( %1$s => {} ); + + return undef if ( + $relcond->{join_free_condition} + and + $relcond->{join_free_condition} ne DBIx::Class::_Util::UNRESOLVABLE_CONDITION + and + scalar grep { not defined $_ } values %%{ $relcond->{join_free_condition} || {} } + and + $self->result_source->relationship_info(%1$s)->{attrs}{undef_on_null_fk} + ); + + my $val = $self->search_related( %1$s )->single; return $val unless $val; # $val instead of undef so that null-objects can go through return $self->{_relationship_data}{%1$s} = $val; diff --git a/t/lib/DBICTest/Schema/CD.pm b/t/lib/DBICTest/Schema/CD.pm index 190f11d85..1a0771b76 100644 
--- a/t/lib/DBICTest/Schema/CD.pm +++ b/t/lib/DBICTest/Schema/CD.pm @@ -55,6 +55,14 @@ __PACKAGE__->belongs_to( single_track => 'DBICTest::Schema::Track', { join_type => 'left'}, ); +__PACKAGE__->belongs_to( single_track_opaque => 'DBICTest::Schema::Track', + sub { + my $args = &check_customcond_args; + \ " $args->{foreign_alias}.trackid = $args->{self_alias}.single_track "; + }, + { join_type => 'left'}, +); + # add a non-left single relationship for the complex prefetch tests __PACKAGE__->belongs_to( existing_single_track => 'DBICTest::Schema::Track', { 'foreign.trackid' => 'self.single_track' }, diff --git a/t/relationship/custom_opaque.t b/t/relationship/custom_opaque.t new file mode 100644 index 000000000..1139c6aa2 --- /dev/null +++ b/t/relationship/custom_opaque.t @@ -0,0 +1,51 @@ +use strict; +use warnings; + +use Test::More; + +use lib 't/lib'; +use DBICTest; + +my $schema = DBICTest->init_schema( no_populate => 1, quote_names => 1 ); + +$schema->resultset('CD')->create({ + title => 'Equinoxe', + year => 1978, + artist => { name => 'JMJ' }, + genre => { name => 'electro' }, + tracks => [ + { title => 'e1' }, + { title => 'e2' }, + { title => 'e3' }, + ], + single_track => { + title => 'o1', + cd => { + title => 'Oxygene', + year => 1976, + artist => { name => 'JMJ' }, + }, + }, +}); + +my $cd = $schema->resultset('CD')->search({ single_track => { '!=', undef } })->first; + +$schema->is_executed_sql_bind( + sub { is( eval{$cd->single_track_opaque->title}, 'o1', 'Found correct single track' ) }, + [ + [ + 'SELECT "me"."trackid", "me"."cd", "me"."position", "me"."title", "me"."last_updated_on", "me"."last_updated_at" + FROM cd "cd__row" + JOIN "track" "me" + ON me.trackid = cd__row.single_track + WHERE "cd__row"."cdid" = ? + ', + [ + { dbic_colname => "cd__row.cdid", sqlt_datatype => "integer" } + => 2 + ] + ], + ], +); + +done_testing; From 21621fe4565697ead298e6829425dc0e9e5ba816 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 14 Sep 2014 20:41:55 +0200 Subject: [PATCH 188/548] A more robust column equality extractor It is a mistake to reach into $ret->{condition} - it may very well be a non-hash structure. Also replace the silly regexping with the (already regexed out) results of the info resolver --- lib/DBIx/Class/ResultSource.pm | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 9e8641aa6..34f26e9de 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -2139,28 +2139,29 @@ sub _resolve_relationship_condition { for my $lhs (keys %$col_eqs) { next if $col_eqs->{$lhs} eq UNRESOLVABLE_CONDITION; - my ($rhs) = @{ is_literal_value( $ret->{condition}{$lhs} ) || next }; # there is no way to know who is right and who is left in a cref - # therefore a full blown resolution call + # therefore a full blown resolution call, and figure out the + # direction a bit further below $colinfos ||= $storage->_resolve_column_info([ { -alias => $args->{self_alias}, -rsrc => $self }, { -alias => $args->{foreign_alias}, -rsrc => $rel_rsrc }, ]); - my ($l_col, $r_col) = map { $_ =~ / ([^\.]+) $ /x } ($lhs, $rhs); + next unless $colinfos->{$lhs}; # someone is engaging in witchcraft - if ( - $colinfos->{$l_col} - and - $colinfos->{$r_col} - and - $colinfos->{$l_col}{-source_alias} ne $colinfos->{$r_col}{-source_alias} - ) { - ( $colinfos->{$l_col}{-source_alias} eq $args->{self_alias} ) - ? 
( $ret->{identity_map}{$l_col} = $r_col ) - : ( $ret->{identity_map}{$r_col} = $l_col ) - ; + if ( my $rhs_ref = is_literal_value( $col_eqs->{$lhs} ) ) { + + if ( + $colinfos->{$rhs_ref->[0]} + and + $colinfos->{$lhs}{-source_alias} ne $colinfos->{$rhs_ref->[0]}{-source_alias} + ) { + ( $colinfos->{$lhs}{-source_alias} eq $args->{self_alias} ) + ? ( $ret->{identity_map}{$colinfos->{$lhs}{-colname}} = $colinfos->{$rhs_ref->[0]}{-colname} ) + : ( $ret->{identity_map}{$colinfos->{$rhs_ref->[0]}{-colname}} = $colinfos->{$lhs}{-colname} ) + ; + } } } } From c200d94979bde5ac74070d3e898927433b0e667c Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 14 Sep 2014 20:59:58 +0200 Subject: [PATCH 189/548] Ensure the custom rel cond resolver does not trigger forgotten compat shim During the rush to get custom rels out the door (this is why rushing fucking sucks), a697fa31 introduced a shortsighted workaround into ::SQLMaker::_from_chunk_to_sql(). This code slipped consequent review and made its way into the codebase... 4 FUCKING YEARS AGO!!! >:( Since it is not known how much stuff relies on the insanity being there (moreover we have tests that rely on it) leave things as is for the time being. The only change is making the cond resolver *completely* oblivious to the "single-element hash" workaround (albeit via a silly hack). In the process exposed that ora-joins module is entirely incapable of understanding non-equality conds... fml See next commits for added warnings, etc. --- lib/DBIx/Class/ResultSource.pm | 20 +++++++++++++++++++- lib/DBIx/Class/SQLMaker/OracleJoins.pm | 13 +++++++++++++ t/lib/DBICTest/Schema/Track.pm | 14 ++++++++++++++ t/relationship/custom.t | 23 +++++++++++++++++++++++ 4 files changed, 69 insertions(+), 1 deletion(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 34f26e9de..4669926ff 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -2163,10 +2163,28 @@ sub _resolve_relationship_condition { ; } } + elsif ( + $col_eqs->{$lhs} =~ /^ ( \Q$args->{self_alias}\E \. .+ ) /x + and + ($colinfos->{$1}||{})->{-result_source} == $rel_rsrc + ) { + my ($lcol, $rcol) = map + { $colinfos->{$_}{-colname} } + ( $lhs, $1 ) + ; + carp_unique( + "The $exception_rel_id specifies equality of column '$lcol' and the " + . "*VALUE* '$rcol' (you did not use the { -ident => ... } operator)" + ); + } } } - $ret + # FIXME - temporary, to fool the idiotic check in SQLMaker::_join_condition + $ret->{condition} = { -and => [ $ret->{condition} ] } + unless $ret->{condition} eq UNRESOLVABLE_CONDITION; + + $ret; } =head2 related_source diff --git a/lib/DBIx/Class/SQLMaker/OracleJoins.pm b/lib/DBIx/Class/SQLMaker/OracleJoins.pm index b95c56e88..44f4b08ab 100644 --- a/lib/DBIx/Class/SQLMaker/OracleJoins.pm +++ b/lib/DBIx/Class/SQLMaker/OracleJoins.pm @@ -80,6 +80,19 @@ sub _recurse_oracle_joins { && $jt !~ /inner/i; } + # FIXME - the code below *UTTERLY* doesn't work with custom conds... sigh + # for the time being do not do any processing with the likes of _collapse_cond + # instead only unroll the -and hack if present + $on = $on->{-and}[0] if ( + ref $on eq 'HASH' + and + keys %$on == 1 + and + ref $on->{-and} eq 'ARRAY' + and + @{$on->{-and}} == 1 + ); + # sadly SQLA treats where($scalar) as literal, so we need to jump some hoops push @where, map { \sprintf ('%s%s = %s%s', ref $_ ? 
$self->_recurse_where($_) : $self->_quote($_), diff --git a/t/lib/DBICTest/Schema/Track.pm b/t/lib/DBICTest/Schema/Track.pm index c43591a2b..10d49f7b4 100644 --- a/t/lib/DBICTest/Schema/Track.pm +++ b/t/lib/DBICTest/Schema/Track.pm @@ -116,6 +116,20 @@ __PACKAGE__->has_many ( } ); +__PACKAGE__->has_many ( + deliberately_broken_all_cd_tracks => __PACKAGE__, + sub { + # This is for test purposes only. A regular user does not + # need to sanity check the passed-in arguments, this is what + # the tests are for :) + my $args = &check_customcond_args; + + return { + "$args->{foreign_alias}.cd" => "$args->{self_alias}.cd" + }; + } +); + our $hook_cb; sub sqlt_deploy_hook { diff --git a/t/relationship/custom.t b/t/relationship/custom.t index fb4859797..f79b605d9 100644 --- a/t/relationship/custom.t +++ b/t/relationship/custom.t @@ -3,6 +3,7 @@ use warnings; use Test::More; use Test::Exception; +use Test::Warn; use lib qw(t/lib); use DBICTest ':DiffSQL'; @@ -297,6 +298,28 @@ is_deeply 'Proper find with related via coderef cond', ; +warnings_exist { + is_same_sql_bind( + $single_track->deliberately_broken_all_cd_tracks->as_query, + '( + SELECT me.trackid, me.cd, me.position, me.title, me.last_updated_on, me.last_updated_at + FROM track track__row + JOIN track me + ON me.cd = ? + WHERE track__row.trackid = ? + )', + [ + [{ dbic_colname => "me.cd", sqlt_datatype => "integer" } + => "track__row.cd" ], + [{ dbic_colname => "track__row.trackid", sqlt_datatype => "integer" } + => 19 ], + ], + 'Expected nonsensical JOIN cond', + ), +} qr/\Qrelationship 'deliberately_broken_all_cd_tracks' on source 'Track' specifies equality of column 'cd' and the *VALUE* 'cd' (you did not use the { -ident => ... } operator)/, + 'Warning on 99.9999% malformed custom cond' +; + $single_track->set_from_related( cd_cref_cond => undef ); ok $single_track->is_column_changed('cd'); is $single_track->get_column('cd'), undef, 'UNset from related via coderef cond'; From 638cd9500bc5e3f326f8b4ae0153633db2df98ec Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 08:11:55 +0200 Subject: [PATCH 190/548] Split DBIC from SQLMaker test (deprecated in next commit) --- t/76joins.t | 102 +++----------------------------------- t/sqlmaker/legacy_joins.t | 97 ++++++++++++++++++++++++++++++++++++ 2 files changed, 104 insertions(+), 95 deletions(-) create mode 100644 t/sqlmaker/legacy_joins.t diff --git a/t/76joins.t b/t/76joins.t index 66e9fb7d9..d20faeca5 100644 --- a/t/76joins.t +++ b/t/76joins.t @@ -7,103 +7,15 @@ use DBICTest ':DiffSQL'; my $schema = DBICTest->init_schema(); -# test the abstract join => SQL generator -my $sa = DBIx::Class::SQLMaker->new; - -my @j = ( - { child => 'person' }, - [ { father => 'person' }, { 'father.person_id' => 'child.father_id' }, ], - [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ], -); -my $match = 'person child JOIN person father ON ( father.person_id = ' - . 'child.father_id ) JOIN person mother ON ( mother.person_id ' - . '= child.mother_id )' - ; -is_same_sql( - $sa->_recurse_from(@j), - $match, - 'join 1 ok' -); - -my @j2 = ( - { mother => 'person' }, - [ [ { child => 'person' }, - [ { father => 'person' }, - { 'father.person_id' => 'child.father_id' } - ] - ], - { 'mother.person_id' => 'child.mother_id' } - ], -); -$match = 'person mother JOIN (person child JOIN person father ON (' - . ' father.person_id = child.father_id )) ON ( mother.person_id = ' - . 
'child.mother_id )' - ; -is_same_sql( - $sa->_recurse_from(@j2), - $match, - 'join 2 ok' -); - - -my @j3 = ( - { child => 'person' }, - [ { father => 'person', -join_type => 'inner' }, { 'father.person_id' => 'child.father_id' }, ], - [ { mother => 'person', -join_type => 'inner' }, { 'mother.person_id' => 'child.mother_id' } ], -); -$match = 'person child INNER JOIN person father ON ( father.person_id = ' - . 'child.father_id ) INNER JOIN person mother ON ( mother.person_id ' - . '= child.mother_id )' - ; - -is_same_sql( - $sa->_recurse_from(@j3), - $match, - 'join 3 (inner join) ok' -); - -my @j4 = ( - { mother => 'person' }, - [ [ { child => 'person', -join_type => 'left' }, - [ { father => 'person', -join_type => 'right' }, - { 'father.person_id' => 'child.father_id' } - ] - ], - { 'mother.person_id' => 'child.mother_id' } - ], -); -$match = 'person mother LEFT JOIN (person child RIGHT JOIN person father ON (' - . ' father.person_id = child.father_id )) ON ( mother.person_id = ' - . 'child.mother_id )' - ; -is_same_sql( - $sa->_recurse_from(@j4), - $match, - 'join 4 (nested joins + join types) ok' -); - -my @j5 = ( - { child => 'person' }, - [ { father => 'person' }, { 'father.person_id' => \'!= child.father_id' }, ], - [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ], -); -$match = 'person child JOIN person father ON ( father.person_id != ' - . 'child.father_id ) JOIN person mother ON ( mother.person_id ' - . '= child.mother_id )' - ; -is_same_sql( - $sa->_recurse_from(@j5), - $match, - 'join 5 (SCALAR reference for ON statement) ok' -); - my $rs = $schema->resultset("CD")->search( { 'year' => 2001, 'artist.name' => 'Caterwauler McCrae' }, - { from => [ { 'me' => 'cd' }, - [ - { artist => 'artist' }, - { 'me.artist' => 'artist.artistid' } - ] ] } + { from => [ + { 'me' => 'cd' }, + [ + { artist => 'artist' }, + { 'me.artist' => { -ident => 'artist.artistid' } }, + ], + ] } ); is( $rs + 0, 1, "Single record in resultset"); diff --git a/t/sqlmaker/legacy_joins.t b/t/sqlmaker/legacy_joins.t new file mode 100644 index 000000000..5d17e9941 --- /dev/null +++ b/t/sqlmaker/legacy_joins.t @@ -0,0 +1,97 @@ +use strict; +use warnings; + +use Test::More; +use lib qw(t/lib); +use DBICTest ':DiffSQL'; + +use DBIx::Class::SQLMaker; +my $sa = DBIx::Class::SQLMaker->new; + +my @j = ( + { child => 'person' }, + [ { father => 'person' }, { 'father.person_id' => 'child.father_id' }, ], + [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ], +); +my $match = 'person child JOIN person father ON ( father.person_id = ' + . 'child.father_id ) JOIN person mother ON ( mother.person_id ' + . '= child.mother_id )' + ; +is_same_sql( + $sa->_recurse_from(@j), + $match, + 'join 1 ok' +); + +my @j2 = ( + { mother => 'person' }, + [ [ { child => 'person' }, + [ { father => 'person' }, + { 'father.person_id' => 'child.father_id' } + ] + ], + { 'mother.person_id' => 'child.mother_id' } + ], +); +$match = 'person mother JOIN (person child JOIN person father ON (' + . ' father.person_id = child.father_id )) ON ( mother.person_id = ' + . 'child.mother_id )' + ; +is_same_sql( + $sa->_recurse_from(@j2), + $match, + 'join 2 ok' +); + +my @j3 = ( + { child => 'person' }, + [ { father => 'person', -join_type => 'inner' }, { 'father.person_id' => 'child.father_id' }, ], + [ { mother => 'person', -join_type => 'inner' }, { 'mother.person_id' => 'child.mother_id' } ], +); +$match = 'person child INNER JOIN person father ON ( father.person_id = ' + . 
'child.father_id ) INNER JOIN person mother ON ( mother.person_id ' + . '= child.mother_id )' + ; + +is_same_sql( + $sa->_recurse_from(@j3), + $match, + 'join 3 (inner join) ok' +); + +my @j4 = ( + { mother => 'person' }, + [ [ { child => 'person', -join_type => 'left' }, + [ { father => 'person', -join_type => 'right' }, + { 'father.person_id' => 'child.father_id' } + ] + ], + { 'mother.person_id' => 'child.mother_id' } + ], +); +$match = 'person mother LEFT JOIN (person child RIGHT JOIN person father ON (' + . ' father.person_id = child.father_id )) ON ( mother.person_id = ' + . 'child.mother_id )' + ; +is_same_sql( + $sa->_recurse_from(@j4), + $match, + 'join 4 (nested joins + join types) ok' +); + +my @j5 = ( + { child => 'person' }, + [ { father => 'person' }, { 'father.person_id' => \'!= child.father_id' }, ], + [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ], +); +$match = 'person child JOIN person father ON ( father.person_id != ' + . 'child.father_id ) JOIN person mother ON ( mother.person_id ' + . '= child.mother_id )' + ; +is_same_sql( + $sa->_recurse_from(@j5), + $match, + 'join 5 (SCALAR reference for ON statement) ok' +); + +done_testing; From 1efc866d8235ddd640d956352d59036a1cd3bbd7 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 08:22:24 +0200 Subject: [PATCH 191/548] Put in place deprecation forgotten for several years More info and rationale in the commit msg of c200d949 --- Changes | 2 ++ lib/DBIx/Class/SQLMaker.pm | 8 ++++++-- t/search/subquery.t | 3 +++ t/sqlmaker/core_quoted.t | 6 +++--- t/sqlmaker/legacy_joins.t | 3 +++ t/sqlmaker/msaccess.t | 6 +++--- 6 files changed, 20 insertions(+), 8 deletions(-) diff --git a/Changes b/Changes index 56396695e..e18f607db 100644 --- a/Changes +++ b/Changes @@ -16,6 +16,8 @@ Revision history for DBIx::Class objects that went out of sync with the storage (RT#96499) - CDBICompat::columns() now supports adding columns through supplied Class::DBI::Column instances (GH#52) + - Deprecate { col1 => col2 } expressions in manual from structures + (at some point of time manual from will be deprecated itself) * Fixes - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases diff --git a/lib/DBIx/Class/SQLMaker.pm b/lib/DBIx/Class/SQLMaker.pm index 6213c8b34..791e4fc47 100644 --- a/lib/DBIx/Class/SQLMaker.pm +++ b/lib/DBIx/Class/SQLMaker.pm @@ -452,8 +452,6 @@ sub _join_condition { # Backcompat for the old days when a plain hashref # { 't1.col1' => 't2.col2' } meant ON t1.col1 = t2.col2 - # Once things settle we should start warning here so that - # folks unroll their hacks if ( ref $cond eq 'HASH' and @@ -463,6 +461,12 @@ sub _join_condition { and ! ref ( (values %$cond)[0] ) ) { + carp_unique( + "ResultSet {from} structures with conditions not conforming to the " + . "SQL::Abstract syntax are deprecated: you either need to stop abusing " + . "{from} altogether, or express the condition properly using the " + . "{ -ident => ... } operator" + ); $cond = { keys %$cond => { -ident => values %$cond } } } elsif ( ref $cond eq 'ARRAY' ) { diff --git a/t/search/subquery.t b/t/search/subquery.t index 87195fdc0..8c3fcf777 100644 --- a/t/search/subquery.t +++ b/t/search/subquery.t @@ -6,6 +6,7 @@ use Test::More; use lib qw(t/lib); use DBICTest ':DiffSQL'; use DBIx::Class::SQLMaker::LimitDialects; +use DBIx::Class::_Util 'sigwarn_silencer'; my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; @@ -164,6 +165,8 @@ my @tests = ( for my $i (0 .. 
$#tests) { my $t = $tests[$i]; for my $p (1, 2) { # repeat everything twice, make sure we do not clobber search arguments + local $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ ); + is_same_sql_bind ( $t->{rs}->search ($t->{search}, $t->{attrs})->as_query, $t->{sqlbind}, diff --git a/t/sqlmaker/core_quoted.t b/t/sqlmaker/core_quoted.t index e90befeb8..8e455660d 100644 --- a/t/sqlmaker/core_quoted.t +++ b/t/sqlmaker/core_quoted.t @@ -24,7 +24,7 @@ my ($sql, @bind) = $sql_maker->select( '-join_type' => '' }, { - 'artist.artistid' => 'me.artist' + 'artist.artistid' => { -ident => 'me.artist' }, } ], [ @@ -33,7 +33,7 @@ my ($sql, @bind) = $sql_maker->select( '-join_type' => 'left' }, { - 'tracks.cd' => 'me.cdid' + 'tracks.cd' => { -ident => 'me.cdid' }, } ], ], @@ -307,7 +307,7 @@ $sql_maker->quote_char([qw/[ ]/]); '-join_type' => '' }, { - 'artist.artistid' => 'me.artist' + 'artist.artistid' => { -ident => 'me.artist' } } ] ], diff --git a/t/sqlmaker/legacy_joins.t b/t/sqlmaker/legacy_joins.t index 5d17e9941..1c93c3596 100644 --- a/t/sqlmaker/legacy_joins.t +++ b/t/sqlmaker/legacy_joins.t @@ -4,10 +4,13 @@ use warnings; use Test::More; use lib qw(t/lib); use DBICTest ':DiffSQL'; +use DBIx::Class::_Util 'sigwarn_silencer'; use DBIx::Class::SQLMaker; my $sa = DBIx::Class::SQLMaker->new; +$SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ ); + my @j = ( { child => 'person' }, [ { father => 'person' }, { 'father.person_id' => 'child.father_id' }, ], diff --git a/t/sqlmaker/msaccess.t b/t/sqlmaker/msaccess.t index 0333cb28c..179b3f31e 100644 --- a/t/sqlmaker/msaccess.t +++ b/t/sqlmaker/msaccess.t @@ -86,7 +86,7 @@ my ($sql, @bind) = $sa->select( { me => "cd" }, [ { "-join_type" => "LEFT", artist => "artist" }, - { "artist.artistid" => "me.artist" }, + { "artist.artistid" => { -ident => "me.artist" } }, ], ], [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ], @@ -104,11 +104,11 @@ is_same_sql_bind( { me => "cd" }, [ { "-join_type" => "LEFT", track => "track" }, - { "track.cd" => "me.cdid" }, + { "track.cd" => { -ident => "me.cdid" } }, ], [ { artist => "artist" }, - { "artist.artistid" => "me.artist" }, + { "artist.artistid" => { -ident => "me.artist" } }, ], ], [ 'track.title', 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ], From 445d17c8b1cb523f0cb245a0bb26715049bc5063 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Mon, 15 Sep 2014 15:32:28 +0200 Subject: [PATCH 192/548] Ensure the tempextlib is available at distbuild time Missed one spot to load ::_Util during bbcc1fe8 --- maint/gen_pod_inherit | 2 ++ 1 file changed, 2 insertions(+) diff --git a/maint/gen_pod_inherit b/maint/gen_pod_inherit index db0f65a7a..e441e88ee 100755 --- a/maint/gen_pod_inherit +++ b/maint/gen_pod_inherit @@ -3,6 +3,8 @@ use warnings; use strict; +use DBIx::Class::_Util; # load early in case any shims are needed + my $lib_dir = 'lib'; my $pod_dir = 'maint/.Generated_Pod'; From 07add744fd8b328dbc83f2a0906aaf6bd0b25674 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 16 Sep 2014 08:01:11 +0200 Subject: [PATCH 193/548] Yet another loose end on the cond collapser Fingercross this is the last one... 
this transform is turning out ot be a bad bad bad idea :((( --- lib/DBIx/Class/Storage/DBIHacks.pm | 63 +++++++++++++++++---- t/sqlmaker/dbihacks_internals.t | 88 +++++++++++++++++++++++++++--- 2 files changed, 133 insertions(+), 18 deletions(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 17fdbfbe4..4c0da0790 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -1083,20 +1083,57 @@ sub _collapse_cond { } } - return unless $fin_idx; - - $fin = ( keys %$fin_idx == 1 ) ? (values %$fin_idx)[0] : { - -or => [ map { - # unroll single-element hashes - ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) - ? %{$fin_idx->{$_}} - : $fin_idx->{$_} - } sort keys %$fin_idx ] - }; + if (! $fin_idx) { + return; + } + elsif ( keys %$fin_idx == 1 ) { + $fin = (values %$fin_idx)[0]; + } + else { + my @or; + + # at this point everything is at most one level deep - unroll if needed + for (sort keys %$fin_idx) { + if ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) { + my ($l, $r) = %{$fin_idx->{$_}}; + + if ( + ref $r eq 'ARRAY' + and + ( + ( @$r == 1 and $l =~ /^\-and$/i ) + or + $l =~ /^\-or$/i + ) + ) { + push @or, @$r + } + + elsif ( + ref $r eq 'HASH' + and + keys %$r == 1 + and + $l =~ /^\-(?:and|or)$/i + ) { + push @or, %$r; + } + + else { + push @or, $l, $r; + } + } + else { + push @or, $fin_idx->{$_}; + } + } + + $fin->{-or} = \@or; + } } else { # not a hash not an array - $fin = { '' => $where }; + $fin = { -and => [ $where ] }; } # unroll single-element -and's @@ -1118,6 +1155,10 @@ sub _collapse_cond { %$fin, %{$and->[0]} }; } + else { + $fin->{-and} = $and; + last; + } } # compress same-column conds found in $fin diff --git a/t/sqlmaker/dbihacks_internals.t b/t/sqlmaker/dbihacks_internals.t index b635d6fb5..cd229fd5d 100644 --- a/t/sqlmaker/dbihacks_internals.t +++ b/t/sqlmaker/dbihacks_internals.t @@ -81,10 +81,17 @@ for my $t ( }, { where => { -and => [ \'foo=bar', [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'} ] }, - cc_result => { '' => \'foo=bar', name => 'Caterwauler McCrae', artistid => $num }, + cc_result => { -and => [ \'foo=bar' ], name => 'Caterwauler McCrae', artistid => $num }, sql => 'WHERE foo=bar AND artistid = ? AND name = ?', efcc_result => { name => 'Caterwauler McCrae', artistid => $num }, }, + { + where => { -and => [ \'foo=bar', [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'}, \'buzz=bozz' ] }, + cc_result => { -and => [ \'foo=bar', \'buzz=bozz' ], name => 'Caterwauler McCrae', artistid => $num }, + sql => 'WHERE foo=bar AND artistid = ? AND name = ? AND buzz=bozz', + collapsed_sql => 'WHERE foo=bar AND buzz=bozz AND artistid = ? AND name = ?', + efcc_result => { name => 'Caterwauler McCrae', artistid => $num }, + }, { where => { artistid => [ $num ], rank => [ 13, 2, 3 ], charfield => [ undef ] }, cc_result => { artistid => $num, charfield => undef, rank => [13, 2, 3] }, @@ -357,15 +364,82 @@ for my $t ( [ { 'me.title' => 'Spoonful of bees' } ], ]}, cc_result => { - '' => \[ + -and => [ \[ "LOWER(me.title) LIKE ?", '%spoon%', - ], + ]], 'me.title' => 'Spoonful of bees', }, sql => 'WHERE LOWER(me.title) LIKE ? 
AND me.title = ?', efcc_result => { 'me.title' => 'Spoonful of bees' }, - } + }, + + # crazy literals + { + where => { + -or => [ + \'foo = bar', + ], + }, + sql => 'WHERE foo = bar', + cc_result => { + -and => [ + \'foo = bar', + ], + }, + efcc_result => {}, + }, + { + where => { + -or => [ + \'foo = bar', + \'baz = ber', + ], + }, + sql => 'WHERE foo = bar OR baz = ber', + collapsed_sql => 'WHERE baz = ber OR foo = bar', + cc_result => { + -or => [ + \'baz = ber', + \'foo = bar', + ], + }, + efcc_result => {}, + }, + { + where => { + -and => [ + \'foo = bar', + \'baz = ber', + ], + }, + sql => 'WHERE foo = bar AND baz = ber', + cc_result => { + -and => [ + \'foo = bar', + \'baz = ber', + ], + }, + efcc_result => {}, + }, + { + where => { + -and => [ + \'foo = bar', + \'baz = ber', + x => { -ident => 'y' }, + ], + }, + sql => 'WHERE foo = bar AND baz = ber AND x = y', + cc_result => { + -and => [ + \'foo = bar', + \'baz = ber', + ], + x => { '=' => { -ident => 'y' } } + }, + efcc_result => { x => { -ident => 'y' } }, + }, ) { for my $w ( @@ -390,14 +464,14 @@ for my $t ( is_same_sql ( $generated_sql, $t->{sql}, "Expected SQL from $name" ) if exists $t->{sql}; - my $collapsed_cond = $schema->storage->_collapse_cond($w); - is_same_sql( - ($sm->where($collapsed_cond))[0], + ($sm->where($t->{cc_result}))[0], ( $t->{collapsed_sql} || $t->{sql} || $generated_sql ), "Collapse did not alter *the semantics* of the final SQL based on $name", ); + my $collapsed_cond = $schema->storage->_collapse_cond($w); + is_deeply( $collapsed_cond, $t->{cc_result}, From e084cb2bccea6e55372bb772ab02b7c9804542a1 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 16 Sep 2014 10:04:59 +0200 Subject: [PATCH 194/548] Multilevel find_or_(multi)create got inadvertently broken Relax the check of what we feed to the relcond resolver --- lib/DBIx/Class/ResultSource.pm | 3 +- t/multi_create/find_or_multicreate.t | 71 ++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 t/multi_create/find_or_multicreate.t diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 4669926ff..2d54ec0a3 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1907,8 +1907,9 @@ sub _resolve_relationship_condition { $args->{foreign_values} = { $args->{foreign_values}->get_columns }; } elsif (! defined $args->{foreign_values} or ref $args->{foreign_values} eq 'HASH') { + my $ri = { map { $_ => 1 } $rel_rsrc->relationships }; my $ci = $rel_rsrc->columns_info; - ! exists $ci->{$_} and $self->throw_exception( + ! exists $ci->{$_} and ! 
exists $ri->{$_} and $self->throw_exception( "Key '$_' supplied as 'foreign_values' is not a column on related source '@{[ $rel_rsrc->source_name ]}'" ) for keys %{ $args->{foreign_values} ||= {} }; } diff --git a/t/multi_create/find_or_multicreate.t b/t/multi_create/find_or_multicreate.t new file mode 100644 index 000000000..762b96275 --- /dev/null +++ b/t/multi_create/find_or_multicreate.t @@ -0,0 +1,71 @@ +use strict; +use warnings; + +use Test::More; +use lib qw(t/lib); +use DBICTest; + +my $schema = DBICTest->init_schema( no_populate => 1 ); + +my $t11 = $schema->resultset('Track')->find_or_create({ + trackid => 1, + title => 'Track one cd one', + cd => { + year => 1, + title => 'CD one', + very_long_artist_relationship => { + name => 'Artist one', + } + } +}); + +my $t12 = $schema->resultset('Track')->find_or_create({ + trackid => 2, + title => 'Track two cd one', + cd => { + title => 'CD one', + very_long_artist_relationship => { + name => 'Artist one', + } + } +}); + +# FIXME - MC should be smart enough to infer this on its own... +$schema->resultset('Artist')->create({ name => 'Artist two' }); + +my $t2 = $schema->resultset('Track')->find_or_create({ + trackid => 3, + title => 'Track one cd one', + cd => { + year => 1, + title => 'CD one', + very_long_artist_relationship => { + name => 'Artist two', + } + } +}); + +is_deeply( + $schema->resultset('Artist')->search({}, { + prefetch => { cds => 'tracks' }, + order_by => 'tracks.title', + })->all_hri, + [ + { artistid => 1, charfield => undef, name => "Artist one", rank => 13, cds => [ + { artist => 1, cdid => 1, genreid => undef, single_track => undef, title => "CD one", year => 1, tracks => [ + { cd => 1, last_updated_at => undef, last_updated_on => undef, position => 1, title => "Track one cd one", trackid => 1 }, + { cd => 1, last_updated_at => undef, last_updated_on => undef, position => 2, title => "Track two cd one", trackid => 2 }, + ]}, + ]}, + { artistid => 2, charfield => undef, name => "Artist two", rank => 13, cds => [ + { artist => 2, cdid => 2, genreid => undef, single_track => undef, title => "CD one", year => 1, tracks => [ + { cd => 2, last_updated_at => undef, last_updated_on => undef, position => 1, title => "Track one cd one", trackid => 3 }, + ]}, + ]}, + ], + 'Expected state of database after several find_or_create rounds' +); + + +done_testing; + From 4f8c967809333fc5378171094166600b2a3a5121 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Tue, 16 Sep 2014 18:24:25 +0200 Subject: [PATCH 195/548] Fix broken temporary backcompat shim for naughty _resolve_condition callers The 9b96f3d41 refactor undid all the backcompat shims that were introduced in c0f445097. Reinstate the insanity until after 28... 
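For orientation, the interface these shims keep tolerant of sloppy callers is the named-argument resolver already used by the reworked accessor in PATCH 187 above. The sketch below is illustrative only: it assumes a connected $row as set up by the tests in this series, and the 'artist'/'me' names are placeholder aliases rather than anything prescribed by this patch.

    # present-day caller, argument names as in Relationship/Accessor.pm above
    my $resolved = $row->result_source->_resolve_relationship_condition(
      rel_name           => 'artist',   # placeholder relationship name
      self_alias         => 'me',
      foreign_alias      => 'artist',
      self_result_object => $row,
    );

    # pieces consumed by callers throughout this series:
    #   $resolved->{condition}
    #   $resolved->{join_free_condition}
    #   $resolved->{identity_map}

    # the "naughty" legacy style being shimmed is the positional
    # _resolve_condition( $rel_info->{cond}, ... ) form visible in the
    # lines removed from Relationship/Accessor.pm in PATCH 187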
--- lib/DBIx/Class/ResultSource.pm | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 2d54ec0a3..22ab02b27 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1865,12 +1865,17 @@ sub _resolve_relationship_condition { $self->throw_exception("Arguments 'self_alias' and 'foreign_alias' may not be identical") if $args->{self_alias} eq $args->{foreign_alias}; +# TEMP + my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'"; + my $rel_info = $self->relationship_info($args->{rel_name}) # TEMP # or $self->throw_exception( "No such $exception_rel_id" ); or carp_unique("Requesting resolution on non-existent relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}': fix your code *soon*, as it will break with the next major version"); - my $exception_rel_id = "relationship '$rel_info->{_original_name}' on source '@{[ $self->source_name ]}'"; +# TEMP + $exception_rel_id = "relationship '$rel_info->{_original_name}' on source '@{[ $self->source_name ]}'" + if $rel_info; $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures") if exists $args->{self_result_object} and exists $args->{foreign_values}; @@ -1890,9 +1895,13 @@ sub _resolve_relationship_condition { ) ; - my $rel_rsrc = $self->related_source($args->{rel_name}); +#TEMP + my $rel_rsrc;# = $self->related_source($args->{rel_name}); if (exists $args->{foreign_values}) { +# TEMP + $rel_rsrc ||= $self->related_source($args->{rel_name}); + if (defined blessed $args->{foreign_values}) { $self->throw_exception( "Objects supplied as 'foreign_values' ($args->{foreign_values}) must inherit from DBIx::Class::Row" ) @@ -1954,6 +1963,9 @@ sub _resolve_relationship_condition { "The join-free condition returned for $exception_rel_id must be a hash reference" ) unless ref $jfc eq 'HASH'; +# TEMP + $rel_rsrc ||= $self->related_source($args->{rel_name}); + my ($joinfree_alias, $joinfree_source); if (defined $args->{self_result_object}) { $joinfree_alias = $args->{foreign_alias}; @@ -2141,6 +2153,9 @@ sub _resolve_relationship_condition { next if $col_eqs->{$lhs} eq UNRESOLVABLE_CONDITION; +# TEMP + $rel_rsrc ||= $self->related_source($args->{rel_name}); + # there is no way to know who is right and who is left in a cref # therefore a full blown resolution call, and figure out the # direction a bit further below From 8494142cea239b72298004f762cf500f71650533 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 17 Sep 2014 18:58:29 +0200 Subject: [PATCH 196/548] Revert e9f71ab2 - it ends up breaking a declared API I should have thought of this earlier - a debug object is not necessarily isa(IO::Handle) (the documentation never mentioned this). And indeed: the tests of DBIx::Class::QueryLog do exactly this - they use an object with just a print() method and absolutely nothing else. So instead of disabling the sticky autoflush (which really is only a visible change on STDERR dup) document this behavior and move on. 
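The user-visible upshot (and what the new test below pins down) is that the object handed to debugfh() only has to respond to print(); it does not need to be an IO::Handle at all. A minimal sketch, with a hypothetical package name and assuming a connected $schema as in the test suite:

    package My::TraceSink;   # hypothetical name, for illustration only
    sub new   { bless { buf => '' }, shift }
    sub print { my $self = shift; $self->{buf} .= join '', @_ }

    # elsewhere, much like the QueryLog-style test added below:
    $schema->storage->debug(1);
    $schema->storage->debugfh( My::TraceSink->new );
    $schema->resultset('Artist')->count;   # trace output accumulates in the sink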
--- lib/DBIx/Class/Storage/Statistics.pm | 12 +++++---- t/storage/debug.t | 38 +++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class/Storage/Statistics.pm b/lib/DBIx/Class/Storage/Statistics.pm index 4894cf7c2..5768db693 100644 --- a/lib/DBIx/Class/Storage/Statistics.pm +++ b/lib/DBIx/Class/Storage/Statistics.pm @@ -15,7 +15,6 @@ BEGIN { extends 'DBIx::Class'; use DBIx::Class::_Util qw(sigwarn_silencer qsub); -use IO::Handle (); use namespace::clean; =head1 NAME @@ -44,12 +43,13 @@ Returns a new L object. Sets or retrieves the filehandle used for trace/debug output. This should be an L compatible object (only the -L<< printflush|IO::Handle/$io->printflush_(_ARGS_) >> method is used). By +L<< print|IO::Handle/METHODS >> method is used). By default it is initially set to STDERR - although see discussion of the L environment variable. -Invoked as a getter it will lazily open a filehandle for you if one is not -already set. +Invoked as a getter it will lazily open a filehandle and set it to +L<< autoflush|perlvar/HANDLE->autoflush( EXPR ) >> (if one is not +already set). =cut @@ -85,6 +85,8 @@ sub _build_debugfh { $_[0]->_defaulted_to_stderr(1); } + $fh->autoflush(1); + $fh; } @@ -109,7 +111,7 @@ sub print { local $SIG{__WARN__} = sigwarn_silencer(qr/^Wide character in print/) if $self->_defaulted_to_stderr; - $fh->printflush($msg); + $fh->print($msg); } =head2 silence diff --git a/t/storage/debug.t b/t/storage/debug.t index f28d4b5f7..e023fff98 100644 --- a/t/storage/debug.t +++ b/t/storage/debug.t @@ -24,6 +24,7 @@ $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new); ok ( $schema->storage->debug(1), 'debug' ); $schema->storage->debugfh($lfn->openw); +$schema->storage->debugfh->autoflush(1); $schema->resultset('CD')->count; my @loglines = $lfn->slurp; @@ -94,7 +95,6 @@ die "How did that fail... $exception" is_deeply(\@warnings, [], 'No warnings with unicode on STDERR'); - # test debugcb and debugobj protocol { my $rs = $schema->resultset('CD')->search( { @@ -136,4 +136,40 @@ is_deeply(\@warnings, [], 'No warnings with unicode on STDERR'); is_deeply ( $do->{_traced_bind}, \@bind_trace ); } +# recreate test as seen in DBIx::Class::QueryLog +# the rationale is that if someone uses a non-IO::Handle object +# on CPAN, many are *bound* to use one on darkpan. Thus this +# test to ensure there is no future silent breakage +{ + my $output = ""; + + { + package DBICTest::_Printable; + + sub print { + my ($self, @args) = @_; + $output .= join('', @args); + } + } + + $schema->storage->debugobj(undef); + $schema->storage->debug(1); + $schema->storage->debugfh( bless {}, "DBICTest::_Printable" ); + $schema->storage->txn_do( sub { $schema->resultset('Artist')->count } ); + + like ( + $output, + qr/ + \A + ^ \QBEGIN WORK\E \s*? + ^ \QSELECT COUNT( * ) FROM artist me:\E \s*? + ^ \QCOMMIT\E \s*? + \z + /xm + ); + + $schema->storage->debug(0); + $schema->storage->debugfh(undef); +} + done_testing; From 777738d07cfd6055a4f40b944a69329c6995df9a Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Wed, 17 Sep 2014 19:36:45 +0200 Subject: [PATCH 197/548] Properly implement very sloppy and lazy hack from bad0b73b We are abusing local() here - might as well abuse it correctly. This way we do not add a permanent shitty name to the relinfo structure and nobody needs to fix their tests. Winning! 
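The mechanism being leaned on is ordinary dynamic scoping of a single hash slot: local() shadows the entry for the duration of the enclosing scope and restores the previous value (or the absence of the key) on exit. A self-contained illustration (the sample data is made up; only the local() behaviour matters):

    my %rels = ( artist => { cond => { 'foreign.artistid' => 'self.artist' } } );

    {
      # temporarily graft a fake 'me' entry, tagging where it came from
      local $rels{me} = { %{ $rels{artist} }, _original_name => 'artist' };
      # ... anything called from within this block sees the extra 'me' slot ...
    }

    # once the block is left the key is gone again
    print exists $rels{me} ? "leaked\n" : "cleaned up\n";   # prints "cleaned up"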
--- lib/DBIx/Class/Relationship/Base.pm | 7 ++++++- lib/DBIx/Class/ResultSource.pm | 3 +-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index c4d111186..56adec8f7 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -546,7 +546,12 @@ sub related_resultset { # root alias as 'me', instead of $rel (as opposed to invoking # $rs->search_related) - local $rsrc->{_relationships}{me} = $rsrc->{_relationships}{$rel}; # make the fake 'me' rel + # make the fake 'me' rel + local $rsrc->{_relationships}{me} = { + %{ $rsrc->{_relationships}{$rel} }, + _original_name => $rel, + }; + my $obj_table_alias = lc($rsrc->source_name) . '__row'; $obj_table_alias =~ s/\W+/_/g; diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 22ab02b27..de51b5e4a 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -1331,7 +1331,6 @@ sub add_relationship { my %rels = %{ $self->_relationships }; $rels{$rel} = { class => $f_source_name, source => $f_source_name, - _original_name => $rel, cond => $cond, attrs => $attrs }; $self->_relationships(\%rels); @@ -1875,7 +1874,7 @@ sub _resolve_relationship_condition { # TEMP $exception_rel_id = "relationship '$rel_info->{_original_name}' on source '@{[ $self->source_name ]}'" - if $rel_info; + if $rel_info and exists $rel_info->{_original_name}; $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures") if exists $args->{self_result_object} and exists $args->{foreign_values}; From 10e3756737972e77ba1eb1dd00b28bce06543d7e Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Thu, 18 Sep 2014 15:43:30 +0200 Subject: [PATCH 198/548] Improve SQLT::Parser warning messages (build further upon e089c417) --- lib/SQL/Translator/Parser/DBIx/Class.pm | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index c84f73ee9..3af0d0476 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -173,7 +173,7 @@ sub parse { my $relsource = try { $source->related_source($rel) }; unless ($relsource) { - carp "Ignoring relationship '$rel' - related resultsource '$rel_info->{class}' is not registered with this schema\n"; + carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '$rel_info->{class}' is not registered with this schema\n"; next; }; @@ -192,7 +192,7 @@ sub parse { for ( keys %{$rel_info->{cond}} ) { unless (exists $other_columns_idx{$_}) { - carp "Ignoring relationship '$rel' - related resultsource does not contain one of the specified columns: '$_'\n"; + carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '@{[ $relsource->source_name ]}' does not contain one of the specified columns: '$_'\n"; next REL; } } From d16df2398243321f1bd43fcc625d2e14852af0c9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 19 Sep 2014 10:32:10 +0200 Subject: [PATCH 199/548] Properly handle empty group_by/order_by --- lib/DBIx/Class/ResultSet.pm | 20 +++--- t/search/empty_attrs.t | 51 ++++++++++++++ t/sqlmaker/limit_dialects/torture.t | 102 +++++++++++++++++++++++++++- 3 files changed, 162 insertions(+), 11 deletions(-) create mode 100644 t/search/empty_attrs.t diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index 97417fa2d..a699745da 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ 
b/lib/DBIx/Class/ResultSet.pm @@ -3614,18 +3614,18 @@ sub _resolved_attrs { ]; } - if ( defined $attrs->{order_by} ) { - $attrs->{order_by} = ( - ref( $attrs->{order_by} ) eq 'ARRAY' - ? [ @{ $attrs->{order_by} } ] - : [ $attrs->{order_by} || () ] - ); - } + for my $attr (qw(order_by group_by)) { - if ($attrs->{group_by} and ref $attrs->{group_by} ne 'ARRAY') { - $attrs->{group_by} = [ $attrs->{group_by} ]; - } + if ( defined $attrs->{$attr} ) { + $attrs->{$attr} = ( + ref( $attrs->{$attr} ) eq 'ARRAY' + ? [ @{ $attrs->{$attr} } ] + : [ $attrs->{$attr} || () ] + ); + delete $attrs->{$attr} unless @{$attrs->{$attr}}; + } + } # generate selections based on the prefetch helper my ($prefetch, @prefetch_select, @prefetch_as); diff --git a/t/search/empty_attrs.t b/t/search/empty_attrs.t new file mode 100644 index 000000000..3b5248736 --- /dev/null +++ b/t/search/empty_attrs.t @@ -0,0 +1,51 @@ +use strict; +use warnings; + +use Test::More; + +use lib qw(t/lib); +use DBICTest ':DiffSQL'; + +my $schema = DBICTest->init_schema(); + +my $rs = $schema->resultset('Artist')->search( + [ -and => [ {}, [] ], -or => [ {}, [] ] ], + { + select => [], + columns => {}, + '+columns' => 'artistid', + join => [ {}, [ [ {}, {} ] ], {} ], + prefetch => [ [ [ {}, [] ], {} ], {}, [ {} ] ], + order_by => [], + group_by => [], + offset => 0, + } +); + +is_same_sql_bind( + $rs->as_query, + '(SELECT me.artistid FROM artist me)', + [], +); + +is_same_sql_bind( + $rs->count_rs->as_query, + '(SELECT COUNT(*) FROM artist me)', + [], +); + +is_same_sql_bind( + $rs->as_subselect_rs->search({}, { columns => 'artistid' })->as_query, + '(SELECT me.artistid FROM (SELECT me.artistid FROM artist me) me)', + [], +); + +{ + local $TODO = 'Stupid misdesigned as_subselect_rs'; + is_same_sql_bind( + $rs->as_subselect_rs->as_query, + $rs->as_subselect_rs->search({}, { columns => 'artistid' })->as_query, + ); +} + +done_testing; diff --git a/t/sqlmaker/limit_dialects/torture.t b/t/sqlmaker/limit_dialects/torture.t index c14ea60cf..9d8d23db8 100644 --- a/t/sqlmaker/limit_dialects/torture.t +++ b/t/sqlmaker/limit_dialects/torture.t @@ -37,6 +37,12 @@ my @order_bind = ( my $tests = { LimitOffset => { + limit_plain => [ + "( SELECT me.artistid FROM artist me LIMIT ? )", + [ + [ { sqlt_datatype => 'integer' } => 5 ] + ], + ], limit => [ "( SELECT me.id, owner.id, owner.name, ? * ?, ? @@ -140,6 +146,12 @@ my $tests = { }, LimitXY => { + limit_plain => [ + "( SELECT me.artistid FROM artist me LIMIT ? )", + [ + [ { sqlt_datatype => 'integer' } => 5 ] + ], + ], ordered_limit_offset => [ "( SELECT me.id, owner.id, owner.name, ? * ?, ? @@ -181,6 +193,12 @@ my $tests = { }, SkipFirst => { + limit_plain => [ + "( SELECT FIRST ? me.artistid FROM artist me )", + [ + [ { sqlt_datatype => 'integer' } => 5 ] + ], + ], ordered_limit_offset => [ "( SELECT SKIP ? FIRST ? me.id, owner.id, owner.name, ? * ?, ? @@ -220,6 +238,12 @@ my $tests = { }, FirstSkip => { + limit_plain => [ + "( SELECT FIRST ? me.artistid FROM artist me )", + [ + [ { sqlt_datatype => 'integer' } => 5 ] + ], + ], ordered_limit_offset => [ "( SELECT FIRST ? SKIP ? me.id, owner.id, owner.name, ? * ?, ? @@ -295,6 +319,23 @@ my $tests = { )"; { + limit_plain => [ + "( + SELECT me.artistid + FROM ( + SELECT me.artistid, ROW_NUMBER() OVER( ) AS rno__row__index + FROM ( + SELECT me.artistid + FROM artist me + ) me + ) me + WHERE rno__row__index >= ? AND rno__row__index <= ? 
+ )", + [ + [ { sqlt_datatype => 'integer' } => 1 ], + [ { sqlt_datatype => 'integer' } => 5 ], + ], + ], limit => [$unordered_sql, [ @select_bind, @@ -380,6 +421,19 @@ my $tests = { }; { + limit_plain => [ + "( + SELECT me.artistid + FROM ( + SELECT me.artistid + FROM artist me + ) me + WHERE ROWNUM <= ? + )", + [ + [ { sqlt_datatype => 'integer' } => 5 ], + ], + ], limit => [ $limit_sql->(), [ @select_bind, @@ -479,6 +533,10 @@ my $tests = { }, FetchFirst => { + limit_plain => [ + "( SELECT me.artistid FROM artist me FETCH FIRST 5 ROWS ONLY )", + [], + ], limit => [ "( SELECT me.id, owner.id, owner.name, ? * ?, ? @@ -593,6 +651,10 @@ my $tests = { }, Top => { + limit_plain => [ + "( SELECT TOP 5 me.artistid FROM artist me )", + [], + ], limit => [ "( SELECT TOP 4 me.id, owner.id, owner.name, ? * ?, ? @@ -699,6 +761,25 @@ my $tests = { }, GenericSubQ => { + limit_plain => [ + "( + SELECT me.artistid + FROM ( + SELECT me.artistid + FROM artist me + ) me + WHERE + ( + SELECT COUNT(*) + FROM artist rownum__emulation + WHERE rownum__emulation.artistid < me.artistid + ) < ? + ORDER BY me.artistid ASC + )", + [ + [ { sqlt_datatype => 'integer' } => 5 ] + ], + ], ordered_limit => [ "( SELECT me.id, owner__id, owner__name, bar, baz @@ -836,7 +917,25 @@ for my $limtype (sort keys %$tests) { delete $schema->storage->_sql_maker->{_cached_syntax}; $schema->storage->_sql_maker->limit_dialect ($limtype); - my $can_run = ($limtype eq $native_limit_dialect or $limtype eq 'GenericSubQ'); + # do the simplest thing possible first + if ($tests->{$limtype}{limit_plain}) { + is_same_sql_bind( + $schema->resultset('Artist')->search( + [ -and => [ {}, [] ], -or => [ {}, [] ] ], + { + columns => 'artistid', + join => [ {}, [ [ {}, {} ] ], {} ], + prefetch => [ [ [ {}, [] ], {} ], {}, [ {} ] ], + order_by => ( $limtype eq 'GenericSubQ' ? 'artistid' : [] ), + group_by => [], + rows => 5, + offset => 0, + } + )->as_query, + @{$tests->{$limtype}{limit_plain}}, + "$limtype: Plain unordered ungrouped select with limit and no offset", + ) + } # chained search is necessary to exercise the recursive {where} parser my $rs = $schema->resultset('BooksInLibrary')->search( @@ -856,6 +955,7 @@ for my $limtype (sort keys %$tests) { # # not all tests run on all dialects (somewhere impossible, somewhere makes no sense) # + my $can_run = ($limtype eq $native_limit_dialect or $limtype eq 'GenericSubQ'); # only limit, no offset, no order if ($tests->{$limtype}{limit}) { From 07292953168edb416e480a16d8bac39a575a73f9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Fri, 19 Sep 2014 19:43:57 +0200 Subject: [PATCH 200/548] (travis) Attempt to revert 86370cc74, SC is too slow on uptake. Grumble --- maint/travis-ci_scripts/20_install.bash | 4 ++-- maint/travis-ci_scripts/30_before_script.bash | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/maint/travis-ci_scripts/20_install.bash b/maint/travis-ci_scripts/20_install.bash index 4fda3bd48..c18faecf8 100755 --- a/maint/travis-ci_scripts/20_install.bash +++ b/maint/travis-ci_scripts/20_install.bash @@ -7,12 +7,12 @@ CPAN_MIRROR=$(echo "$PERL_CPANM_OPT" | grep -oP -- '--mirror\s+\S+' | head -n 1 if ! 
[[ "$CPAN_MIRROR" =~ "http://" ]] ; then echo_err "Unable to extract primary cpan mirror from PERL_CPANM_OPT - something is wrong" echo_err "PERL_CPANM_OPT: $PERL_CPANM_OPT" - CPAN_MIRROR="http://cpan.shadowcatprojects.net/" + CPAN_MIRROR="http://cpan.metacpan.org/" PERL_CPANM_OPT="$PERL_CPANM_OPT --mirror $CPAN_MIRROR" echo_err "Using $CPAN_MIRROR for the time being" fi -# do not set PERLBREW_CPAN_MIRROR - the canonical backpan.perl.org does not have the perl tarballs +# do not set PERLBREW_CPAN_MIRROR - not all backpan-like mirrors have the perl tarballs export PERL_MM_USE_DEFAULT=1 PERL_MM_NONINTERACTIVE=1 PERL_AUTOINSTALL_PREFER_CPAN=1 HARNESS_TIMER=1 MAKEFLAGS="-j$NUMTHREADS" # try CPAN's latest offering if requested diff --git a/maint/travis-ci_scripts/30_before_script.bash b/maint/travis-ci_scripts/30_before_script.bash index 8fd295e91..7d247c804 100755 --- a/maint/travis-ci_scripts/30_before_script.bash +++ b/maint/travis-ci_scripts/30_before_script.bash @@ -107,7 +107,7 @@ else parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant - parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which Path::Class@0.34 + parallel_installdeps_notest SQL::Abstract Moose Module::Install JSON SQL::Translator File::Which if [[ -n "$DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then # the official version is very much outdated and does not compile on 5.14+ From 3437fbe9986916a555b7c924c74be1de762702d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dagfinn=20Ilmari=20Manns=C3=A5ker?= Date: Sat, 20 Sep 2014 13:24:00 +0100 Subject: [PATCH 201/548] Add basic .dir-locals.el --- .dir-locals.el | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .dir-locals.el diff --git a/.dir-locals.el b/.dir-locals.el new file mode 100644 index 000000000..46d8e83fd --- /dev/null +++ b/.dir-locals.el @@ -0,0 +1,6 @@ +((cperl-mode . ((cperl-indent-level . 2) + (cperl-continued-statement-offset . 2) + (cperl-continued-brace-offset . 0) + (cperl-close-paren-offset . -2) + (cperl-indent-subs-specially . nil) + (indent-tabs-mode . 
nil)))) From a9e985b78735ff61e4443139aa510915222cd550 Mon Sep 17 00:00:00 2001 From: Matt S Trout Date: Mon, 7 Oct 2013 11:40:53 +0000 Subject: [PATCH 202/548] Add forgotten bit preventing scan of where-binds (originally part of 1b8f2dd9) --- lib/DBIx/Class/Storage/DBIHacks.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/DBIx/Class/Storage/DBIHacks.pm b/lib/DBIx/Class/Storage/DBIHacks.pm index 4c0da0790..f8f908df1 100644 --- a/lib/DBIx/Class/Storage/DBIHacks.pm +++ b/lib/DBIx/Class/Storage/DBIHacks.pm @@ -416,7 +416,7 @@ sub _resolve_aliastypes_from_select_args { # generate sql chunks my $to_scan = { restricting => [ - $sql_maker->_recurse_where ($attrs->{where}), + ($sql_maker->_recurse_where ($attrs->{where}))[0], $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} }), ], grouping => [ From d095c62d491afe010e870f31b4e0d1419273cba9 Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 21 Sep 2014 03:23:38 +0200 Subject: [PATCH 203/548] Proper end-of-file for DBIx/Class.pm --- lib/DBIx/Class.pm | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm index 88b6bf470..2508368c5 100644 --- a/lib/DBIx/Class.pm +++ b/lib/DBIx/Class.pm @@ -57,8 +57,16 @@ sub _attr_cache { }; } +# *DO NOT* change this URL nor the identically named =head1 below +# it is linked throughout the ecosystem +sub DBIx::Class::_ENV_::HELP_URL () { + 'http://p3rl.org/DBIx::Class#GETTING_HELP/SUPPORT' +} + 1; +__END__ + =encoding UTF-8 =head1 NAME @@ -74,12 +82,6 @@ L in the order presented there. =cut -# *DO NOT* change this heading - it is linked throughout the ecosystem - -sub DBIx::Class::_ENV_::HELP_URL () { - 'http://p3rl.org/DBIx::Class#GETTING_HELP/SUPPORT' -} - =head1 GETTING HELP/SUPPORT Due to the sheer size of its problem domain, DBIx::Class is a relatively From 5529838f7afff91467ef2664087999ab222da48d Mon Sep 17 00:00:00 2001 From: Peter Rabbitson Date: Sun, 21 Sep 2014 20:00:27 +0200 Subject: [PATCH 204/548] A massive amount of link fixes (just links, almost no rewording) kryde++ for the insanely useful tool App::PodLinkCheck. 
It is a bit heavy and way too noisy for automated use, but with some handholding it is scarily effective (as can be seen by the sheer size of this commit) --- lib/DBIx/Class/Exception.pm | 4 +-- lib/DBIx/Class/InflateColumn/DateTime.pm | 4 +-- lib/DBIx/Class/Manual/Cookbook.pod | 35 ++++++++++--------- lib/DBIx/Class/Manual/FAQ.pod | 12 +++---- lib/DBIx/Class/Manual/Features.pod | 6 ++-- lib/DBIx/Class/Manual/Glossary.pod | 8 ++--- lib/DBIx/Class/Optional/Dependencies.pm | 2 +- lib/DBIx/Class/Ordered.pm | 2 +- lib/DBIx/Class/Relationship/Base.pm | 4 +-- lib/DBIx/Class/ResultSet.pm | 8 ++--- lib/DBIx/Class/ResultSource.pm | 7 ++-- lib/DBIx/Class/Row.pm | 8 ++--- lib/DBIx/Class/Schema.pm | 17 ++++----- lib/DBIx/Class/Schema/Versioned.pm | 2 +- lib/DBIx/Class/Storage/DBI.pm | 17 +++++++-- .../Storage/DBI/ADO/Microsoft_SQL_Server.pm | 2 +- lib/DBIx/Class/Storage/DBI/AutoCast.pm | 3 +- lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm | 2 +- lib/DBIx/Class/Storage/DBI/Replicated.pm | 7 ++-- .../Class/Storage/DBI/Replicated/Balancer.pm | 2 +- .../Storage/DBI/Replicated/Introduction.pod | 12 +++---- .../Class/Storage/DBI/Replicated/Replicant.pm | 4 ++- lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm | 16 ++++----- .../Storage/DBI/Sybase/ASE/NoBindVars.pm | 6 ++-- 24 files changed, 108 insertions(+), 82 deletions(-) diff --git a/lib/DBIx/Class/Exception.pm b/lib/DBIx/Class/Exception.pm index 07f587ddb..b3e5cbefa 100644 --- a/lib/DBIx/Class/Exception.pm +++ b/lib/DBIx/Class/Exception.pm @@ -40,8 +40,8 @@ overload fallback to give natural boolean/numeric values. This is meant for internal use by L's C code, and shouldn't be used directly elsewhere. -Expects a scalar exception message. The optional argument -C<$stacktrace> tells it to output a full trace similar to L. +Expects a scalar exception message. The optional boolean C<$stacktrace> +causes it to output a full trace similar to L. DBIx::Class::Exception->throw('Foo'); try { ... } catch { DBIx::Class::Exception->throw(shift) } diff --git a/lib/DBIx/Class/InflateColumn/DateTime.pm b/lib/DBIx/Class/InflateColumn/DateTime.pm index 3162223fb..eeed3f362 100644 --- a/lib/DBIx/Class/InflateColumn/DateTime.pm +++ b/lib/DBIx/Class/InflateColumn/DateTime.pm @@ -78,8 +78,8 @@ deflation/inflation as defined in the storage class. For instance, for a C field the methods C and C would be called on deflation/inflation. If the storage class does not provide a specialized inflator/deflator, C<[parse|format]_datetime> will -be used as a fallback. See L for more information on -date formatting. +be used as a fallback. See L +for more information on date formatting. For more help with using components, see L. diff --git a/lib/DBIx/Class/Manual/Cookbook.pod b/lib/DBIx/Class/Manual/Cookbook.pod index 0cb560bdf..c18ab66a5 100644 --- a/lib/DBIx/Class/Manual/Cookbook.pod +++ b/lib/DBIx/Class/Manual/Cookbook.pod @@ -146,8 +146,9 @@ Next, you can execute your complex query using bind parameters like this: ); ... and you'll get back a perfect L (except, of course, -that you cannot modify the rows it contains, e.g. cannot call L, -L, ... on it). +that you cannot modify the rows it contains, e.g. cannot call +L or +L on it). Note that you cannot have bind parameters unless is_virtual is set to true. @@ -448,8 +449,8 @@ See also L is either too slow or does -not work at all, you can try the +and L is either +too slow or does not work at all, you can try the L L attribute, which skips over records to simulate limits in the Perl layer. @@ -1065,7 +1066,7 @@ See L for more documentation. 
Sometimes you have a (set of) result objects that you want to put into a resultset without the need to hit the DB again. You can do that by using the -L method: +L method: my @uploadable_groups; while (my $group = $groups->next) { @@ -1380,9 +1381,11 @@ row. } In this example it might be hard to see where the rollbacks, releases and -commits are happening, but it works just the same as for plain L<>: If -the C-block around C fails, a rollback is issued. If the C -succeeds, the transaction is committed (or the savepoint released). +commits are happening, but it works just the same as for plain +L: If the L-block +around L fails, a rollback is issued. +If the L succeeds, the transaction is committed +(or the savepoint released). While you can get more fine-grained control using C, C and C, it is strongly recommended to use C with coderefs. @@ -1840,7 +1843,7 @@ See L for further details. =head3 Oracle Information about Oracle support for unicode can be found in -L. +L. =head3 PostgreSQL @@ -2202,10 +2205,9 @@ classes dynamically based on the database schema then there will be a significant startup delay. For production use a statically defined schema (which can be generated -using L to dump -the database schema once - see +using L to dump the database schema once - see L and -L for more +L for more details on creating static schemas from a database). =head2 Move Common Startup into a Base Class @@ -2251,10 +2253,11 @@ avoiding L. =head2 Cached statements -L normally caches all statements with L<< prepare_cached()|DBI/prepare_cached >>. -This is normally a good idea, but if too many statements are cached, the database may use too much -memory and may eventually run out and fail entirely. If you suspect this may be the case, you may want -to examine DBI's L<< CachedKids|DBI/CachedKidsCachedKids_(hash_ref) >> hash: +L normally caches all statements with +L. This is normally a good idea, but if +too many statements are cached, the database may use too much memory and may +eventually run out and fail entirely. If you suspect this may be the case, +you may want to examine DBI's L hash: # print all currently cached prepared statements print for keys %{$schema->storage->dbh->{CachedKids}}; diff --git a/lib/DBIx/Class/Manual/FAQ.pod b/lib/DBIx/Class/Manual/FAQ.pod index bef779e57..48b3d56cd 100644 --- a/lib/DBIx/Class/Manual/FAQ.pod +++ b/lib/DBIx/Class/Manual/FAQ.pod @@ -78,7 +78,7 @@ lot later. If your database server allows you to run queries across multiple databases at once, then so can DBIx::Class. All you need to do is make sure you write the database name as part of the -L call. Eg: +L call. Eg: __PACKAGE__->table('mydb.mytablename'); @@ -87,10 +87,10 @@ L call. =item .. use DBIx::Class across PostgreSQL/DB2/Oracle schemas? -Add the name of the schema to the L -as part of the name, and make sure you give the one user you are going -to connect with has permissions to read/write all the schemas/tables as -necessary. +Add the name of the schema to the table name, when invoking +L, and make sure the user +you are about to connect as has permissions to read/write all the +schemas/tables as necessary. =back @@ -154,7 +154,7 @@ See L. =item .. use a relationship? Use its name. An accessor is created using the name. See examples in -L. +L. 
=back diff --git a/lib/DBIx/Class/Manual/Features.pod b/lib/DBIx/Class/Manual/Features.pod index a28bb3558..523e92700 100644 --- a/lib/DBIx/Class/Manual/Features.pod +++ b/lib/DBIx/Class/Manual/Features.pod @@ -479,7 +479,9 @@ on our system (Microsoft SQL) is: ) rpt_score WHERE rno__row__index BETWEEN 1 AND 1 -See: L, L, and L. +See: L, +L, and +L. =head2 bonus rel methods @@ -661,5 +663,5 @@ Better: price => \['price + ?', [inc => $inc]], }); -See L +See L diff --git a/lib/DBIx/Class/Manual/Glossary.pod b/lib/DBIx/Class/Manual/Glossary.pod index 136355da7..334637d33 100644 --- a/lib/DBIx/Class/Manual/Glossary.pod +++ b/lib/DBIx/Class/Manual/Glossary.pod @@ -66,11 +66,11 @@ relationships must be used. A Schema object represents your entire table collection, plus the connection to the database. You can create one or more schema objects, connected to various databases, with various users, using the same set -of table L definitions. +of table L definitions. At least one L class is needed per database. -=head2 Result class +=head2 Result Class A Result class defines both a source of data (usually one per table), and the methods that will be available in the L objects @@ -87,7 +87,7 @@ ResultSource objects represent the source of your data, these are sometimes (incorrectly) called table objects. ResultSources do not need to be directly created, a ResultSource -instance is created for each L in your L, by +instance is created for each L in your L, by the proxied methods C and C. See also: L @@ -148,7 +148,7 @@ to issue multiple SQL queries. A normalised database is a sane database. Each table contains only data belonging to one concept, related tables refer to the key field or fields of each other. Some links to webpages about normalisation -can be found in L. +can be found in L. =head2 Related data diff --git a/lib/DBIx/Class/Optional/Dependencies.pm b/lib/DBIx/Class/Optional/Dependencies.pm index 76229c237..468237fe1 100644 --- a/lib/DBIx/Class/Optional/Dependencies.pm +++ b/lib/DBIx/Class/Optional/Dependencies.pm @@ -154,7 +154,7 @@ my $reqs = { }, pod => { title => 'Storage::DBI::deploy()', - desc => 'Modules required for L and L', + desc => 'Modules required for L and L', }, }, diff --git a/lib/DBIx/Class/Ordered.pm b/lib/DBIx/Class/Ordered.pm index 539abd833..e227d23ad 100644 --- a/lib/DBIx/Class/Ordered.pm +++ b/lib/DBIx/Class/Ordered.pm @@ -147,7 +147,7 @@ Returns an B resultset of all other objects in the same group excluding the one you called it on. The ordering is a backwards-compatibility artifact - if you need -a resultset with no ordering applied use L +a resultset with no ordering applied use C<_siblings> =cut sub siblings { diff --git a/lib/DBIx/Class/Relationship/Base.pm b/lib/DBIx/Class/Relationship/Base.pm index 56adec8f7..39d74a8d1 100644 --- a/lib/DBIx/Class/Relationship/Base.pm +++ b/lib/DBIx/Class/Relationship/Base.pm @@ -814,8 +814,8 @@ call set_from_related on the book. This is called internally when you pass existing objects as values to L, or pass an object to a belongs_to accessor. -The columns are only set in the local copy of the object, call L to -set them in the storage. +The columns are only set in the local copy of the object, call +L to update them in the storage. =cut diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm index a699745da..08bbb4323 100644 --- a/lib/DBIx/Class/ResultSet.pm +++ b/lib/DBIx/Class/ResultSet.pm @@ -58,7 +58,7 @@ just stores all the conditions needed to create the query. 
A basic ResultSet representing the data of an entire table is returned by calling C on a L and passing in a -L name. +L name. my $users_rs = $schema->resultset('User'); @@ -1147,7 +1147,7 @@ You most likely want to use L with specific operators. For more information, see L. -This method is deprecated and will be removed in 0.09. Use L +This method is deprecated and will be removed in 0.09. Use L instead. An example conversion is: ->search_like({ foo => 'bar' }); @@ -1542,8 +1542,8 @@ L<"table"|DBIx::Class::Manual::Glossary/"ResultSource"> class. Note that changing the result_class will also remove any components that were originally loaded in the source class via -L. Any overloaded methods -in the original source class will not run. +L. +Any overloaded methods in the original source class will not run. =cut diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index de51b5e4a..237227ace 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -77,7 +77,7 @@ More specifically, the L base class pulls in the L component, which defines the L method. When called, C
creates and stores an instance of -L. Luckily, to use tables as result +L. Luckily, to use tables as result sources, you don't need to remember any of this. Result sources representing select queries, or views, can also be @@ -86,7 +86,8 @@ created, see L for full details. =head2 Finding result source objects As mentioned above, a result source instance is created and stored for -you when you define a L. +you when you define a +L. You can retrieve the result source at runtime in the following ways: @@ -582,7 +583,7 @@ sub remove_column { shift->remove_columns(@_); } # DO NOT CHANGE THIS TO GLOB Defines one or more columns as primary key for this source. Must be called after L. -Additionally, defines a L +Additionally, defines a L named C. Note: you normally do want to define a primary key on your sources diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm index 630d2bc84..861a54b14 100644 --- a/lib/DBIx/Class/Row.pm +++ b/lib/DBIx/Class/Row.pm @@ -52,7 +52,7 @@ All "Row objects" derived from a Schema-attached L object (such as a typical C<< L-> L >> call) are actually Result instances, based on your application's -L. +L. L implements most of the row-based communication with the underlying storage, but a Result class B. @@ -480,8 +480,8 @@ sub insert { Indicates whether the object exists as a row in the database or not. This is set to true when L, -L or L -are used. +L or L +are invoked. Creating a result object using L, or calling L on one, sets it to false. @@ -1325,7 +1325,7 @@ sub inflate_result { =back -Ls the object if it's already in the database, according to +Ls the object if it's already in the database, according to L, else Ls it. =head2 insert_or_update diff --git a/lib/DBIx/Class/Schema.pm b/lib/DBIx/Class/Schema.pm index c83dc87d0..dca80c893 100644 --- a/lib/DBIx/Class/Schema.pm +++ b/lib/DBIx/Class/Schema.pm @@ -108,11 +108,12 @@ are no matching Result classes like this: load_namespaces found ResultSet class $classname with no corresponding Result class -If a Result class is found to already have a ResultSet class set using -L to some other class, you will be warned like this: +If a ResultSource instance is found to already have a ResultSet class set +using L to some +other class, you will be warned like this: - We found ResultSet class '$rs_class' for '$result', but it seems - that you had already set '$result' to use '$rs_set' instead + We found ResultSet class '$rs_class' for '$result_class', but it seems + that you had already set '$result_class' to use '$rs_set' instead =head3 Examples @@ -1117,8 +1118,8 @@ sub deploy { A convenient shortcut to C<< $self->storage->deployment_statements($self, @args) >>. -Returns the SQL statements used by L and -L. +Returns the statements used by L and +L. =cut @@ -1218,8 +1219,8 @@ sub thaw { =head2 freeze -This doesn't actually do anything more than call L, it is just -provided here for symmetry. +This doesn't actually do anything beyond calling L, +it is just provided here for symmetry. =cut diff --git a/lib/DBIx/Class/Schema/Versioned.pm b/lib/DBIx/Class/Schema/Versioned.pm index e92f356b9..74fbd8d2d 100644 --- a/lib/DBIx/Class/Schema/Versioned.pm +++ b/lib/DBIx/Class/Schema/Versioned.pm @@ -239,7 +239,7 @@ Call this to initialise a previously unversioned database. The table 'dbix_class Takes one argument which should be the version that the database is currently at. Defaults to the return value of L. -See L for more details. +See L for more details. 
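A minimal sketch of initialising versioning on a previously unversioned database, as described in the Versioned POD above. The schema class name, connection variables and version string are illustrative, and this assumes the schema class loads DBIx::Class::Schema::Versioned.

    # assumes MyApp::Schema does __PACKAGE__->load_components('Schema::Versioned')
    my $schema = MyApp::Schema->connect($dsn, $user, $pass);

    # record that the already-deployed database is at version 1.0;
    # with no argument the schema's own declared version is used instead
    $schema->install('1.0');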
=cut diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm index 9a456d4c1..473e50347 100644 --- a/lib/DBIx/Class/Storage/DBI.pm +++ b/lib/DBIx/Class/Storage/DBI.pm @@ -1402,7 +1402,19 @@ sub disconnect_call_do_sql { $self->_do_query(@_); } -# override in db-specific backend when necessary +=head2 connect_call_datetime_setup + +A no-op stub method, provided so that one can always safely supply the +L + + on_connect_call => 'datetime_setup' + +This way one does not need to know in advance whether the underlying +storage requires any sort of hand-holding when dealing with calendar +data. + +=cut + sub connect_call_datetime_setup { 1 } sub _do_query { @@ -2982,7 +2994,8 @@ sub create_ddl_dir { =back -Returns the statements used by L and L. +Returns the statements used by L +and L. The L (not L) database driver name can be explicitly provided in C<$type>, otherwise the result of L is used as default. diff --git a/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm b/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm index 9bb93f94e..7a1b03938 100644 --- a/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm +++ b/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm @@ -60,7 +60,7 @@ size of the bind sizes in the first prepare call: L -The C workaround is used (see L) with the +The C workaround is used (see L) with the approximate maximum size of the data_type of the bound column, or 8000 (maximum VARCHAR size) if the data_type is not available. diff --git a/lib/DBIx/Class/Storage/DBI/AutoCast.pm b/lib/DBIx/Class/Storage/DBI/AutoCast.pm index db9fb8b5e..aa08f5050 100644 --- a/lib/DBIx/Class/Storage/DBI/AutoCast.pm +++ b/lib/DBIx/Class/Storage/DBI/AutoCast.pm @@ -23,7 +23,8 @@ statements with values bound to columns or conditions that are not strings will throw implicit type conversion errors. As long as a column L is -defined and resolves to a base RDBMS native type via L as +defined and resolves to a base RDBMS native type via +L<_native_data_type|DBIx::Class::Storage::DBI/_native_data_type> as defined in your Storage driver, the placeholder for this column will be converted to: diff --git a/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm b/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm index 84799f14c..92cb34fe5 100644 --- a/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm +++ b/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm @@ -635,7 +635,7 @@ Unfortunately, Oracle doesn't support identifiers over 30 chars in length, so the L name is shortened and appended with half of an MD5 hash. -See L. +See L. =cut diff --git a/lib/DBIx/Class/Storage/DBI/Replicated.pm b/lib/DBIx/Class/Storage/DBI/Replicated.pm index 0a73c4b8d..b3247abff 100644 --- a/lib/DBIx/Class/Storage/DBI/Replicated.pm +++ b/lib/DBIx/Class/Storage/DBI/Replicated.pm @@ -405,7 +405,7 @@ for my $method (@{$method_dispatch->{unimplemented}}) { =head2 read_handler -Defines an object that implements the read side of L. +Defines an object that implements the read side of L. =cut @@ -418,7 +418,7 @@ has 'read_handler' => ( =head2 write_handler -Defines an object that implements the write side of L, +Defines an object that implements the write side of L, as well as methods that don't write or read that can be called on only one storage, methods that return a C<$dbh>, and any methods that don't make sense to run on a replicant. 
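A short sketch of the connect-time usage that the new connect_call_datetime_setup stub above is meant to make safe everywhere; the schema class and DSN variables are placeholders.

    my $schema = MyApp::Schema->connect(
        $dsn, $user, $pass,
        {
            # storages that need no calendar setup simply hit the
            # no-op stub documented above
            on_connect_call => 'datetime_setup',
        },
    );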
@@ -590,7 +590,8 @@ sub _build_read_handler { =head2 around: connect_replicants All calls to connect_replicants needs to have an existing $schema tacked onto -top of the args, since L needs it, and any C +top of the args, since L needs it, and any +L options merged with the master, with replicant opts having higher priority. =cut diff --git a/lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm b/lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm index de9c2e923..f07a958d0 100644 --- a/lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm +++ b/lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm @@ -97,7 +97,7 @@ This class defines the following methods. =head2 _build_current_replicant -Lazy builder for the L attribute. +Lazy builder for the L attribute. =cut diff --git a/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod b/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod index 0b49b984f..77f2d082b 100644 --- a/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod +++ b/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod @@ -6,19 +6,19 @@ DBIx::Class::Storage::DBI::Replicated::Introduction - Minimum Need to Know =head1 SYNOPSIS -This is an introductory document for L. +This is an introductory document for L. This document is not an overview of what replication is or why you should be -using it. It is not a document explaining how to setup MySQL native replication -either. Copious external resources are available for both. This document +using it. It is not a document explaining how to setup MySQL native replication +either. Copious external resources are available for both. This document presumes you have the basics down. =head1 DESCRIPTION -L supports a framework for using database replication. This system +L supports a framework for using database replication. This system is integrated completely, which means once it's setup you should be able to automatically just start using a replication cluster without additional work or -changes to your code. Some caveats apply, primarily related to the proper use +changes to your code. Some caveats apply, primarily related to the proper use of transactions (you are wrapping all your database modifying statements inside a transaction, right ;) ) however in our experience properly written DBIC will work transparently with Replicated storage. @@ -137,7 +137,7 @@ will result in increased database loads, so choose a number with care. Our experience is that setting the number around 5 seconds results in a good performance / integrity balance. -'master_read_weight' is an option associated with the ::Random balancer. It +'master_read_weight' is an option associated with the ::Random balancer. It allows you to let the master be read from. I usually leave this off (default is off). diff --git a/lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm b/lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm index a541e7df2..344e1a662 100644 --- a/lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm +++ b/lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm @@ -38,7 +38,9 @@ when it gets too far behind the master, if it stops replicating, etc. This attribute DOES NOT reflect a replicant's internal status, i.e. if it is properly replicating from a master and has not fallen too many seconds behind a -reliability threshold. For that, use L and L. +reliability threshold. For that, use +L and +L. 
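For orientation, a rough sketch of wiring up replicated storage as discussed in the Introduction pod above. Treat the option names (balancer_type, the replicant DSN list) as assumptions drawn from the Replicated storage documentation rather than from this patch.

    # assumes MyApp::Schema plus the $*_dsn / $user / $pass values exist
    my $schema = MyApp::Schema->clone;
    $schema->storage_type([ '::DBI::Replicated', { balancer_type => '::Random' } ]);
    $schema->connection($master_dsn, $user, $pass);

    # reads are balanced across these; writes still go to the master
    $schema->storage->connect_replicants(
        [ $replicant1_dsn, $user, $pass, { AutoCommit => 1 } ],
        [ $replicant2_dsn, $user, $pass, { AutoCommit => 1 } ],
    );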
Since the implementation of those functions database specific (and not all DBIC supported DBs support replication) you should refer your database-specific storage driver for more information. diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm index 252a50caa..eceef2016 100644 --- a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm +++ b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm @@ -233,7 +233,7 @@ Also sets the C value for blob write operations. The default is C<1>, but C<0> is better if your database is configured for it. See -L. +L. =cut @@ -1082,15 +1082,15 @@ for L. =head1 LIMITED QUERIES -Because ASE does not have a good way to limit results in SQL that works for all -types of queries, the limit dialect is set to -L. +Because ASE does not have a good way to limit results in SQL that works for +all types of queries, the limit dialect is set to +L. Fortunately, ASE and L support cursors properly, so when -L is too slow you can use -the L -L attribute to simulate limited queries by skipping over -records. +L is too slow +you can use the L +L attribute to simulate limited queries by skipping +over records. =head1 TEXT/IMAGE COLUMNS diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm b/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm index b5ade315f..107a0e059 100644 --- a/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm +++ b/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm @@ -81,8 +81,10 @@ You can also enable this driver explicitly using: $schema->storage_type('::DBI::Sybase::ASE::NoBindVars'); $schema->connect($dsn, $user, $pass, \%opts); -See the discussion in L<< DBD::Sybase/Using ? Placeholders & bind parameters to -$sth->execute >> for details on the pros and cons of using placeholders. +See the discussion in +L<< DBD::Sybase/Using ? Placeholders & bind parameters to $sth->execute >> +for details on the pros and cons of using placeholders with this particular +driver. One advantage of not using placeholders is that C
is replaced with the table name. +The options hashref will be passed to +L; the intention being to +allow the C flag to be set. You should avoid putting +C, C, or C in here. + Unique constraints are used, for example, when you pass the constraint name as the C attribute to L. Then only columns in the constraint are searched. @@ -736,14 +743,20 @@ the result source. sub add_unique_constraint { my $self = shift; - if (@_ > 2) { + if ((grep {ref $_ eq 'ARRAY'} @_) > 1) { $self->throw_exception( 'add_unique_constraint() does not accept multiple constraints, use ' . 'add_unique_constraints() instead' ); } + my $opts; my $cols = pop @_; + if (ref $cols eq 'HASH') { + $opts = $cols; + $cols = pop @_; + } + if (ref $cols ne 'ARRAY') { $self->throw_exception ( 'Expecting an arrayref of constraint columns, got ' . ($cols||'NOTHING') @@ -760,8 +773,11 @@ sub add_unique_constraint { } my %unique_constraints = $self->unique_constraints; + my %unique_constraints_extra = $self->unique_constraints_extra; $unique_constraints{$name} = $cols; + $unique_constraints_extra{$name} = $opts; $self->_unique_constraints(\%unique_constraints); + $self->_unique_constraints_extra(\%unique_constraints_extra); } =head2 add_unique_constraints @@ -877,6 +893,30 @@ sub unique_constraints { return %{shift->_unique_constraints||{}}; } +=head2 unique_constraints_extra + +=over 4 + +=item Arguments: none + +=item Return Value: Hash of unique constraint \%options + +=back + + my %uq_extras = $source->unique_constraints_extra(); + +Read-only accessor which returns a hash of the options provided to +unique constraints. + +The hash is keyed by constraint name, and the values are the options +hashrefs as provided to L. + +=cut + +sub unique_constraints_extra { + return %{shift->_unique_constraints_extra||{}}; +} + =head2 unique_constraint_names =over 4 diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 59aec2af1..7552496d1 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -150,12 +150,14 @@ sub parse { $table->primary_key(@primary) if @primary; my %unique_constraints = $source->unique_constraints; + my %unique_constraints_extra = $source->unique_constraints_extra; foreach my $uniq (sort keys %unique_constraints) { if (!$source->_compare_relationship_keys($unique_constraints{$uniq}, \@primary)) { $table->add_constraint( type => 'unique', name => $uniq, - fields => $unique_constraints{$uniq} + fields => $unique_constraints{$uniq}, + %{ $unique_constraints_extra{$uniq} // {} }, ); } } From 05e063c83f4d30dd1d1f087e2e7dfd39535c4614 Mon Sep 17 00:00:00 2001 From: Alastair McGowan-Douglas Date: Tue, 3 Nov 2015 13:29:49 +0000 Subject: [PATCH 548/548] No longer require caveat on unique \%options --- lib/DBIx/Class/ResultSource.pm | 4 ++-- lib/SQL/Translator/Parser/DBIx/Class.pm | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm index 23be5ea36..12eccc1a6 100644 --- a/lib/DBIx/Class/ResultSource.pm +++ b/lib/DBIx/Class/ResultSource.pm @@ -728,8 +728,8 @@ name. The options hashref will be passed to L; the intention being to -allow the C flag to be set. You should avoid putting -C, C, or C in here. +allow the C flag to be set, but you may find others +useful. Note that C, C, and C will be ignored. Unique constraints are used, for example, when you pass the constraint name as the C attribute to L. 
Then diff --git a/lib/SQL/Translator/Parser/DBIx/Class.pm b/lib/SQL/Translator/Parser/DBIx/Class.pm index 7552496d1..ba5e75d13 100644 --- a/lib/SQL/Translator/Parser/DBIx/Class.pm +++ b/lib/SQL/Translator/Parser/DBIx/Class.pm @@ -154,10 +154,10 @@ sub parse { foreach my $uniq (sort keys %unique_constraints) { if (!$source->_compare_relationship_keys($unique_constraints{$uniq}, \@primary)) { $table->add_constraint( + %{ $unique_constraints_extra{$uniq} // {} }, type => 'unique', name => $uniq, fields => $unique_constraints{$uniq}, - %{ $unique_constraints_extra{$uniq} // {} }, ); } }
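Finally, a usage sketch for the \%options support added in the last two patches. The calling convention (options hashref as the trailing argument after the column list) comes from the add_unique_constraint() change itself; the specific 'deferrable' key is only an assumed example of a SQL::Translator constraint flag one might pass through.

    package MyApp::Schema::Result::Artist;    # illustrative result class
    use base 'DBIx::Class::Core';

    __PACKAGE__->table('artist');
    __PACKAGE__->add_columns(qw( artistid name ));
    __PACKAGE__->set_primary_key('artistid');

    # constraint name => column list, with a trailing options hashref that is
    # forwarded to SQL::Translator's add_constraint at deploy time
    __PACKAGE__->add_unique_constraint(
        artist_name_unique => [ qw(name) ],
        { deferrable => 1 },    # 'deferrable' is an assumed example key
    );

    1;

Note that the final patch above places the extra options ahead of type, name and fields in the parser call, so user-supplied options cannot override those three keys.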