AUTHORS 0206
Changes 079
LICENSE 0412
MANIFEST 1833
META.yml 5202
Makefile.PL 2725
README 145301
examples/Schema/db/example.db --
lib/DBIx/Class/AccessorGroup.pm 57
lib/DBIx/Class/Admin/Usage.pm 11
lib/DBIx/Class/Admin.pm 69
lib/DBIx/Class/CDBICompat/AbstractSearch.pm 013
lib/DBIx/Class/CDBICompat/AccessorMapping.pm 29
lib/DBIx/Class/CDBICompat/ColumnsAsHash.pm 013
lib/DBIx/Class/CDBICompat/Constructor.pm 1311
lib/DBIx/Class/CDBICompat/Copy.pm 013
lib/DBIx/Class/CDBICompat/ImaDBI.pm 2115
lib/DBIx/Class/CDBICompat/Iterator.pm 113
lib/DBIx/Class/CDBICompat/NoObjectIndex.pm 013
lib/DBIx/Class/CDBICompat/Relationship.pm 916
lib/DBIx/Class/CDBICompat/Relationships.pm 1332
lib/DBIx/Class/CDBICompat/SQLTransformer.pm 113
lib/DBIx/Class/CDBICompat.pm 89
lib/DBIx/Class/Carp.pm 013
lib/DBIx/Class/Core.pm 69
lib/DBIx/Class/Cursor.pm 013
lib/DBIx/Class/DB.pm 47
lib/DBIx/Class/Exception.pm 69
lib/DBIx/Class/FilterColumn.pm 4074
lib/DBIx/Class/FilterColumn.pod 112
lib/DBIx/Class/InflateColumn/DateTime.pm 119
lib/DBIx/Class/InflateColumn/DateTime.pod 109
lib/DBIx/Class/InflateColumn/File.pm 1020
lib/DBIx/Class/InflateColumn.pm 4372
lib/DBIx/Class/InflateColumn.pod 107
lib/DBIx/Class/Manual/Component.pod 29
lib/DBIx/Class/Manual/Cookbook.pod 1630
lib/DBIx/Class/Manual/DocMap.pod 012
lib/DBIx/Class/Manual/Example.pod 58
lib/DBIx/Class/Manual/FAQ.pod 819
lib/DBIx/Class/Manual/Features.pod 517
lib/DBIx/Class/Manual/Glossary.pod 811
lib/DBIx/Class/Manual/Intro.pod 110
lib/DBIx/Class/Manual/Joining.pod 111
lib/DBIx/Class/Manual/QuickStart.pod 011
lib/DBIx/Class/Manual/Reading.pod 67
lib/DBIx/Class/Manual/ResultClass.pod 47
lib/DBIx/Class/Manual/Troubleshooting.pod 210
lib/DBIx/Class/Manual.pod 19
lib/DBIx/Class/Optional/Dependencies.pm 1114
lib/DBIx/Class/Optional/Dependencies.pod 1212
lib/DBIx/Class/Ordered.pm 3751
lib/DBIx/Class/PK/Auto/DB2.pm 69
lib/DBIx/Class/PK/Auto/MSSQL.pm 69
lib/DBIx/Class/PK/Auto/MySQL.pm 69
lib/DBIx/Class/PK/Auto/Oracle.pm 69
lib/DBIx/Class/PK/Auto/Pg.pm 69
lib/DBIx/Class/PK/Auto/SQLite.pm 69
lib/DBIx/Class/PK/Auto.pm 69
lib/DBIx/Class/PK.pm 79
lib/DBIx/Class/PK.pod 47
lib/DBIx/Class/Relationship/Accessor.pm 6574
lib/DBIx/Class/Relationship/Base.pm 109111
lib/DBIx/Class/Relationship/BelongsTo.pm 103
lib/DBIx/Class/Relationship/HasMany.pm 01
lib/DBIx/Class/Relationship/HasOne.pm 13
lib/DBIx/Class/Relationship/ManyToMany.pm 32
lib/DBIx/Class/Relationship/ProxyMethods.pm 1714
lib/DBIx/Class/Relationship.pm 1313
lib/DBIx/Class/ResultClass/HashRefInflator.pm 212
lib/DBIx/Class/ResultSet.pm 419549
lib/DBIx/Class/ResultSetColumn.pm 1014
lib/DBIx/Class/ResultSetManager.pm 013
lib/DBIx/Class/ResultSource/RowParser/Util.pm 33
lib/DBIx/Class/ResultSource/Table.pm 68
lib/DBIx/Class/ResultSource/Table.pod 47
lib/DBIx/Class/ResultSource/View.pm 68
lib/DBIx/Class/ResultSource/View.pod 47
lib/DBIx/Class/ResultSource.pm 149536
lib/DBIx/Class/ResultSourceHandle.pm 310
lib/DBIx/Class/ResultSourceProxy/Table.pm 139
lib/DBIx/Class/ResultSourceProxy/Table.pod 47
lib/DBIx/Class/ResultSourceProxy.pm 79
lib/DBIx/Class/Row.pm 116151
lib/DBIx/Class/SQLMaker/LimitDialects.pm 4151
lib/DBIx/Class/SQLMaker/OracleJoins.pm 1221
lib/DBIx/Class/SQLMaker/OracleJoins.pod 087
lib/DBIx/Class/SQLMaker.pm 2442
lib/DBIx/Class/SQLMaker.pod 057
lib/DBIx/Class/Schema/Versioned.pm 1319
lib/DBIx/Class/Schema.pm 2722
lib/DBIx/Class/Serialize/Storable.pm 67
lib/DBIx/Class/StartupCheck.pm 1611
lib/DBIx/Class/Storage/BlockRunner.pm 1532
lib/DBIx/Class/Storage/DBI/ACCESS.pm 610
lib/DBIx/Class/Storage/DBI/ADO/MS_Jet/Cursor.pm 69
lib/DBIx/Class/Storage/DBI/ADO/MS_Jet.pm 610
lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server/Cursor.pm 69
lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm 913
lib/DBIx/Class/Storage/DBI/ADO.pm 610
lib/DBIx/Class/Storage/DBI/AutoCast.pm 59
lib/DBIx/Class/Storage/DBI/Cursor.pm 324
lib/DBIx/Class/Storage/DBI/DB2.pm 610
lib/DBIx/Class/Storage/DBI/Firebird/Common.pm 610
lib/DBIx/Class/Storage/DBI/Firebird.pm 129
lib/DBIx/Class/Storage/DBI/IdentityInsert.pm 47
lib/DBIx/Class/Storage/DBI/Informix.pm 610
lib/DBIx/Class/Storage/DBI/InterBase.pm 610
lib/DBIx/Class/Storage/DBI/MSSQL.pm 67
lib/DBIx/Class/Storage/DBI/NoBindVars.pm 47
lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm 610
lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm 67
lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm 411
lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm 610
lib/DBIx/Class/Storage/DBI/ODBC/SQL_Anywhere.pm 57
lib/DBIx/Class/Storage/DBI/ODBC.pm 610
lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm 88
lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm 107
lib/DBIx/Class/Storage/DBI/Oracle.pm 67
lib/DBIx/Class/Storage/DBI/Pg.pm 67
lib/DBIx/Class/Storage/DBI/Replicated/Balancer/First.pm 47
lib/DBIx/Class/Storage/DBI/Replicated/Balancer/Random.pm 47
lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm 58
lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod 1513
lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm 47
lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm 510
lib/DBIx/Class/Storage/DBI/Replicated/Types.pm 80
lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm 47
lib/DBIx/Class/Storage/DBI/Replicated.pm 1817
lib/DBIx/Class/Storage/DBI/SQLAnywhere/Cursor.pm 610
lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm 67
lib/DBIx/Class/Storage/DBI/SQLite.pm 1527
lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm 911
lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm 2020
lib/DBIx/Class/Storage/DBI/Sybase/FreeTDS.pm 69
lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm 67
lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server/NoBindVars.pm 67
lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm 610
lib/DBIx/Class/Storage/DBI/Sybase.pm 610
lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm 47
lib/DBIx/Class/Storage/DBI/mysql.pm 1316
lib/DBIx/Class/Storage/DBI.pm 205226
lib/DBIx/Class/Storage/DBIHacks.pm 115461
lib/DBIx/Class/Storage/Statistics.pm 4283
lib/DBIx/Class/Storage/TxnScopeGuard.pm 68
lib/DBIx/Class/Storage.pm 913
lib/DBIx/Class/UTF8Columns.pm 47
lib/DBIx/Class/_Util.pm 20105
lib/DBIx/Class.pm 27144
lib/DBIx/Class.pod 0656
lib/SQL/Translator/Parser/DBIx/Class.pm 1527
lib/SQL/Translator/Producer/DBIx/Class/File.pm 011
maint/Makefile.PL.inc/11_authortests.pl 55
maint/Makefile.PL.inc/12_authordeps.pl 03
maint/Makefile.PL.inc/21_meta_noindex.pl 190
maint/Makefile.PL.inc/21_set_meta.pl 056
maint/Makefile.PL.inc/29_handle_version.pl 1210
maint/Makefile.PL.inc/52_autogen_README.pl 230
maint/Makefile.PL.inc/53_autogen_pod.pl 027
maint/Makefile.PL.inc/54_autogen_legalese_and_README.pl 042
maint/gen_pod_authors 027
maint/gen_pod_inherit 02
maint/getstatus 052
maint/travis-ci_scripts/10_before_install.bash 1880
maint/travis-ci_scripts/20_install.bash 860
maint/travis-ci_scripts/30_before_script.bash 2790
maint/travis-ci_scripts/40_script.bash 520
maint/travis-ci_scripts/50_after_failure.bash 110
maint/travis-ci_scripts/50_after_success.bash 150
maint/travis-ci_scripts/60_after_script.bash 110
maint/travis-ci_scripts/common.bash 1940
maint/travis-ci_scripts/lib/TAP/Harness/IgnoreNonessentialDzilAutogeneratedTests.pm 930
script/dbicadmin 11
t/100populate.t 50171
t/101populate_rs.t 1150
t/104view.t 02
t/18insert_default.t 235
t/40compose_connection.t 102
t/52leaks.t 213
t/53lean_startup.t 417
t/54taint.t 1456
t/55namespaces_cleaned.t 421
t/60core.t 28
t/61findnot.t 24
t/71mysql.t 3411
t/72pg.t 1226
t/73oracle.t 10
t/73oracle_blob.t 102
t/73oracle_hq.t 16
t/746mssql.t 169
t/746sybase.t 2929
t/747mssql_ado.t 51
t/74mssql.t 70
t/751msaccess.t 360
t/752sqlite.t 3555
t/76joins.t 998
t/76select.t 21
t/80unique.t 196
t/82cascade_copy.t 115
t/83cache.t 8353
t/84serialize.t 155
t/85utf8.t 2728
t/86might_have.t 2414
t/86sqlt.t 11
t/88result_set_column.t 2546
t/90join_torture.t 21
t/93autocast.t 3016
t/94versioning.t 011
t/98savepoints.t 1780
t/99dbic_sqlt_parser.t 022
t/admin/02ddl.t 35
t/cdbi/02-Film.t 11
t/cdbi/06-hasa.t 35
t/cdbi/09-has_many.t 11
t/cdbi/18-has_a.t 912
t/cdbi/68-inflate_has_a.t 11
t/cdbi/70_implicit_inflate.t 036
t/cdbi/71_column_object.t 029
t/cdbi/testlib/ColumnObject.pm 029
t/cdbi/testlib/DBIC/Test/SQLite.pm 05
t/cdbi/testlib/ImplicitInflate.pm 042
t/count/count_rs.t 4627
t/count/distinct.t 21
t/count/in_subquery.t 10
t/count/joined.t 1328
t/count/prefetch.t 21
t/delete/cascade_missing.t 22
t/inflate/datetime.t 048
t/inflate/datetime_mssql.t 70
t/inflate/datetime_oracle.t 3033
t/inflate/serialize.t 11
t/lib/DBIC/DebugObj.pm 500
t/lib/DBIC/SqlMakerTest.pm 1650
t/lib/DBICTest/Base.pm 012
t/lib/DBICTest/BaseResult.pm 41
t/lib/DBICTest/BaseResultSet.pm 51
t/lib/DBICTest/BaseSchema.pm 3254
t/lib/DBICTest/SQLTracerObj.pm 021
t/lib/DBICTest/Schema/Artist.pm 2834
t/lib/DBICTest/Schema/Artwork.pm 105
t/lib/DBICTest/Schema/Artwork_to_Artist.pm 166
t/lib/DBICTest/Schema/CD.pm 218
t/lib/DBICTest/Schema/Track.pm 1143
t/lib/DBICTest/Schema/TwoKeys.pm 11
t/lib/DBICTest/Schema.pm 1630
t/lib/DBICTest/Stats.pm 630
t/lib/DBICTest/Util/LeakTracer.pm 2351
t/lib/DBICTest/Util.pm 182
t/lib/DBICTest/WithTaint.pm 04
t/lib/DBICTest.pm 4946
t/lib/PrefetchBug.pm 110
t/multi_create/find_or_multicreate.t 071
t/multi_create/standard.t 46
t/ordered/unordered_movement.t 1114
t/prefetch/correlated.t 3627
t/prefetch/count.t 2013
t/prefetch/double_prefetch.t 21
t/prefetch/empty_cache.t 039
t/prefetch/false_colvalues.t 2415
t/prefetch/grouped.t 4828
t/prefetch/incomplete.t 21
t/prefetch/join_type.t 21
t/prefetch/manual.t 135
t/prefetch/multiple_hasmany.t 5031
t/prefetch/multiple_hasmany_torture.t 2449
t/prefetch/o2m_o2m_order_by_with_limit.t 21
t/prefetch/one_to_many_to_one.t 115
t/prefetch/refined_search_on_relation.t 102
t/prefetch/standard.t 10771
t/prefetch/via_search_related.t 3722
t/prefetch/with_limit.t 43
t/relationship/core.t 138
t/relationship/custom.t 678
t/relationship/custom_opaque.t 051
t/relationship/malformed_declaration.t 028
t/relationship/update_or_create_multi.t 2424
t/resultset/as_query.t 65
t/resultset/as_subselect_rs.t 51
t/resultset/bind_attr.t 51
t/resultset/find_on_subquery_cond.t 034
t/resultset/inflate_result_api.t 027
t/resultset/rowparser_internals.t 12
t/resultset/update_delete.t 171184
t/row/copy_with_extra_selection.t 031
t/row/filter_column.t 58146
t/row/find_one_has_many.t 1715
t/row/set_extra_column.t 032
t/search/distinct.t 21
t/search/empty_attrs.t 051
t/search/preserve_original_rs.t 42
t/search/related_has_many.t 10
t/search/related_strip_prefetch.t 21
t/search/select_chains.t 21
t/search/select_chains_unbalanced.t 21
t/search/stack_cond.t 092
t/search/subquery.t 24
t/sqlmaker/bind_transport.t 21
t/sqlmaker/core.t 21
t/sqlmaker/core_quoted.t 54
t/sqlmaker/dbihacks_internals.t 0497
t/sqlmaker/hierarchical/oracle.t 31
t/sqlmaker/legacy_joins.t 0100
t/sqlmaker/limit_dialects/custom.t 31
t/sqlmaker/limit_dialects/fetch_first.t 317
t/sqlmaker/limit_dialects/first_skip.t 21
t/sqlmaker/limit_dialects/generic_subq.t 21
t/sqlmaker/limit_dialects/mssql_torture.t 21
t/sqlmaker/limit_dialects/rno.t 21
t/sqlmaker/limit_dialects/rownum.t 87
t/sqlmaker/limit_dialects/skip_first.t 21
t/sqlmaker/limit_dialects/toplimit.t 21
t/sqlmaker/limit_dialects/torture.t 91192
t/sqlmaker/msaccess.t 54
t/sqlmaker/mysql.t 5670
t/sqlmaker/nest_deprec.t 21
t/sqlmaker/oracle.t 21
t/sqlmaker/oraclejoin.t 21
t/sqlmaker/order_by_bindtransport.t 21
t/sqlmaker/order_by_func.t 21
t/sqlmaker/quotes/quotes.t 660
t/sqlmaker/quotes/quotes_newstyle.t 830
t/sqlmaker/quotes.t 067
t/sqlmaker/sqlite.t 21
t/storage/debug.t 24114
t/storage/deploy.t 327
t/storage/error.t 11
t/storage/nobindvars.t 2913
t/storage/on_connect_do.t 05
t/storage/ping_count.t 10
t/storage/prefer_stringification.t 033
t/storage/replicated.t 10
t/storage/savepoints.t 0240
t/storage/txn.t 037
t/storage/txn_scope_guard.t 11
t/zzzzzzz_sqlite_deadlock.t 44
xt/authors.t 083
xt/footers.t 051
xt/optional_deps.t 10
xt/podcoverage.t 01
xt/quote_sub.t 048
xt/strictures.t 212
341 files changed (version diff: DBIx::Class 0.08270 to 0.082800), 6399 lines removed, 10740 lines added
@@ -0,0 +1,206 @@
+#
+#     The list of the awesome folks behind DBIx::Class
+#
+# This utf8-encoded file lists every code author and idea contributor
+# in alphabetical order
+#
+# Entry format (all elements optional, order is mandatory):
+#   (ircnick:) (name) (<email>)
+#
+#
+
+abraxxa: Alexander Hartmaier <abraxxa@cpan.org>
+acca: Alexander Kuznetsov <acca@cpan.org>
+aherzog: Adam Herzog <adam@herzogdesigns.com>
+Alexander Keusch <cpan@keusch.at>
+alexrj: Alessandro Ranellucci <aar@cpan.org>
+alnewkirk: Al Newkirk <github@alnewkirk.com>
+amiri: Amiri Barksdale <amiribarksdale@gmail.com>
+amoore: Andrew Moore <amoore@cpan.org>
+Andrew Mehta <Andrew@unitedgames.co.uk>
+andrewalker: Andre Walker <andre@andrewalker.net>
+andyg: Andy Grundman <andy@hybridized.org>
+ank: Andres Kievsky <ank@ank.com.ar>
+arc: Aaron Crane <arc@cpan.org>
+arcanez: Justin Hunter <justin.d.hunter@gmail.com>
+ash: Ash Berlin <ash@cpan.org>
+bert: Norbert Csongrádi <bert@cpan.org>
+bfwg: Colin Newell <colin.newell@gmail.com>
+blblack: Brandon L. Black <blblack@gmail.com>
+bluefeet: Aran Deltac <bluefeet@cpan.org>
+boghead: Bryan Beeley <cpan@beeley.org>
+bphillips: Brian Phillips <bphillips@cpan.org>
+brd: Brad Davis <brd@FreeBSD.org>
+Brian Kirkbride <brian.kirkbride@deeperbydesign.com>
+bricas: Brian Cassidy <bricas@cpan.org>
+brunov: Bruno Vecchi <vecchi.b@gmail.com>
+caelum: Rafael Kitover <rkitover@cpan.org>
+caldrin: Maik Hentsche <maik.hentsche@amd.com>
+castaway: Jess Robinson <castaway@desert-island.me.uk>
+chorny: Alexandr Ciornii <alexchorny@gmail.com>
+claco: Christopher H. Laco <claco@cpan.org>
+clkao: CL Kao <clkao@clkao.org>
+Ctrl-O http://ctrlo.com/
+da5id: David Jack Olrik <david@olrik.dk>
+dams: Damien Krotkine <dams@cpan.org>
+dandv: Dan Dascalescu <ddascalescu+github@gmail.com>
+dariusj: Darius Jokilehto <dariusjokilehto@yahoo.co.uk>
+davewood: David Schmidt <mail@davidschmidt.at>
+daxim: Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>
+dduncan: Darren Duncan <darren@darrenduncan.net>
+debolaz: Anders Nor Berle <berle@cpan.org>
+dew: Dan Thomas <dan@godders.org>
+dim0xff: Dmitry Latin <dim0xff@gmail.com>
+dkubb: Dan Kubb <dan.kubb-cpan@onautopilot.com>
+dnm: Justin Wheeler <jwheeler@datademons.com>
+dpetrov: Dimitar Petrov <mitakaa@gmail.com>
+dsteinbrunner: David Steinbrunner <dsteinbrunner@pobox.com>
+duncan_dmg: Duncan Garland <Duncan.Garland@motortrak.com>
+dwc: Daniel Westermann-Clark <danieltwc@cpan.org>
+dyfrgi: Michael Leuchtenburg <michael@slashhome.org>
+edenc: Eden Cardim <edencardim@gmail.com>
+Eligo http://eligo.co.uk/
+ether: Karen Etheridge <ether@cpan.org>
+evdb: Edmund von der Burg <evdb@ecclestoad.co.uk>
+faxm0dem: Fabien Wernli <cpan@faxm0dem.org>
+felliott: Fitz Elliott <fitz.elliott@gmail.com>
+freetime: Bill Moseley <moseley@hank.org>
+frew: Arthur Axel "fREW" Schmidt <frioux@gmail.com>
+gbjk: Gareth Kirwan <gbjk@thermeon.com>
+Getty: Torsten Raudssus <torsten@raudss.us>
+goraxe: Gordon Irving <goraxe@cpan.org>
+gphat: Cory G Watson <gphat@cpan.org>
+Grant Street Group http://www.grantstreet.com/
+groditi: Guillermo Roditi <groditi@cpan.org>
+gshank: Gerda Shank <gshank@cpan.org>
+guacamole: Fred Steinberg <fred.steinberg@gmail.com>
+Haarg: Graham Knop <haarg@haarg.org>
+hobbs: Andrew Rodland <andrew@cleverdomain.org>
+Ian Wells <ijw@cack.org.uk>
+idn: Ian Norton <i.norton@shadowcat.co.uk>
+ilmari: Dagfinn Ilmari Mannsåker <ilmari@ilmari.org>
+initself: Mike Baas <mike@initselftech.com>
+ironcamel: Naveed Massjouni <naveedm9@gmail.com>
+jasonmay: Jason May <jason.a.may@gmail.com>
+jawnsy: Jonathan Yu <jawnsy@cpan.org>
+jegade: Jens Gassmann <jens.gassmann@atomix.de>
+jeneric: Eric A. Miller <emiller@cpan.org>
+jesper: Jesper Krogh <jesper@krogh.cc>
+Jesse Sheidlower <jester@panix.com>
+jgoulah: John Goulah <jgoulah@cpan.org>
+jguenther: Justin Guenther <jguenther@cpan.org>
+jhannah: Jay Hannah <jay@jays.net>
+jmac: Jason McIntosh <jmac@appleseed-sc.com>
+jmmills: Jason M. Mills <jmmills@cpan.org>
+jnapiorkowski: John Napiorkowski <jjn1056@yahoo.com>
+Joe Carlson <jwcarlson@lbl.gov>
+jon: Jon Schutz <jjschutz@cpan.org>
+Jordan Metzmeier <jmetzmeier@magazines.com>
+jshirley: J. Shirley <jshirley@gmail.com>
+kaare: Kaare Rasmussen
+kd: Kieren Diment <diment@gmail.com>
+konobi: Scott McWhirter <konobi@cpan.org>
+lejeunerenard: Sean Zellmer <sean@lejeunerenard.com>
+littlesavage: Alexey Illarionov <littlesavage@orionet.ru>
+lukes: Luke Saunders <luke.saunders@gmail.com>
+marcus: Marcus Ramberg <mramberg@cpan.org>
+mateu: Mateu X. Hunter <hunter@missoula.org>
+Matt LeBlanc <antirice@gmail.com>
+Matt Sickler <imMute@msk4.com>
+mattlaw: Matt Lawrence
+mattp: Matt Phillips <mattp@cpan.org>
+mdk: Mark Keating <m.keating@shadowcat.co.uk>
+melo: Pedro Melo <melo@simplicidade.org>
+metaperl: Terrence Brannon <metaperl@gmail.com>
+michaelr: Michael Reddick <michael.reddick@gmail.com>
+milki: Jonathan Chu <milki@rescomp.berkeley.edu>
+minty: Murray Walker <perl@minty.org>
+mithaldu: Christian Walde <walde.christian@gmail.com>
+mjemmeson: Michael Jemmeson <michael.jemmeson@gmail.com>
+mna: Maya
+mo: Moritz Onken <onken@netcubed.de>
+moltar: Roman Filippov <romanf@cpan.org>
+moritz: Moritz Lenz <moritz@faui2k3.org>
+mrf: Mike Francis <ungrim97@gmail.com>
+mst: Matt S. Trout <mst@shadowcat.co.uk>
+mstratman: Mark A. Stratman <stratman@gmail.com>
+ned: Neil de Carteret <n3dst4@gmail.com>
+nigel: Nigel Metheringham <nigelm@cpan.org>
+ningu: David Kamholz <dkamholz@cpan.org>
+Nniuq: Ron "Quinn" Straight <quinnfazigu@gmail.org>
+norbi: Norbert Buchmuller <norbi@nix.hu>
+nothingmuch: Yuval Kogman <nothingmuch@woobling.org>
+nuba: Nuba Princigalli <nuba@cpan.org>
+Numa: Dan Sully <daniel@cpan.org>
+oalders: Olaf Alders <olaf@wundersolutions.com>
+Olly Betts <olly@survex.com>
+osfameron: Hakim Cassimally <osfameron@cpan.org>
+ovid: Curtis "Ovid" Poe <ovid@cpan.org>
+oyse: Øystein Torget <oystein.torget@dnv.com>
+paulm: Paul Makepeace <paulm+pause@paulm.com>
+penguin: K J Cheetham <jamie@shadowcatsystems.co.uk>
+perigrin: Chris Prather <chris@prather.org>
+Peter Siklósi <einon@einon.hu>
+Peter Valdemar Mørch <peter@morch.com>
+peter: Peter Collingbourne <peter@pcc.me.uk>
+phaylon: Robert Sedlacek <phaylon@dunkelheit.at>
+plu: Johannes Plunien <plu@cpan.org>
+Possum: Daniel LeWarne <possum@cpan.org>
+pplu: Jose Luis Martinez <jlmartinez@capside.com>
+quicksilver: Jules Bean <jules@jellybean.co.uk>
+racke: Stefan Hornburg <racke@linuxia.de>
+rafl: Florian Ragwitz <rafl@debian.org>
+rainboxx: Matthias Dietrich <perl@rb.ly>
+rbo: Robert Bohne <rbo@cpan.org>
+rbuels: Robert Buels <rmb32@cornell.edu>
+rdj: Ryan D Johnson <ryan@innerfence.com>
+Relequestual: Ben Hutton <relequestual@gmail.com>
+renormalist: Steffen Schwigon <schwigon@cpan.org>
+ribasushi: Peter Rabbitson <ribasushi@cpan.org>
+rjbs: Ricardo Signes <rjbs@cpan.org>
+Robert Krimen <rkrimen@cpan.org>
+Robert Olson <bob@rdolson.org>
+robkinyon: Rob Kinyon <rkinyon@cpan.org>
+Roman Ardern-Corris <spam_in@3legs.com>
+ruoso: Daniel Ruoso <daniel@ruoso.com>
+Sadrak: Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>
+sc_: Just Another Perl Hacker
+schwern: Michael G Schwern <mschwern@cpan.org>
+Scott R. Godin <webdragon.net@gmail.com>
+scotty: Scotty Allen <scotty@scottyallen.com>
+semifor: Marc Mims <marc@questright.com>
+Simon Elliott <cpan@browsing.co.uk>
+SineSwiper: Brendan Byrd <perl@resonatorsoft.org>
+skaufman: Samuel Kaufman <sam@socialflow.com>
+solomon: Jared Johnson <jaredj@nmgi.com>
+spb: Stephen Bennett <stephen@freenode.net>
+Squeeks <squeek@cpan.org>
+srezic: Slaven Rezic <slaven@rezic.de>
+sszabo: Stephan Szabo <sszabo@bigpanda.com>
+Stephen Peters <steve@stephenpeters.me>
+stonecolddevin: Devin Austin <dhoss@cpan.org>
+talexb: Alex Beamish <talexb@gmail.com>
+tamias: Ronald J Kimball <rjk@tamias.net>
+TBSliver: Tom Bloor <t.bloor@shadowcat.co.uk>
+teejay: Aaron Trevena <teejay@cpan.org>
+theorbtwo: James Mastros <james@mastros.biz>
+Thomas Kratz <tomk@cpan.org>
+timbunce: Tim Bunce <tim.bunce@pobox.com>
+Todd Lipcon
+Tom Hukins <tom@eborcom.com>
+tommy: Tommy Butler <tbutler.cpan.org@internetalias.net>
+tonvoon: Ton Voon <ton.voon@opsview.com>
+triode: Pete Gamache <gamache@cpan.org>
+typester: Daisuke Murase <typester@cpan.org>
+uree: Oriol Soriano <oriol.soriano@capside.com>
+uwe: Uwe Voelker <uwe@uwevoelker.de>
+victori: Victor Igumnov <victori@cpan.org>
+wdh: Will Hawes <wdhawes@gmail.com>
+wesm: Wes Malone <wes@mitsi.com>
+willert: Sebastian Willert <willert@cpan.org>
+wintermute: Toby Corkindale <tjc@cpan.org>
+wreis: Wallace Reis <wreis@cpan.org>
+xenoterracide: Caleb Cushing <xenoterracide@gmail.com>
+yrlnry: Mark Jason Dominus <mjd@plover.com>
+zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
+Zefram: Andrew Main <zefram@fysh.org>
@@ -1,5 +1,84 @@
 Revision history for DBIx::Class
 
+0.082800 2014-09-25 14:45 (UTC)
+    * Known Issues
+        - Passing large amounts of objects with stringification overload
+          directly to DBIx::Class may result in strange action at a distance
+          exceptions. More info (and a workaround description) can be found
+          under "Note" at https://metacpan.org/pod/SQL::Abstract#is_plain_value
+        - The relationship condition resolution fixes come with the side effect
+          of returning more complete data, tripping up *some* users of an
+          undocumented but widely used internal function. In particular
+          https://rt.cpan.org/Ticket/Display.html?id=91375#txn-1407239
+
+    * Notable Changes and Deprecations
+        - DBIC::FilterColumn now properly bypasses \'' and \[] literals, just
+          like the rest of DBIC
+        - DBIC::FilterColumn "from_storage" handler is now invoked on NULLs
+          returned from storage
+        - find() now throws an exception if some of the supplied values are
+          managed by DBIC::FilterColumn (RT#95054)
+        - Custom condition relationships are now invoked with a slightly
+          different signature (existing coderefs will continue to work)
+        - Add extra custom condition coderef attribute 'foreign_values'
+          to allow for proper reverse-relationship-like behavior
+          (i.e. $result->set_from_related($custom_rel, $foreign_result_object))
+        - When in a transaction, DBIC::Ordered now seamlessly handles result
+          objects that went out of sync with the storage (RT#96499)
+        - CDBICompat::columns() now supports adding columns through supplied
+          Class::DBI::Column instances (GH#52)
+        - Deprecate { col1 => col2 } expressions in manual {from} structures
+          (at some point of time manual {from} will be deprecated entirely)
+
+    * Fixes
+        - Fix Resultset delete/update affecting *THE ENTIRE TABLE* in cases
+          of empty (due to conditions) resultsets with multi-column keys
+        - Fix on_connect_* not always firing in some cases - a race condition
+          existed between storage accessor setters and the determine_driver
+          routines, triggering a connection before the set-cycle is finished
+        - Fix collapse being ignored on single-origin selection (RT#95658)
+        - Fix incorrect behavior on custom result_class inflators altering
+          the amount of returned results
+        - Fix failure to detect stable order criteria when in iterator
+          mode of a has_many prefetch off a search_related chain
+        - Prevent erroneous database hit when accessing prefetched related
+          resultsets with no rows
+        - Proper exceptions on malformed relationship conditions (RT#92234)
+        - Fix incorrect handling of custom relationship conditions returning
+          SQLA literal expressions
+        - Fix long standing bug with populate() missing data from hashrefs with
+          different keysets: http://is.gd/2011_dbic_populate_gotcha (RT#92723)
+        - Fix multi-value literal populate not working with simplified bind
+          specifications
+        - Massively improve the implied resultset condition parsing - now all
+          applicable conditions within a resultset should be properly picked
+          up by create() and populate()
+        - Ensure definitive condition extractor handles bizarre corner cases
+          without bombing out (RT#93244)
+        - Fix set_column on non-native (+columns) selections (RT#86685)
+        - Fix set_inflated_column incorrectly handling \[] literals (GH#44)
+        - Ensure that setting a column to a literal invariably marks it dirty
+        - Fix copy() not working correctly with extra selections present
+        - Work around exception objects with broken string overloading in one
+          additional codepath (missed in 0.08260)
+        - Fix more inconsistencies of the quote_names attribute propagating
+          to SQL::Translator (partially RT#87731)
+        - Fix SQLT constraint naming when DBIC table names are fully qualified
+          (PR#48)
+        - Ensure ::Schema::Versioned connects only once by reusing the main
+          connection (GH#57)
+        - Fix inability to handle multiple consecutive transactions with
+          savepoints on DBD::SQLite < 1.39
+        - Fix CDBICompat to match Class::DBI behavior handling non-result
+          blessed has_a (implicit deflate via stringification and inflate via
+          blind new) (GH#51)
+
+    * Misc
+        - Ensure source metadata calls always take place on the result source
+          instance registered with the caller
+        - IFF DBIC_TRACE output defaults to STDERR we now silence the possible
+          wide-char warnings if the trace happens to contain unicode
+
 0.08270 2014-01-30 21:54 (PST)
     * Fixes
         - Fix 0.08260 regression in DBD::SQLite bound int handling. Inserted
@@ -0,0 +1,412 @@
+DBIx::Class is Copyright (c) 2005-2014 by mst, castaway, ribasushi, and others.
+See AUTHORS and LICENSE included with this distribution. All rights reserved.
+
+This is free software; you can redistribute it and/or modify it under the
+same terms as the Perl5 (v5.0.0 ~ v5.20.0) programming language system
+itself: under the terms of either:
+
+a) the "Artistic License 1.0" as published by The Perl Foundation
+   http://www.perlfoundation.org/artistic_license_1_0
+
+b) the GNU General Public License as published by the Free Software Foundation;
+   either version 1 http://www.gnu.org/licenses/gpl-1.0.html
+   or (at your option) any later version
+
+PLEASE NOTE: It is the current maintainer's intention to keep the dual
+licensing intact. Until this notice is removed, releases will continue to
+be available under both the standard GPL and the less restrictive Artistic
+licenses.
+
+Verbatim copies of both licenses are included below:
+
+
+
+--- The Artistic License 1.0 ---
+
+                         The "Artistic License"
+
+                                Preamble
+
+The intent of this document is to state the conditions under which a
+Package may be copied, such that the Copyright Holder maintains some
+semblance of artistic control over the development of the package,
+while giving the users of the package the right to use and distribute
+the Package in a more-or-less customary fashion, plus the right to make
+reasonable modifications.
+
+Definitions:
+
+        "Package" refers to the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection of files
+        created through textual modification.
+
+        "Standard Version" refers to such a Package if it has not been
+        modified, or has been modified in accordance with the wishes
+        of the Copyright Holder as specified below.
+
+        "Copyright Holder" is whoever is named in the copyright or
+        copyrights for the package.
+
+        "You" is you, if you're thinking about copying or distributing
+        this Package.
+
+        "Reasonable copying fee" is whatever you can justify on the
+        basis of media cost, duplication charges, time of people involved,
+        and so on.  (You will not be required to justify it to the
+        Copyright Holder, but only to the computing community at large
+        as a market that must bear the fee.)
+
+        "Freely Available" means that no fee is charged for the item
+        itself, though there may be fees involved in handling the item.
+        It also means that recipients of the item may redistribute it
+        under the same conditions they received it.
+
+1. You may make and give away verbatim copies of the source form of the
+Standard Version of this Package without restriction, provided that you
+duplicate all of the original copyright notices and associated disclaimers.
+
+2. You may apply bug fixes, portability fixes and other modifications
+derived from the Public Domain or from the Copyright Holder.  A Package
+modified in such a way shall still be considered the Standard Version.
+
+3. You may otherwise modify your copy of this Package in any way, provided
+that you insert a prominent notice in each changed file stating how and
+when you changed that file, and provided that you do at least ONE of the
+following:
+
+    a) place your modifications in the Public Domain or otherwise make them
+    Freely Available, such as by posting said modifications to Usenet or
+    an equivalent medium, or placing the modifications on a major archive
+    site such as uunet.uu.net, or by allowing the Copyright Holder to include
+    your modifications in the Standard Version of the Package.
+
+    b) use the modified Package only within your corporation or organization.
+
+    c) rename any non-standard executables so the names do not conflict
+    with standard executables, which must also be provided, and provide
+    a separate manual page for each non-standard executable that clearly
+    documents how it differs from the Standard Version.
+
+    d) make other distribution arrangements with the Copyright Holder.
+
+4. You may distribute the programs of this Package in object code or
+executable form, provided that you do at least ONE of the following:
+
+    a) distribute a Standard Version of the executables and library files,
+    together with instructions (in the manual page or equivalent) on where
+    to get the Standard Version.
+
+    b) accompany the distribution with the machine-readable source of
+    the Package with your modifications.
+
+    c) give non-standard executables non-standard names, and clearly
+    document the differences in manual pages (or equivalent), together
+    with instructions on where to get the Standard Version.
+
+    d) make other distribution arrangements with the Copyright Holder.
+
+5. You may charge a reasonable copying fee for any distribution of this
+Package.  You may charge any fee you choose for support of this
+Package.  You may not charge a fee for this Package itself.  However,
+you may distribute this Package in aggregate with other (possibly
+commercial) programs as part of a larger (possibly commercial) software
+distribution provided that you do not advertise this Package as a
+product of your own.  You may embed this Package's interpreter within
+an executable of yours (by linking); this shall be construed as a mere
+form of aggregation, provided that the complete Standard Version of the
+interpreter is so embedded.
+
+6. The scripts and library files supplied as input to or produced as
+output from the programs of this Package do not automatically fall
+under the copyright of this Package, but belong to whoever generated
+them, and may be sold commercially, and may be aggregated with this
+Package.  If such scripts or library files are aggregated with this
+Package via the so-called "undump" or "unexec" methods of producing a
+binary executable image, then distribution of such an image shall
+neither be construed as a distribution of this Package nor shall it
+fall under the restrictions of Paragraphs 3 and 4, provided that you do
+not represent such an executable image as a Standard Version of this
+Package.
+
+7. C subroutines (or comparably compiled subroutines in other
+languages) supplied by you and linked into this Package in order to
+emulate subroutines and variables of the language defined by this
+Package shall not be considered part of this Package, but are the
+equivalent of input as in Paragraph 6, provided these subroutines do
+not change the language in any way that would cause it to fail the
+regression tests for the language.
+
+8. Aggregation of this Package with a commercial distribution is always
+permitted provided that the use of this Package is embedded; that is,
+when no overt attempt is made to make this Package's interfaces visible
+to the end user of the commercial distribution.  Such use shall not be
+construed as a distribution of this Package.
+
+9. The name of the Copyright Holder may not be used to endorse or promote
+products derived from this software without specific prior written permission.
+
+10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
+WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+
+--- end of The Artistic License 1.0 ---
+
+
+
+
+--- The GNU General Public License, Version 1, February 1989 ---
+
+                    GNU GENERAL PUBLIC LICENSE
+                     Version 1, February 1989
+
+ Copyright (C) 1989 Free Software Foundation, Inc.
+                    51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The license agreements of most software companies try to keep users
+at the mercy of those companies.  By contrast, our General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  The
+General Public License applies to the Free Software Foundation's
+software and to any other program whose authors commit to using it.
+You can use it for your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Specifically, the General Public License is designed to make
+sure that you have the freedom to give away or sell copies of free
+software, that you receive source code or can get it if you want it,
+that you can change the software or use pieces of it in new free
+programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of a such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must tell them their rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License Agreement applies to any program or other work which
+contains a notice placed by the copyright holder saying it may be
+distributed under the terms of this General Public License.  The
+"Program", below, refers to any such program or work, and a "work based
+on the Program" means either the Program or any work containing the
+Program or a portion of it, either verbatim or with modifications.  Each
+licensee is addressed as "you".
+
+  1. You may copy and distribute verbatim copies of the Program's source
+code as you receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice and
+disclaimer of warranty; keep intact all the notices that refer to this
+General Public License and to the absence of any warranty; and give any
+other recipients of the Program a copy of this General Public License
+along with the Program.  You may charge a fee for the physical act of
+transferring a copy.
+
+  2. You may modify your copy or copies of the Program or any portion of
+it, and copy and distribute such modifications under the terms of Paragraph
+1 above, provided that you also do the following:
+
+    a) cause the modified files to carry prominent notices stating that
+    you changed the files and the date of any change; and
+
+    b) cause the whole of any work that you distribute or publish, that
+    in whole or in part contains the Program or any part thereof, either
+    with or without modifications, to be licensed at no charge to all
+    third parties under the terms of this General Public License (except
+    that you may choose to grant warranty protection to some or all
+    third parties, at your option).
+
+    c) If the modified program normally reads commands interactively when
+    run, you must cause it, when started running for such interactive use
+    in the simplest and most usual way, to print or display an
+    announcement including an appropriate copyright notice and a notice
+    that there is no warranty (or else, saying that you provide a
+    warranty) and that users may redistribute the program under these
+    conditions, and telling the user how to view a copy of this General
+    Public License.
+
+    d) You may charge a fee for the physical act of transferring a
+    copy, and you may at your option offer warranty protection in
+    exchange for a fee.
+
+Mere aggregation of another independent work with the Program (or its
+derivative) on a volume of a storage or distribution medium does not bring
+the other work under the scope of these terms.
+
+  3. You may copy and distribute the Program (or a portion or derivative of
+it, under Paragraph 2) in object code or executable form under the terms of
+Paragraphs 1 and 2 above provided that you also do one of the following:
+
+    a) accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of
+    Paragraphs 1 and 2 above; or,
+
+    b) accompany it with a written offer, valid for at least three
+    years, to give any third party free (except for a nominal charge
+    for the cost of distribution) a complete machine-readable copy of the
+    corresponding source code, to be distributed under the terms of
+    Paragraphs 1 and 2 above; or,
+
+    c) accompany it with the information you received as to where the
+    corresponding source code may be obtained.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form alone.)
+
+Source code for a work means the preferred form of the work for making
+modifications to it.  For an executable file, complete source code means
+all the source code for all modules it contains; but, as a special
+exception, it need not include source code for modules which are standard
+libraries that accompany the operating system on which the executable
+file runs, or for standard header files or definitions files that
+accompany that operating system.
+
+  4. You may not copy, modify, sublicense, distribute or transfer the
+Program except as expressly provided under this General Public License.
+Any attempt otherwise to copy, modify, sublicense, distribute or transfer
+the Program is void, and will automatically terminate your rights to use
+the Program under this License.  However, parties who have received
+copies, or rights to use copies, from you under this General Public
+License will not have their licenses terminated so long as such parties
+remain in full compliance.
+
+  5. By copying, distributing or modifying the Program (or any work based
+on the Program) you indicate your acceptance of this license to do so,
+and all its terms and conditions.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the original
+licensor to copy, distribute or modify the Program subject to these
+terms and conditions.  You may not impose any further restrictions on the
+recipients' exercise of the rights granted herein.
+
+  7. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of the license which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+the license, you may choose any version ever published by the Free Software
+Foundation.
+
+  8. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+                            NO WARRANTY
+
+  9. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  10. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+        Appendix: How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to humanity, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these
+terms.
+
+  To do so, attach the following notices to the program.  It is safest to
+attach them to the start of each source file to most effectively convey
+the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) 19yy  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 1, or (at your option)
+    any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) 19xx name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the
+appropriate parts of the General Public License.  Of course, the
+commands you use may be called something other than `show w' and `show
+c'; they could even be mouse-clicks or menu items--whatever suits your
+program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  program `Gnomovision' (a program to direct compilers to make passes
+  at assemblers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+That's all there is to it!
+
+--- end of The GNU General Public License, Version 1, February 1989 ---
+
+
@@ -1,3 +1,4 @@
+AUTHORS
 Changes
 examples/Benchmarks/benchmark_datafetch.pl
 examples/Benchmarks/benchmark_hashrefinflator.pl
@@ -23,6 +24,7 @@ inc/Module/Install/Scripts.pm
 inc/Module/Install/Win32.pm
 inc/Module/Install/WriteAll.pm
 lib/DBIx/Class.pm
+lib/DBIx/Class.pod
 lib/DBIx/Class/_Util.pm
 lib/DBIx/Class/AccessorGroup.pm
 lib/DBIx/Class/Admin.pm
@@ -134,12 +136,14 @@ lib/DBIx/Class/SQLAHacks/Oracle.pm
 lib/DBIx/Class/SQLAHacks/OracleJoins.pm
 lib/DBIx/Class/SQLAHacks/SQLite.pm
 lib/DBIx/Class/SQLMaker.pm
+lib/DBIx/Class/SQLMaker.pod
 lib/DBIx/Class/SQLMaker/ACCESS.pm
 lib/DBIx/Class/SQLMaker/LimitDialects.pm
 lib/DBIx/Class/SQLMaker/MSSQL.pm
 lib/DBIx/Class/SQLMaker/MySQL.pm
 lib/DBIx/Class/SQLMaker/Oracle.pm
 lib/DBIx/Class/SQLMaker/OracleJoins.pm
+lib/DBIx/Class/SQLMaker/OracleJoins.pod
 lib/DBIx/Class/SQLMaker/SQLite.pm
 lib/DBIx/Class/StartupCheck.pm
 lib/DBIx/Class/Storage.pm
@@ -199,30 +203,24 @@ lib/DBIx/Class/Storage/TxnScopeGuard.pm
 lib/DBIx/Class/UTF8Columns.pm
 lib/SQL/Translator/Parser/DBIx/Class.pm
 lib/SQL/Translator/Producer/DBIx/Class/File.pm
+LICENSE
+maint/gen_pod_authors
 maint/gen_pod_index
 maint/gen_pod_inherit
 maint/gen_sqlite_schema_files
+maint/getstatus
 maint/Makefile.PL.inc/01_adjust_INC.pl
 maint/Makefile.PL.inc/11_authortests.pl
 maint/Makefile.PL.inc/12_authordeps.pl
-maint/Makefile.PL.inc/21_meta_noindex.pl
+maint/Makefile.PL.inc/21_set_meta.pl
 maint/Makefile.PL.inc/29_handle_version.pl
 maint/Makefile.PL.inc/50_redefine_makefile_flow.pl
 maint/Makefile.PL.inc/51_autohandle_MANIFEST.pl
-maint/Makefile.PL.inc/52_autogen_README.pl
 maint/Makefile.PL.inc/53_autogen_pod.pl
+maint/Makefile.PL.inc/54_autogen_legalese_and_README.pl
 maint/Makefile.PL.inc/56_autogen_schema_files.pl
 maint/Makefile.PL.inc/61_inject_dbicadmin_pod.pl
 maint/Makefile.PL.inc/91_inc_sanity_check.pl
-maint/travis-ci_scripts/10_before_install.bash
-maint/travis-ci_scripts/20_install.bash
-maint/travis-ci_scripts/30_before_script.bash
-maint/travis-ci_scripts/40_script.bash
-maint/travis-ci_scripts/50_after_failure.bash
-maint/travis-ci_scripts/50_after_success.bash
-maint/travis-ci_scripts/60_after_script.bash
-maint/travis-ci_scripts/common.bash
-maint/travis-ci_scripts/lib/TAP/Harness/IgnoreNonessentialDzilAutogeneratedTests.pm
 Makefile.PL
 MANIFEST			This list of files
 META.yml
@@ -314,7 +312,6 @@ t/94pk_mutation.t
 t/94versioning.t
 t/96_is_deteministic_value.t
 t/97result_class.t
-t/98savepoints.t
 t/99dbic_sqlt_parser.t
 t/admin/01load.t
 t/admin/02ddl.t
@@ -343,6 +340,8 @@ t/cdbi/24-meta_info.t
 t/cdbi/26-mutator.t
 t/cdbi/30-pager.t
 t/cdbi/68-inflate_has_a.t
+t/cdbi/70_implicit_inflate.t
+t/cdbi/71_column_object.t
 t/cdbi/98-failure.t
 t/cdbi/abstract/search_where.t
 t/cdbi/columns_as_hashes.t
@@ -365,9 +364,11 @@ t/cdbi/testlib/Actor.pm
 t/cdbi/testlib/ActorAlias.pm
 t/cdbi/testlib/Blurb.pm
 t/cdbi/testlib/CDBase.pm
+t/cdbi/testlib/ColumnObject.pm
 t/cdbi/testlib/DBIC/Test/SQLite.pm
 t/cdbi/testlib/Director.pm
 t/cdbi/testlib/Film.pm
+t/cdbi/testlib/ImplicitInflate.pm
 t/cdbi/testlib/Lazy.pm
 t/cdbi/testlib/Log.pm
 t/cdbi/testlib/MyBase.pm
@@ -412,8 +413,6 @@ t/inflate/hri_torture.t
 t/inflate/serialize.t
 t/lib/admincfgtest.json
 t/lib/awesome.json
-t/lib/DBIC/DebugObj.pm
-t/lib/DBIC/SqlMakerTest.pm
 t/lib/DBICNSTest/Bogus/A.pm
 t/lib/DBICNSTest/Bogus/B.pm
 t/lib/DBICNSTest/Bogus/Bigos.pm
@@ -437,6 +436,7 @@ t/lib/DBICNSTest/RtBug41083/ResultSet.pm
 t/lib/DBICNSTest/RtBug41083/ResultSet/Foo.pm
 t/lib/DBICNSTest/RtBug41083/ResultSet_A/A.pm
 t/lib/DBICTest.pm
+t/lib/DBICTest/Base.pm
 t/lib/DBICTest/BaseResult.pm
 t/lib/DBICTest/BaseResultSet.pm
 t/lib/DBICTest/BaseSchema.pm
@@ -507,7 +507,7 @@ t/lib/DBICTest/Schema/TypedObject.pm
 t/lib/DBICTest/Schema/VaryingMAX.pm
 t/lib/DBICTest/Schema/Year1999CDs.pm
 t/lib/DBICTest/Schema/Year2000CDs.pm
-t/lib/DBICTest/Stats.pm
+t/lib/DBICTest/SQLTracerObj.pm
 t/lib/DBICTest/SyntaxErrorComponent1.pm
 t/lib/DBICTest/SyntaxErrorComponent2.pm
 t/lib/DBICTest/SyntaxErrorComponent3.pm
@@ -517,10 +517,10 @@ t/lib/DBICTest/Taint/Namespaces/Result/Test.pm
 t/lib/DBICTest/Util.pm
 t/lib/DBICTest/Util/LeakTracer.pm
 t/lib/DBICTest/Util/OverrideRequire.pm
+t/lib/DBICTest/WithTaint.pm
 t/lib/DBICVersion_v1.pm
 t/lib/DBICVersion_v2.pm
 t/lib/DBICVersion_v3.pm
-t/lib/PrefetchBug.pm
 t/lib/sqlite.sql
 t/lib/test_deploy/DBICTest-Schema-1.x-SQLite.sql
 t/lib/testinclude/DBICTestAdminInc.pm
@@ -552,6 +552,7 @@ t/lib/ViewDepsBad/Result/Year2010CDsWithManyTracks.pm
 t/multi_create/cd_single.t
 t/multi_create/diamond.t
 t/multi_create/existing_in_chain.t
+t/multi_create/find_or_multicreate.t
 t/multi_create/has_many.t
 t/multi_create/in_memory.t
 t/multi_create/insert_defaults.t
@@ -567,6 +568,7 @@ t/prefetch/correlated.t
 t/prefetch/count.t
 t/prefetch/diamond.t
 t/prefetch/double_prefetch.t
+t/prefetch/empty_cache.t
 t/prefetch/false_colvalues.t
 t/prefetch/grouped.t
 t/prefetch/incomplete.t
@@ -585,10 +587,12 @@ t/prefetch/with_limit.t
 t/relationship/after_update.t
 t/relationship/core.t
 t/relationship/custom.t
+t/relationship/custom_opaque.t
 t/relationship/custom_with_null_in_cond.t
 t/relationship/doesnt_exist.t
 t/relationship/dynamic_foreign_columns.t
 t/relationship/info.t
+t/relationship/malformed_declaration.t
 t/relationship/proxy.t
 t/relationship/set_column_on_fk.t
 t/relationship/unresolvable.t
@@ -597,6 +601,7 @@ t/relationship/update_or_create_single.t
 t/resultset/as_query.t
 t/resultset/as_subselect_rs.t
 t/resultset/bind_attr.t
+t/resultset/find_on_subquery_cond.t
 t/resultset/inflate_result_api.t
 t/resultset/inflatemap_abuse.t
 t/resultset/is_ordered.t
@@ -608,26 +613,32 @@ t/resultset/update_delete.t
 t/resultset_class.t
 t/resultset_overload.t
 t/resultsource/set_primary_key.t
+t/row/copy_with_extra_selection.t
 t/row/filter_column.t
 t/row/find_one_has_many.t
 t/row/inflate_result.t
 t/row/pkless.t
+t/row/set_extra_column.t
 t/schema/anon.t
 t/schema/clone.t
 t/search/deprecated_attributes.t
 t/search/distinct.t
+t/search/empty_attrs.t
 t/search/preserve_original_rs.t
 t/search/reentrancy.t
 t/search/related_has_many.t
 t/search/related_strip_prefetch.t
 t/search/select_chains.t
 t/search/select_chains_unbalanced.t
+t/search/stack_cond.t
 t/search/subquery.t
 t/search/void.t
 t/sqlmaker/bind_transport.t
 t/sqlmaker/core.t
 t/sqlmaker/core_quoted.t
+t/sqlmaker/dbihacks_internals.t
 t/sqlmaker/hierarchical/oracle.t
+t/sqlmaker/legacy_joins.t
 t/sqlmaker/limit_dialects/basic.t
 t/sqlmaker/limit_dialects/custom.t
 t/sqlmaker/limit_dialects/fetch_first.t
@@ -647,8 +658,7 @@ t/sqlmaker/oracle.t
 t/sqlmaker/oraclejoin.t
 t/sqlmaker/order_by_bindtransport.t
 t/sqlmaker/order_by_func.t
-t/sqlmaker/quotes/quotes.t
-t/sqlmaker/quotes/quotes_newstyle.t
+t/sqlmaker/quotes.t
 t/sqlmaker/sqlite.t
 t/storage/base.t
 t/storage/cursor.t
@@ -667,9 +677,11 @@ t/storage/nobindvars.t
 t/storage/on_connect_call.t
 t/storage/on_connect_do.t
 t/storage/ping_count.t
+t/storage/prefer_stringification.t
 t/storage/quote_names.t
 t/storage/reconnect.t
 t/storage/replicated.t
+t/storage/savepoints.t
 t/storage/stats.t
 t/storage/txn.t
 t/storage/txn_scope_guard.t
@@ -678,11 +690,14 @@ t/update/ident_cond.t
 t/update/type_aware.t
 t/zzzzzzz_perl_perf_bug.t
 t/zzzzzzz_sqlite_deadlock.t
+xt/authors.t
 xt/dbictest_unlink_guard.t
+xt/footers.t
 xt/old_envvars.t
 xt/optional_deps.t
 xt/pod.t
 xt/podcoverage.t
+xt/quote_sub.t
 xt/standalone_testschema_resultclasses.t
 xt/strictures.t
 xt/whitespace.t
@@ -1,7 +1,9 @@
 ---
 abstract: 'Extensible and flexible object <-> relational mapper.'
 author:
-  - 'mst: Matt S. Trout <mst@shadowcatsystems.co.uk>'
+  - 'mst: Matt S Trout <mst@shadowcat.co.uk> (project founder - original idea, architecture and implementation)'
+  - 'castaway: Jess Robinson <castaway@desert-island.me.uk> (lions share of the reference documentation and manuals)'
+  - 'ribasushi: Peter Rabbitson <ribasushi@cpan.org> (present day maintenance and controlled evolution)'
 build_requires:
   DBD::SQLite: 1.29
   File::Temp: 0.22
@@ -43,7 +45,6 @@ requires:
   Config::Any: 0.20
   Context::Preserve: 0.01
   DBI: 1.57
-  Data::Compare: 1.22
   Data::Dumper::Concise: 2.020
   Data::Page: 2.00
   Devel::GlobalDestruction: 0.09
@@ -51,9 +52,9 @@ requires:
   List::Util: 1.16
   MRO::Compat: 0.12
   Module::Find: 0.07
-  Moo: 1.002
+  Moo: 1.004005
   Path::Class: 0.18
-  SQL::Abstract: 1.77
+  SQL::Abstract: 1.79
   Scope::Guard: 0.03
   Sub::Name: 0.04
   Text::Balanced: 2.00
@@ -67,5 +68,201 @@ resources:
   homepage: http://www.dbix-class.org/
   license: http://dev.perl.org/licenses/
   repository: https://github.com/dbsrgits/DBIx-Class
-version: 0.08270
+version: 0.082800
 x_authority: cpan:RIBASUSHI
+x_contributors:
+  - 'abraxxa: Alexander Hartmaier <abraxxa@cpan.org>'
+  - 'acca: Alexander Kuznetsov <acca@cpan.org>'
+  - 'aherzog: Adam Herzog <adam@herzogdesigns.com>'
+  - 'Alexander Keusch <cpan@keusch.at>'
+  - 'alexrj: Alessandro Ranellucci <aar@cpan.org>'
+  - 'alnewkirk: Al Newkirk <github@alnewkirk.com>'
+  - 'amiri: Amiri Barksdale <amiribarksdale@gmail.com>'
+  - 'amoore: Andrew Moore <amoore@cpan.org>'
+  - 'Andrew Mehta <Andrew@unitedgames.co.uk>'
+  - 'andrewalker: Andre Walker <andre@andrewalker.net>'
+  - 'andyg: Andy Grundman <andy@hybridized.org>'
+  - 'ank: Andres Kievsky <ank@ank.com.ar>'
+  - 'arc: Aaron Crane <arc@cpan.org>'
+  - 'arcanez: Justin Hunter <justin.d.hunter@gmail.com>'
+  - 'ash: Ash Berlin <ash@cpan.org>'
+  - 'bert: Norbert Csongrádi <bert@cpan.org>'
+  - 'bfwg: Colin Newell <colin.newell@gmail.com>'
+  - 'blblack: Brandon L. Black <blblack@gmail.com>'
+  - 'bluefeet: Aran Deltac <bluefeet@cpan.org>'
+  - 'boghead: Bryan Beeley <cpan@beeley.org>'
+  - 'bphillips: Brian Phillips <bphillips@cpan.org>'
+  - 'brd: Brad Davis <brd@FreeBSD.org>'
+  - 'Brian Kirkbride <brian.kirkbride@deeperbydesign.com>'
+  - 'bricas: Brian Cassidy <bricas@cpan.org>'
+  - 'brunov: Bruno Vecchi <vecchi.b@gmail.com>'
+  - 'caelum: Rafael Kitover <rkitover@cpan.org>'
+  - 'caldrin: Maik Hentsche <maik.hentsche@amd.com>'
+  - 'castaway: Jess Robinson <castaway@desert-island.me.uk>'
+  - 'chorny: Alexandr Ciornii <alexchorny@gmail.com>'
+  - 'claco: Christopher H. Laco <claco@cpan.org>'
+  - 'clkao: CL Kao <clkao@clkao.org>'
+  - 'Ctrl-O http://ctrlo.com/'
+  - 'da5id: David Jack Olrik <david@olrik.dk>'
+  - 'dams: Damien Krotkine <dams@cpan.org>'
+  - 'dandv: Dan Dascalescu <ddascalescu+github@gmail.com>'
+  - 'dariusj: Darius Jokilehto <dariusjokilehto@yahoo.co.uk>'
+  - 'davewood: David Schmidt <mail@davidschmidt.at>'
+  - 'daxim: Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>'
+  - 'dduncan: Darren Duncan <darren@darrenduncan.net>'
+  - 'debolaz: Anders Nor Berle <berle@cpan.org>'
+  - 'dew: Dan Thomas <dan@godders.org>'
+  - 'dim0xff: Dmitry Latin <dim0xff@gmail.com>'
+  - 'dkubb: Dan Kubb <dan.kubb-cpan@onautopilot.com>'
+  - 'dnm: Justin Wheeler <jwheeler@datademons.com>'
+  - 'dpetrov: Dimitar Petrov <mitakaa@gmail.com>'
+  - 'dsteinbrunner: David Steinbrunner <dsteinbrunner@pobox.com>'
+  - 'duncan_dmg: Duncan Garland <Duncan.Garland@motortrak.com>'
+  - 'dwc: Daniel Westermann-Clark <danieltwc@cpan.org>'
+  - 'dyfrgi: Michael Leuchtenburg <michael@slashhome.org>'
+  - 'edenc: Eden Cardim <edencardim@gmail.com>'
+  - 'Eligo http://eligo.co.uk/'
+  - 'ether: Karen Etheridge <ether@cpan.org>'
+  - 'evdb: Edmund von der Burg <evdb@ecclestoad.co.uk>'
+  - 'faxm0dem: Fabien Wernli <cpan@faxm0dem.org>'
+  - 'felliott: Fitz Elliott <fitz.elliott@gmail.com>'
+  - 'freetime: Bill Moseley <moseley@hank.org>'
+  - "frew: Arthur Axel \"fREW\" Schmidt <frioux@gmail.com>"
+  - 'gbjk: Gareth Kirwan <gbjk@thermeon.com>'
+  - 'Getty: Torsten Raudssus <torsten@raudss.us>'
+  - 'goraxe: Gordon Irving <goraxe@cpan.org>'
+  - 'gphat: Cory G Watson <gphat@cpan.org>'
+  - 'Grant Street Group http://www.grantstreet.com/'
+  - 'groditi: Guillermo Roditi <groditi@cpan.org>'
+  - 'gshank: Gerda Shank <gshank@cpan.org>'
+  - 'guacamole: Fred Steinberg <fred.steinberg@gmail.com>'
+  - 'Haarg: Graham Knop <haarg@haarg.org>'
+  - 'hobbs: Andrew Rodland <andrew@cleverdomain.org>'
+  - 'Ian Wells <ijw@cack.org.uk>'
+  - 'idn: Ian Norton <i.norton@shadowcat.co.uk>'
+  - 'ilmari: Dagfinn Ilmari Mannsåker <ilmari@ilmari.org>'
+  - 'initself: Mike Baas <mike@initselftech.com>'
+  - 'ironcamel: Naveed Massjouni <naveedm9@gmail.com>'
+  - 'jasonmay: Jason May <jason.a.may@gmail.com>'
+  - 'jawnsy: Jonathan Yu <jawnsy@cpan.org>'
+  - 'jegade: Jens Gassmann <jens.gassmann@atomix.de>'
+  - 'jeneric: Eric A. Miller <emiller@cpan.org>'
+  - 'jesper: Jesper Krogh <jesper@krogh.cc>'
+  - 'Jesse Sheidlower <jester@panix.com>'
+  - 'jgoulah: John Goulah <jgoulah@cpan.org>'
+  - 'jguenther: Justin Guenther <jguenther@cpan.org>'
+  - 'jhannah: Jay Hannah <jay@jays.net>'
+  - 'jmac: Jason McIntosh <jmac@appleseed-sc.com>'
+  - 'jmmills: Jason M. Mills <jmmills@cpan.org>'
+  - 'jnapiorkowski: John Napiorkowski <jjn1056@yahoo.com>'
+  - 'Joe Carlson <jwcarlson@lbl.gov>'
+  - 'jon: Jon Schutz <jjschutz@cpan.org>'
+  - 'Jordan Metzmeier <jmetzmeier@magazines.com>'
+  - 'jshirley: J. Shirley <jshirley@gmail.com>'
+  - 'kaare: Kaare Rasmussen'
+  - 'kd: Kieren Diment <diment@gmail.com>'
+  - 'konobi: Scott McWhirter <konobi@cpan.org>'
+  - 'lejeunerenard: Sean Zellmer <sean@lejeunerenard.com>'
+  - 'littlesavage: Alexey Illarionov <littlesavage@orionet.ru>'
+  - 'lukes: Luke Saunders <luke.saunders@gmail.com>'
+  - 'marcus: Marcus Ramberg <mramberg@cpan.org>'
+  - 'mateu: Mateu X. Hunter <hunter@missoula.org>'
+  - 'Matt LeBlanc <antirice@gmail.com>'
+  - 'Matt Sickler <imMute@msk4.com>'
+  - 'mattlaw: Matt Lawrence'
+  - 'mattp: Matt Phillips <mattp@cpan.org>'
+  - 'mdk: Mark Keating <m.keating@shadowcat.co.uk>'
+  - 'melo: Pedro Melo <melo@simplicidade.org>'
+  - 'metaperl: Terrence Brannon <metaperl@gmail.com>'
+  - 'michaelr: Michael Reddick <michael.reddick@gmail.com>'
+  - 'milki: Jonathan Chu <milki@rescomp.berkeley.edu>'
+  - 'minty: Murray Walker <perl@minty.org>'
+  - 'mithaldu: Christian Walde <walde.christian@gmail.com>'
+  - 'mjemmeson: Michael Jemmeson <michael.jemmeson@gmail.com>'
+  - 'mna: Maya'
+  - 'mo: Moritz Onken <onken@netcubed.de>'
+  - 'moltar: Roman Filippov <romanf@cpan.org>'
+  - 'moritz: Moritz Lenz <moritz@faui2k3.org>'
+  - 'mrf: Mike Francis <ungrim97@gmail.com>'
+  - 'mst: Matt S. Trout <mst@shadowcat.co.uk>'
+  - 'mstratman: Mark A. Stratman <stratman@gmail.com>'
+  - 'ned: Neil de Carteret <n3dst4@gmail.com>'
+  - 'nigel: Nigel Metheringham <nigelm@cpan.org>'
+  - 'ningu: David Kamholz <dkamholz@cpan.org>'
+  - "Nniuq: Ron \"Quinn\" Straight\" <quinnfazigu@gmail.org>"
+  - 'norbi: Norbert Buchmuller <norbi@nix.hu>'
+  - 'nothingmuch: Yuval Kogman <nothingmuch@woobling.org>'
+  - 'nuba: Nuba Princigalli <nuba@cpan.org>'
+  - 'Numa: Dan Sully <daniel@cpan.org>'
+  - 'oalders: Olaf Alders <olaf@wundersolutions.com>'
+  - 'Olly Betts <olly@survex.com>'
+  - 'osfameron: Hakim Cassimally <osfameron@cpan.org>'
+  - "ovid: Curtis \"Ovid\" Poe <ovid@cpan.org>"
+  - 'oyse: Øystein Torget <oystein.torget@dnv.com>'
+  - 'paulm: Paul Makepeace <paulm+pause@paulm.com>'
+  - 'penguin: K J Cheetham <jamie@shadowcatsystems.co.uk>'
+  - 'perigrin: Chris Prather <chris@prather.org>'
+  - 'Peter Siklósi <einon@einon.hu>'
+  - 'Peter Valdemar Mørch <peter@morch.com>'
+  - 'peter: Peter Collingbourne <peter@pcc.me.uk>'
+  - 'phaylon: Robert Sedlacek <phaylon@dunkelheit.at>'
+  - 'plu: Johannes Plunien <plu@cpan.org>'
+  - 'Possum: Daniel LeWarne <possum@cpan.org>'
+  - 'pplu: Jose Luis Martinez <jlmartinez@capside.com>'
+  - 'quicksilver: Jules Bean <jules@jellybean.co.uk>'
+  - 'racke: Stefan Hornburg <racke@linuxia.de>'
+  - 'rafl: Florian Ragwitz <rafl@debian.org>'
+  - 'rainboxx: Matthias Dietrich <perl@rb.ly>'
+  - 'rbo: Robert Bohne <rbo@cpan.org>'
+  - 'rbuels: Robert Buels <rmb32@cornell.edu>'
+  - 'rdj: Ryan D Johnson <ryan@innerfence.com>'
+  - 'Relequestual: Ben Hutton <relequestual@gmail.com>'
+  - 'renormalist: Steffen Schwigon <schwigon@cpan.org>'
+  - 'ribasushi: Peter Rabbitson <ribasushi@cpan.org>'
+  - 'rjbs: Ricardo Signes <rjbs@cpan.org>'
+  - 'Robert Krimen <rkrimen@cpan.org>'
+  - 'Robert Olson <bob@rdolson.org>'
+  - 'robkinyon: Rob Kinyon <rkinyon@cpan.org>'
+  - 'Roman Ardern-Corris <spam_in@3legs.com>'
+  - 'ruoso: Daniel Ruoso <daniel@ruoso.com>'
+  - 'Sadrak: Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>'
+  - 'sc_: Just Another Perl Hacker'
+  - 'schwern: Michael G Schwern <mschwern@cpan.org>'
+  - 'Scott R. Godin <webdragon.net@gmail.com>'
+  - 'scotty: Scotty Allen <scotty@scottyallen.com>'
+  - 'semifor: Marc Mims <marc@questright.com>'
+  - 'Simon Elliott <cpan@browsing.co.uk>'
+  - 'SineSwiper: Brendan Byrd <perl@resonatorsoft.org>'
+  - 'skaufman: Samuel Kaufman <sam@socialflow.com>'
+  - 'solomon: Jared Johnson <jaredj@nmgi.com>'
+  - 'spb: Stephen Bennett <stephen@freenode.net>'
+  - 'Squeeks <squeek@cpan.org>'
+  - 'srezic: Slaven Rezic <slaven@rezic.de>'
+  - 'sszabo: Stephan Szabo <sszabo@bigpanda.com>'
+  - 'Stephen Peters <steve@stephenpeters.me>'
+  - 'stonecolddevin: Devin Austin <dhoss@cpan.org>'
+  - 'talexb: Alex Beamish <talexb@gmail.com>'
+  - 'tamias: Ronald J Kimball <rjk@tamias.net>'
+  - 'TBSliver: Tom Bloor <t.bloor@shadowcat.co.uk>'
+  - 'teejay: Aaron Trevena <teejay@cpan.org>'
+  - 'theorbtwo: James Mastros <james@mastros.biz>'
+  - 'Thomas Kratz <tomk@cpan.org>'
+  - 'timbunce: Tim Bunce <tim.bunce@pobox.com>'
+  - 'Todd Lipcon'
+  - 'Tom Hukins <tom@eborcom.com>'
+  - 'tommy: Tommy Butler <tbutler.cpan.org@internetalias.net>'
+  - 'tonvoon: Ton Voon <ton.voon@opsview.com>'
+  - 'triode: Pete Gamache <gamache@cpan.org>'
+  - 'typester: Daisuke Murase <typester@cpan.org>'
+  - 'uree: Oriol Soriano <oriol.soriano@capside.com>'
+  - 'uwe: Uwe Voelker <uwe@uwevoelker.de>'
+  - 'victori: Victor Igumnov <victori@cpan.org>'
+  - 'wdh: Will Hawes <wdhawes@gmail.com>'
+  - 'wesm: Wes Malone <wes@mitsi.com>'
+  - 'willert: Sebastian Willert <willert@cpan.org>'
+  - 'wintermute: Toby Corkindale <tjc@cpan.org>'
+  - 'wreis: Wallace Reis <wreis@cpan.org>'
+  - 'xenoterracide: Caleb Cushing <xenoterracide@gmail.com>'
+  - 'yrlnry: Mark Jason Dominus <mjd@plover.com>'
+  - 'zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>'
+  - 'Zefram: Andrew Main <zefram@fysh.org>'
@@ -15,37 +15,15 @@ BEGIN {
   $Module::Install::AUTHOR = 0 if (grep { $ENV{"PERL5_${_}_IS_RUNNING"} } (qw/CPANM CPANPLUS CPAN/) );
 }
 
-homepage 'http://www.dbix-class.org/';
-resources 'IRC'         => 'irc://irc.perl.org/#dbix-class';
-resources 'license'     => 'http://dev.perl.org/licenses/';
-resources 'repository'  => 'https://github.com/dbsrgits/DBIx-Class';
-resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class';
-resources 'bugtracker'  => 'http://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class';
-
-name     'DBIx-Class';
+name         'DBIx-Class';
+version_from 'lib/DBIx/Class.pm';
 perl_version '5.008001';
-all_from 'lib/DBIx/Class.pm';
-Meta->{values}{x_authority} = 'cpan:RIBASUSHI';
-
-# nothing determined at runtime, except for possibly SQLT dep, see
-# comment further down
-dynamic_config 0;
-
-tests_recursive (qw|
-    t
-|);
-
-install_script (qw|
-    script/dbicadmin
-|);
 
 ###
 ### DO NOT ADD OPTIONAL DEPENDENCIES HERE, EVEN AS recommends()
 ### All of them *MUST* go to DBIx::Class::Optional::Dependencies
 ###
 my $runtime_requires = {
-  # FIXME - temporary, needs throwing out for something more efficient
-  'Data::Compare'            => '1.22',
 
   # DBI itself should be capable of installation and execution in pure-perl
   # mode. However it has never been tested yet, so consider XS for the time
@@ -78,13 +56,13 @@ my $runtime_requires = {
   'Data::Page'               => '2.00',
   'Devel::GlobalDestruction' => '0.09',
   'Hash::Merge'              => '0.12',
-  'Moo'                      => '1.002',
+  'Moo'                      => '1.004005',
   'MRO::Compat'              => '0.12',
   'Module::Find'             => '0.07',
   'namespace::clean'         => '0.24',
   'Path::Class'              => '0.18',
   'Scope::Guard'             => '0.03',
-  'SQL::Abstract'            => '1.77',
+  'SQL::Abstract'            => '1.79',
   'Try::Tiny'                => '0.07',
 
   # Technically this is not a core dependency - it is only required
@@ -137,6 +115,14 @@ if ($ENV{DBICTEST_SQLT_DEPLOY}) {
   }
 }
 
+tests_recursive (qw|
+    t
+|);
+
+install_script (qw|
+    script/dbicadmin
+|);
+
 # this is so we can order requires alphabetically
 # copies are needed for potential author requires injection
 my $reqs = {
@@ -184,10 +170,22 @@ for my $mod (sort keys %final_req) {
 # IFF we are running interactively
 auto_install();
 
-WriteAll();
+{
+  # M::I understands unicode in meta but does not write with the right
+  # layers - fhtagn!!!
+  local $SIG{__WARN__} = sub { warn $_[0] unless $_[0] =~ /Wide character in print/ };
+  WriteAll();
+}
 
 exit 0;
 
+
+###
+### Nothing user-serviceable beyond this point
+### (none of this executes on regular install)
+###
+
+
 # needs to be here to keep 5.8 string eval happy
 # (the include of Makefile.PL.inc loop)
 my $mm_proto;
@@ -1,3 +1,6 @@
+DBIx::Class is Copyright (c) 2005-2014 by mst, castaway, ribasushi, and others.
+See AUTHORS and LICENSE included with this distribution. All rights reserved.
+
 NAME
     DBIx::Class - Extensible and flexible object <-> relational mapper.
 
@@ -7,12 +10,12 @@ WHERE TO START READING
     confusion it is strongly recommended to read (at the very least) the
     Manuals in the order presented there.
 
-HOW TO GET HELP
-    Due to the complexity of its problem domain, DBIx::Class is a relatively
+GETTING HELP/SUPPORT
+    Due to the sheer size of its problem domain, DBIx::Class is a relatively
     complex framework. After you start using DBIx::Class questions will
     inevitably arise. If you are stuck with a problem or have doubts about a
-    particular approach do not hesitate to contact the community with your
-    questions. The list below is sorted by "fastest response time":
+    particular approach do not hesitate to contact us via any of the
+    following options (the list is sorted by "fastest response time"):
 
     *   IRC: irc.perl.org#dbix-class
 
@@ -171,8 +174,9 @@ HOW TO CONTRIBUTE
     welcome documentation improvements). The delivery methods include git-
     or unified-diff formatted patches, GitHub pull requests, or plain bug
     reports either via RT or the Mailing list. Contributors are generally
-    granted full access to the official repository after their first patch
-    passes successful review.
+    granted access to the official repository after their first several
+    patches pass successful review. Don't hesitate to contact either of the
+    "CAT HERDERS" with any further questions you may have.
 
     This project is maintained in a git repository. The code and related
     tools are accessible at the following locations:
@@ -190,276 +194,428 @@ HOW TO CONTRIBUTE
 
     *   Travis-CI log: <https://travis-ci.org/dbsrgits/dbix-class/builds>
 
-AUTHOR
-    mst: Matt S. Trout <mst@shadowcatsystems.co.uk>
+AUTHORS
+    Even though a large portion of the source *appears* to be written by
+    just a handful of people, this library continues to remain a
+    collaborative effort - perhaps one of the most successful such projects
+    on CPAN <http://cpan.org>. It is important to remember that ideas do not
+    always result in a direct code contribution, but deserve acknowledgement
+    just the same. Time and time again the seemingly most insignificant
+    questions and suggestions have been shown to catalyze monumental
+    improvements in consistency, accuracy and performance.
+
+    List of the awesome contributors who made DBIC v0.082800 possible
+
+        abraxxa:Alexander Hartmaier <abraxxa@cpan.org>
+
+        acca:Alexander Kuznetsov <acca@cpan.org>
+
+        aherzog:Adam Herzog <adam@herzogdesigns.com>
+
+        Alexander Keusch <cpan@keusch.at>
+
+        alexrj:Alessandro Ranellucci <aar@cpan.org>
+
+        alnewkirk:Al Newkirk <github@alnewkirk.com>
+
+        amiri:Amiri Barksdale <amiribarksdale@gmail.com>
+
+        amoore:Andrew Moore <amoore@cpan.org>
+
+        Andrew Mehta <Andrew@unitedgames.co.uk>
+
+        andrewalker:Andre Walker <andre@andrewalker.net>
+
+        andyg:Andy Grundman <andy@hybridized.org>
+
+        ank:Andres Kievsky <ank@ank.com.ar>
+
+        arc:Aaron Crane <arc@cpan.org>
+
+        arcanez:Justin Hunter <justin.d.hunter@gmail.com>
+
+        ash:Ash Berlin <ash@cpan.org>
+
+        bert:Norbert Csongrádi <bert@cpan.org>
+
+        bfwg:Colin Newell <colin.newell@gmail.com>
+
+        blblack:Brandon L. Black <blblack@gmail.com>
+
+        bluefeet:Aran Deltac <bluefeet@cpan.org>
+
+        boghead:Bryan Beeley <cpan@beeley.org>
+
+        bphillips:Brian Phillips <bphillips@cpan.org>
+
+        brd:Brad Davis <brd@FreeBSD.org>
+
+        Brian Kirkbride <brian.kirkbride@deeperbydesign.com>
+
+        bricas:Brian Cassidy <bricas@cpan.org>
+
+        brunov:Bruno Vecchi <vecchi.b@gmail.com>
+
+        caelum:Rafael Kitover <rkitover@cpan.org>
+
+        caldrin:Maik Hentsche <maik.hentsche@amd.com>
+
+        castaway:Jess Robinson <castaway@desert-island.me.uk>
+
+        chorny:Alexandr Ciornii <alexchorny@gmail.com>
+
+        claco:Christopher H. Laco <claco@cpan.org>
+
+        clkao:CL Kao <clkao@clkao.org>
+
+        Ctrl-O <http://ctrlo.com/>
+
+        da5id:David Jack Olrik <david@olrik.dk>
+
+        dams:Damien Krotkine <dams@cpan.org>
+
+        dandv:Dan Dascalescu <ddascalescu+github@gmail.com>
+
+        dariusj:Darius Jokilehto <dariusjokilehto@yahoo.co.uk>
+
+        davewood:David Schmidt <mail@davidschmidt.at>
+
+        daxim:Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>
+
+        dduncan:Darren Duncan <darren@darrenduncan.net>
+
+        debolaz:Anders Nor Berle <berle@cpan.org>
+
+        dew:Dan Thomas <dan@godders.org>
+
+        dim0xff:Dmitry Latin <dim0xff@gmail.com>
+
+        dkubb:Dan Kubb <dan.kubb-cpan@onautopilot.com>
+
+        dnm:Justin Wheeler <jwheeler@datademons.com>
+
+        dpetrov:Dimitar Petrov <mitakaa@gmail.com>
+
+        dsteinbrunner:David Steinbrunner <dsteinbrunner@pobox.com>
+
+        duncan_dmg:Duncan Garland <Duncan.Garland@motortrak.com>
+
+        dwc:Daniel Westermann-Clark <danieltwc@cpan.org>
+
+        dyfrgi:Michael Leuchtenburg <michael@slashhome.org>
+
+        edenc:Eden Cardim <edencardim@gmail.com>
+
+        Eligo <http://eligo.co.uk/>
+
+        ether:Karen Etheridge <ether@cpan.org>
+
+        evdb:Edmund von der Burg <evdb@ecclestoad.co.uk>
+
+        faxm0dem:Fabien Wernli <cpan@faxm0dem.org>
+
+        felliott:Fitz Elliott <fitz.elliott@gmail.com>
+
+        freetime:Bill Moseley <moseley@hank.org>
+
+        frew:Arthur Axel "fREW" Schmidt <frioux@gmail.com>
+
+        gbjk:Gareth Kirwan <gbjk@thermeon.com>
+
+        Getty:Torsten Raudssus <torsten@raudss.us>
+
+        goraxe:Gordon Irving <goraxe@cpan.org>
+
+        gphat:Cory G Watson <gphat@cpan.org>
+
+        Grant Street Group <http://www.grantstreet.com/>
+
+        groditi:Guillermo Roditi <groditi@cpan.org>
+
+        gshank:Gerda Shank <gshank@cpan.org>
+
+        guacamole:Fred Steinberg <fred.steinberg@gmail.com>
+
+        Haarg:Graham Knop <haarg@haarg.org>
+
+        hobbs:Andrew Rodland <andrew@cleverdomain.org>
+
+        Ian Wells <ijw@cack.org.uk>
+
+        idn:Ian Norton <i.norton@shadowcat.co.uk>
+
+        ilmari:Dagfinn Ilmari Mannsåker <ilmari@ilmari.org>
 
-    (I mostly consider myself "project founder" these days but the AUTHOR
-    heading is traditional :)
+        initself:Mike Baas <mike@initselftech.com>
 
-CONTRIBUTORS
-    abraxxa: Alexander Hartmaier <abraxxa@cpan.org>
+        ironcamel:Naveed Massjouni <naveedm9@gmail.com>
 
-    acca: Alexander Kuznetsov <acca@cpan.org>
+        jasonmay:Jason May <jason.a.may@gmail.com>
 
-    aherzog: Adam Herzog <adam@herzogdesigns.com>
+        jawnsy:Jonathan Yu <jawnsy@cpan.org>
 
-    Alexander Keusch <cpan@keusch.at>
+        jegade:Jens Gassmann <jens.gassmann@atomix.de>
 
-    alexrj: Alessandro Ranellucci <aar@cpan.org>
+        jeneric:Eric A. Miller <emiller@cpan.org>
 
-    alnewkirk: Al Newkirk <we@ana.im>
+        jesper:Jesper Krogh <jesper@krogh.cc>
 
-    amiri: Amiri Barksdale <amiri@metalabel.com>
+        Jesse Sheidlower <jester@panix.com>
 
-    amoore: Andrew Moore <amoore@cpan.org>
+        jgoulah:John Goulah <jgoulah@cpan.org>
 
-    andrewalker: Andre Walker <andre@andrewalker.net>
+        jguenther:Justin Guenther <jguenther@cpan.org>
 
-    andyg: Andy Grundman <andy@hybridized.org>
+        jhannah:Jay Hannah <jay@jays.net>
 
-    ank: Andres Kievsky
+        jmac:Jason McIntosh <jmac@appleseed-sc.com>
 
-    arc: Aaron Crane <arc@cpan.org>
+        jmmills:Jason M. Mills <jmmills@cpan.org>
 
-    arcanez: Justin Hunter <justin.d.hunter@gmail.com>
+        jnapiorkowski:John Napiorkowski <jjn1056@yahoo.com>
 
-    ash: Ash Berlin <ash@cpan.org>
+        Joe Carlson <jwcarlson@lbl.gov>
 
-    bert: Norbert Csongrádi <bert@cpan.org>
+        jon:Jon Schutz <jjschutz@cpan.org>
 
-    blblack: Brandon L. Black <blblack@gmail.com>
+        Jordan Metzmeier <jmetzmeier@magazines.com>
 
-    bluefeet: Aran Deltac <bluefeet@cpan.org>
+        jshirley:J. Shirley <jshirley@gmail.com>
 
-    bphillips: Brian Phillips <bphillips@cpan.org>
+        kaare:Kaare Rasmussen
 
-    boghead: Bryan Beeley <cpan@beeley.org>
+        kd:Kieren Diment <diment@gmail.com>
 
-    brd: Brad Davis <brd@FreeBSD.org>
+        konobi:Scott McWhirter <konobi@cpan.org>
 
-    bricas: Brian Cassidy <bricas@cpan.org>
+        lejeunerenard:Sean Zellmer <sean@lejeunerenard.com>
 
-    brunov: Bruno Vecchi <vecchi.b@gmail.com>
+        littlesavage:Alexey Illarionov <littlesavage@orionet.ru>
 
-    caelum: Rafael Kitover <rkitover@cpan.org>
+        lukes:Luke Saunders <luke.saunders@gmail.com>
 
-    caldrin: Maik Hentsche <maik.hentsche@amd.com>
+        marcus:Marcus Ramberg <mramberg@cpan.org>
 
-    castaway: Jess Robinson
+        mateu:Mateu X. Hunter <hunter@missoula.org>
 
-    claco: Christopher H. Laco
+        Matt LeBlanc <antirice@gmail.com>
 
-    clkao: CL Kao
+        Matt Sickler <imMute@msk4.com>
 
-    da5id: David Jack Olrik <djo@cpan.org>
+        mattlaw:Matt Lawrence
 
-    dariusj: Darius Jokilehto <dariusjokilehto@yahoo.co.uk>
+        mattp:Matt Phillips <mattp@cpan.org>
 
-    davewood: David Schmidt <davewood@gmx.at>
+        mdk:Mark Keating <m.keating@shadowcat.co.uk>
 
-    daxim: Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>
+        melo:Pedro Melo <melo@simplicidade.org>
 
-    debolaz: Anders Nor Berle <berle@cpan.org>
+        metaperl:Terrence Brannon <metaperl@gmail.com>
 
-    dew: Dan Thomas <dan@godders.org>
+        michaelr:Michael Reddick <michael.reddick@gmail.com>
 
-    dkubb: Dan Kubb <dan.kubb-cpan@onautopilot.com>
+        milki:Jonathan Chu <milki@rescomp.berkeley.edu>
 
-    dnm: Justin Wheeler <jwheeler@datademons.com>
+        minty:Murray Walker <perl@minty.org>
 
-    dpetrov: Dimitar Petrov <mitakaa@gmail.com>
+        mithaldu:Christian Walde <walde.christian@gmail.com>
 
-    dwc: Daniel Westermann-Clark <danieltwc@cpan.org>
+        mjemmeson:Michael Jemmeson <michael.jemmeson@gmail.com>
 
-    dyfrgi: Michael Leuchtenburg <michael@slashhome.org>
+        mna:Maya
 
-    edenc: Eden Cardim <edencardim@gmail.com>
+        mo:Moritz Onken <onken@netcubed.de>
 
-    ether: Karen Etheridge <ether@cpan.org>
+        moltar:Roman Filippov <romanf@cpan.org>
 
-    felliott: Fitz Elliott <fitz.elliott@gmail.com>
+        moritz:Moritz Lenz <moritz@faui2k3.org>
 
-    freetime: Bill Moseley <moseley@hank.org>
+        mrf:Mike Francis <ungrim97@gmail.com>
 
-    frew: Arthur Axel "fREW" Schmidt <frioux@gmail.com>
+        mst:Matt S. Trout <mst@shadowcat.co.uk>
 
-    goraxe: Gordon Irving <goraxe@cpan.org>
+        mstratman:Mark A. Stratman <stratman@gmail.com>
 
-    gphat: Cory G Watson <gphat@cpan.org>
+        ned:Neil de Carteret <n3dst4@gmail.com>
 
-    Grant Street Group <http://www.grantstreet.com/>
+        nigel:Nigel Metheringham <nigelm@cpan.org>
 
-    groditi: Guillermo Roditi <groditi@cpan.org>
+        ningu:David Kamholz <dkamholz@cpan.org>
 
-    Haarg: Graham Knop <haarg@haarg.org>
+        Nniuq:Ron "Quinn" Straight" <quinnfazigu@gmail.org>
 
-    hobbs: Andrew Rodland <arodland@cpan.org>
+        norbi:Norbert Buchmuller <norbi@nix.hu>
 
-    ilmari: Dagfinn Ilmari Mannsåker <ilmari@ilmari.org>
+        nothingmuch:Yuval Kogman <nothingmuch@woobling.org>
 
-    initself: Mike Baas <mike@initselftech.com>
+        nuba:Nuba Princigalli <nuba@cpan.org>
 
-    ironcamel: Naveed Massjouni <naveedm9@gmail.com>
+        Numa:Dan Sully <daniel@cpan.org>
 
-    jawnsy: Jonathan Yu <jawnsy@cpan.org>
+        oalders:Olaf Alders <olaf@wundersolutions.com>
 
-    jasonmay: Jason May <jason.a.may@gmail.com>
+        Olly Betts <olly@survex.com>
 
-    jesper: Jesper Krogh
+        osfameron:Hakim Cassimally <osfameron@cpan.org>
 
-    jgoulah: John Goulah <jgoulah@cpan.org>
+        ovid:Curtis "Ovid" Poe <ovid@cpan.org>
 
-    jguenther: Justin Guenther <jguenther@cpan.org>
+        oyse:Øystein Torget <oystein.torget@dnv.com>
 
-    jhannah: Jay Hannah <jay@jays.net>
+        paulm:Paul Makepeace <paulm+pause@paulm.com>
 
-    jmac: Jason McIntosh <jmac@appleseed-sc.com>
+        penguin:K J Cheetham <jamie@shadowcatsystems.co.uk>
 
-    jnapiorkowski: John Napiorkowski <jjn1056@yahoo.com>
+        perigrin:Chris Prather <chris@prather.org>
 
-    jon: Jon Schutz <jjschutz@cpan.org>
+        Peter Siklósi <einon@einon.hu>
 
-    jshirley: J. Shirley <jshirley@gmail.com>
+        Peter Valdemar Mørch <peter@morch.com>
 
-    kaare: Kaare Rasmussen
+        peter:Peter Collingbourne <peter@pcc.me.uk>
 
-    konobi: Scott McWhirter
+        phaylon:Robert Sedlacek <phaylon@dunkelheit.at>
 
-    littlesavage: Alexey Illarionov <littlesavage@orionet.ru>
+        plu:Johannes Plunien <plu@cpan.org>
 
-    lukes: Luke Saunders <luke.saunders@gmail.com>
+        Possum:Daniel LeWarne <possum@cpan.org>
 
-    marcus: Marcus Ramberg <mramberg@cpan.org>
+        pplu:Jose Luis Martinez <jlmartinez@capside.com>
 
-    mattlaw: Matt Lawrence
+        quicksilver:Jules Bean <jules@jellybean.co.uk>
 
-    mattp: Matt Phillips <mattp@cpan.org>
+        racke:Stefan Hornburg <racke@linuxia.de>
 
-    michaelr: Michael Reddick <michael.reddick@gmail.com>
+        rafl:Florian Ragwitz <rafl@debian.org>
 
-    milki: Jonathan Chu <milki@rescomp.berkeley.edu>
+        rainboxx:Matthias Dietrich <perl@rb.ly>
 
-    mithaldu: Christian Walde <walde.christian@gmail.com>
+        rbo:Robert Bohne <rbo@cpan.org>
 
-    mjemmeson: Michael Jemmeson <michael.jemmeson@gmail.com>
+        rbuels:Robert Buels <rmb32@cornell.edu>
 
-    mstratman: Mark A. Stratman <stratman@gmail.com>
+        rdj:Ryan D Johnson <ryan@innerfence.com>
 
-    ned: Neil de Carteret
+        Relequestual:Ben Hutton <relequestual@gmail.com>
 
-    nigel: Nigel Metheringham <nigelm@cpan.org>
+        renormalist:Steffen Schwigon <schwigon@cpan.org>
 
-    ningu: David Kamholz <dkamholz@cpan.org>
+        ribasushi:Peter Rabbitson <ribasushi@cpan.org>
 
-    Nniuq: Ron "Quinn" Straight" <quinnfazigu@gmail.org>
+        rjbs:Ricardo Signes <rjbs@cpan.org>
 
-    norbi: Norbert Buchmuller <norbi@nix.hu>
+        Robert Krimen <rkrimen@cpan.org>
 
-    nuba: Nuba Princigalli <nuba@cpan.org>
+        Robert Olson <bob@rdolson.org>
 
-    Numa: Dan Sully <daniel@cpan.org>
+        robkinyon:Rob Kinyon <rkinyon@cpan.org>
 
-    ovid: Curtis "Ovid" Poe <ovid@cpan.org>
+        Roman Ardern-Corris <spam_in@3legs.com>
 
-    oyse: Øystein Torget <oystein.torget@dnv.com>
+        ruoso:Daniel Ruoso <daniel@ruoso.com>
 
-    paulm: Paul Makepeace
+        Sadrak:Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>
 
-    penguin: K J Cheetham
+        sc_:Just Another Perl Hacker
 
-    perigrin: Chris Prather <chris@prather.org>
+        schwern:Michael G Schwern <mschwern@cpan.org>
 
-    peter: Peter Collingbourne <peter@pcc.me.uk>
+        Scott R. Godin <webdragon.net@gmail.com>
 
-    Peter Siklósi <einon@einon.hu>
+        scotty:Scotty Allen <scotty@scottyallen.com>
 
-    Peter Valdemar Mørch <peter@morch.com>
+        semifor:Marc Mims <marc@questright.com>
 
-    phaylon: Robert Sedlacek <phaylon@dunkelheit.at>
+        Simon Elliott <cpan@browsing.co.uk>
 
-    plu: Johannes Plunien <plu@cpan.org>
+        SineSwiper:Brendan Byrd <perl@resonatorsoft.org>
 
-    Possum: Daniel LeWarne <possum@cpan.org>
+        skaufman:Samuel Kaufman <sam@socialflow.com>
 
-    quicksilver: Jules Bean
+        solomon:Jared Johnson <jaredj@nmgi.com>
 
-    rafl: Florian Ragwitz <rafl@debian.org>
+        spb:Stephen Bennett <stephen@freenode.net>
 
-    rainboxx: Matthias Dietrich <perl@rb.ly>
+        Squeeks <squeek@cpan.org>
 
-    rbo: Robert Bohne <rbo@cpan.org>
+        srezic:Slaven Rezic <slaven@rezic.de>
 
-    rbuels: Robert Buels <rmb32@cornell.edu>
+        sszabo:Stephan Szabo <sszabo@bigpanda.com>
 
-    rdj: Ryan D Johnson <ryan@innerfence.com>
+        Stephen Peters <steve@stephenpeters.me>
 
-    ribasushi: Peter Rabbitson <ribasushi@cpan.org>
+        stonecolddevin:Devin Austin <dhoss@cpan.org>
 
-    rjbs: Ricardo Signes <rjbs@cpan.org>
+        talexb:Alex Beamish <talexb@gmail.com>
 
-    robkinyon: Rob Kinyon <rkinyon@cpan.org>
+        tamias:Ronald J Kimball <rjk@tamias.net>
 
-    Robert Olson <bob@rdolson.org>
+        TBSliver:Tom Bloor <t.bloor@shadowcat.co.uk>
 
-    moltar: Roman Filippov <romanf@cpan.org>
+        teejay:Aaron Trevena <teejay@cpan.org>
 
-    Sadrak: Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>
+        theorbtwo:James Mastros <james@mastros.biz>
 
-    sc_: Just Another Perl Hacker
+        Thomas Kratz <tomk@cpan.org>
 
-    scotty: Scotty Allen <scotty@scottyallen.com>
+        timbunce:Tim Bunce <tim.bunce@pobox.com>
 
-    semifor: Marc Mims <marc@questright.com>
+        Todd Lipcon
 
-    SineSwiper: Brendan Byrd <bbyrd@cpan.org>
+        Tom Hukins <tom@eborcom.com>
 
-    solomon: Jared Johnson <jaredj@nmgi.com>
+        tommy:Tommy Butler <tbutler.cpan.org@internetalias.net>
 
-    spb: Stephen Bennett <stephen@freenode.net>
+        tonvoon:Ton Voon <ton.voon@opsview.com>
 
-    Squeeks <squeek@cpan.org>
+        triode:Pete Gamache <gamache@cpan.org>
 
-    sszabo: Stephan Szabo <sszabo@bigpanda.com>
+        typester:Daisuke Murase <typester@cpan.org>
 
-    talexb: Alex Beamish <talexb@gmail.com>
+        uree:Oriol Soriano <oriol.soriano@capside.com>
 
-    tamias: Ronald J Kimball <rjk@tamias.net>
+        uwe:Uwe Voelker <uwe@uwevoelker.de>
 
-    teejay : Aaron Trevena <teejay@cpan.org>
+        victori:Victor Igumnov <victori@cpan.org>
 
-    Todd Lipcon
+        wdh:Will Hawes <wdhawes@gmail.com>
 
-    Tom Hukins
+        wesm:Wes Malone <wes@mitsi.com>
 
-    tonvoon: Ton Voon <tonvoon@cpan.org>
+        willert:Sebastian Willert <willert@cpan.org>
 
-    triode: Pete Gamache <gamache@cpan.org>
+        wintermute:Toby Corkindale <tjc@cpan.org>
 
-    typester: Daisuke Murase <typester@cpan.org>
+        wreis:Wallace Reis <wreis@cpan.org>
 
-    victori: Victor Igumnov <victori@cpan.org>
+        xenoterracide:Caleb Cushing <xenoterracide@gmail.com>
 
-    wdh: Will Hawes
+        yrlnry:Mark Jason Dominus <mjd@plover.com>
 
-    wesm: Wes Malone <wes@mitsi.com>
+        zamolxes:Bogdan Lucaciu <bogdan@wiz.ro>
 
-    willert: Sebastian Willert <willert@cpan.org>
+        Zefram:Andrew Main <zefram@fysh.org>
 
-    wreis: Wallace Reis <wreis@cpan.org>
+    The canonical source of authors and their details is the AUTHORS file at
+    the root of this distribution (or repository). The canonical source of
+    per-line authorship is the git repository history itself.
 
-    xenoterracide: Caleb Cushing <xenoterracide@gmail.com>
+CAT HERDERS
+    The fine folks nudging the project in a particular direction:
 
-    yrlnry: Mark Jason Dominus <mjd@plover.com>
+        ribasushi: Peter Rabbitson <ribasushi@cpan.org> (present day
+        maintenance and controlled evolution)
 
-    zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
+        castaway: Jess Robinson <castaway@desert-island.me.uk> (lions share
+        of the reference documentation and manuals)
 
-    Zefram: Andrew Main <zefram@fysh.org>
+        mst: Matt S Trout <mst@shadowcat.co.uk> (project founder - original
+        idea, architecture and implementation)
 
-COPYRIGHT
-    Copyright (c) 2005 - 2011 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as
-    listed above.
+COPYRIGHT AND LICENSE
+    Copyright (c) 2005 by mst, castaway, ribasushi, and other DBIx::Class
+    "AUTHORS" as listed above and in AUTHORS.
 
-LICENSE
     This library is free software and may be distributed under the same
-    terms as perl itself.
+    terms as perl5 itself. See LICENSE for the complete licensing terms.
 
diff --git a/var/tmp/source/RIBASUSHI/DBIx-Class-0.08270/DBIx-Class-0.08270/examples/Schema/db/example.db b/var/tmp/source/RIBASUSHI/DBIx-Class-0.082800/DBIx-Class-0.082800/examples/Schema/db/example.db
index 9d2bf228..6966b204 100644
Binary files a/var/tmp/source/RIBASUSHI/DBIx-Class-0.08270/DBIx-Class-0.08270/examples/Schema/db/example.db and b/var/tmp/source/RIBASUSHI/DBIx-Class-0.082800/DBIx-Class-0.082800/examples/Schema/db/example.db differ
@@ -44,13 +44,15 @@ DBIx::Class::AccessorGroup - See Class::Accessor::Grouped
 
 This class now exists in its own right on CPAN as Class::Accessor::Grouped
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
-
@@ -41,7 +41,7 @@ sub pod_authorlic_text {
 
   return join ("\n\n",
     '=head1 AUTHORS',
-    'See L<DBIx::Class/CONTRIBUTORS>',
+    'See L<DBIx::Class/AUTHORS>',
     '=head1 LICENSE',
     'You may distribute this code under the same terms as Perl itself',
     '=cut',
@@ -15,7 +15,7 @@ use MooseX::Types::JSON qw(JSON);
 use MooseX::Types::Path::Class qw(Dir File);
 use MooseX::Types::LoadableClass qw(LoadableClass);
 use Try::Tiny;
-use namespace::autoclean;
+use namespace::clean;
 
 =head1 NAME
 
@@ -451,7 +451,7 @@ sub insert {
   $rs ||= $self->resultset();
   $set ||= $self->set();
   my $resultset = $self->schema->resultset($rs);
-  my $obj = $resultset->create( $set );
+  my $obj = $resultset->new_result($set)->insert;
   print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n" if (!$self->quiet);
 }
 
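
The switch from create() to new_result()->insert above avoids create()'s
multi-create machinery for what is a plain single-row insert. Per the
ResultSet documentation create() is essentially a shorthand for the
two-step form; a minimal sketch of the equivalence (the 'Artist' source
and the $schema handle are illustrative only):

    my $row = $schema->resultset('Artist')->create({ name => 'Shiny New Band' });

    # ... is roughly equivalent to ...

    my $row = $schema->resultset('Artist')
                     ->new_result({ name => 'Shiny New Band' })
                     ->insert;
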
@@ -582,13 +582,16 @@ sub _find_stanza {
   return $cfg;
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -34,4 +34,17 @@ sub search_where {
     return $class->resultset_instance->search($where, $attr);
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -4,14 +4,21 @@ package # hide from PAUSE Indexer
 use strict;
 use warnings;
 
+use Scalar::Util 'blessed';
+use namespace::clean;
+
 sub mk_group_accessors {
     my ($class, $group, @cols) = @_;
 
     foreach my $col (@cols) {
-        my($accessor, $col) = ref $col ? @$col : (undef, $col);
+        my($accessor, $col) = ref $col eq 'ARRAY' ? @$col : (undef, $col);
 
         my($ro_meth, $wo_meth);
-        if( defined $accessor and ($accessor ne $col)) {
+        if (defined blessed $col and $col->isa('Class::DBI::Column')) {
+            $ro_meth = $col->accessor;
+            $wo_meth = $col->mutator;
+        }
+        elsif (defined $accessor and ($accessor ne $col)) {
             $ro_meth = $wo_meth = $accessor;
         }
         else {
@@ -102,4 +102,17 @@ sub STORE {
                 : $obj->set_column($col => shift);
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -1,14 +1,13 @@
 package # hide from PAUSE
     DBIx::Class::CDBICompat::Constructor;
 
-use base qw(DBIx::Class::CDBICompat::ImaDBI);
-
-use Sub::Name();
-
 use strict;
 use warnings;
 
+use base 'DBIx::Class::CDBICompat::ImaDBI';
+
 use Carp;
+use DBIx::Class::_Util qw(quote_sub perlstring);
 
 __PACKAGE__->set_sql(Retrieve => <<'');
 SELECT __ESSENTIAL__
@@ -17,17 +16,16 @@ WHERE  %s
 
 sub add_constructor {
     my ($class, $method, $fragment) = @_;
-    return croak("constructors needs a name") unless $method;
 
-    no strict 'refs';
-    my $meth = "$class\::$method";
-    return carp("$method already exists in $class")
-            if *$meth{CODE};
+    croak("constructors needs a name") unless $method;
+
+    carp("$method already exists in $class") && return
+       if $class->can($method);
 
-    *$meth = Sub::Name::subname $meth => sub {
-            my $self = shift;
-            $self->sth_to_objects($self->sql_Retrieve($fragment), \@_);
-    };
+    quote_sub "${class}::${method}" => sprintf( <<'EOC', perlstring $fragment );
+      my $self = shift;
+      $self->sth_to_objects($self->sql_Retrieve(%s), \@_);
+EOC
 }
 
 1;
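
The rewrite above swaps the hand-rolled glob assignment via Sub::Name for
quote_sub from DBIx::Class::_Util, which installs the method from a quoted
string of code (in the same spirit as Sub::Quote on CPAN). A minimal sketch
of the general pattern, written against the Sub::Quote interface; the
package, method and capture names are illustrative, not part of DBIC:

    use Sub::Quote 'quote_sub';

    # Installs My::Class::greet, compiled from the quoted body.
    # The capture hash makes $greeting visible inside the generated sub.
    quote_sub 'My::Class::greet' => q{
        my $self = shift;
        return "$greeting, " . $self->name;
    }, { '$greeting' => \'Hello' };
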
@@ -33,4 +33,17 @@ sub copy {
     return $self->next::method({ $primary_columns[0] => $arg });
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -4,7 +4,7 @@ package # hide from PAUSE
 use strict;
 use warnings;
 use DBIx::ContextualFetch;
-use Sub::Name ();
+use DBIx::Class::_Util qw(quote_sub perlstring);
 
 use base qw(Class::Data::Inheritable);
 
@@ -81,26 +81,20 @@ sub __driver {
 
 sub set_sql {
   my ($class, $name, $sql) = @_;
-  no strict 'refs';
-  my $sql_name = "sql_${name}";
-  my $full_sql_name = join '::', $class, $sql_name;
-  *$full_sql_name = Sub::Name::subname $full_sql_name,
-    sub {
-      my $sql = $sql;
-      my $class = shift;
-      return $class->storage->dbh_do(
-        _prepare_sth => $class->transform_sql($sql, @_)
-      );
-    };
-  if ($sql =~ /select/i) {
-    my $search_name = "search_${name}";
-    my $full_search_name = join '::', $class, $search_name;
-    *$full_search_name = Sub::Name::subname $full_search_name,
-      sub {
-        my ($class, @args) = @_;
-        my $sth = $class->$sql_name;
-        return $class->sth_to_objects($sth, \@args);
-      };
+
+  quote_sub "${class}::sql_${name}", sprintf( <<'EOC', perlstring $sql );
+    my $class = shift;
+    return $class->storage->dbh_do(
+      _prepare_sth => $class->transform_sql(%s, @_)
+    );
+EOC
+
+
+  if ($sql =~ /select/i) {  # FIXME - this should be anchore surely...?
+    quote_sub "${class}::search_${name}", sprintf( <<'EOC', "sql_$name" );
+      my ($class, @args) = @_;
+      $class->sth_to_objects( $class->%s, \@args);
+EOC
   }
 }
 
@@ -32,9 +32,21 @@ sub _init_result_source_instance {
   return $table;
 }
 
+=head1 FURTHER QUESTIONS?
 
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-package DBIx::Class::CDBICompat::Iterator::ResultSet;
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
+package # hide
+  DBIx::Class::CDBICompat::Iterator::ResultSet;
 
 use strict;
 use warnings;
@@ -31,4 +31,17 @@ sub remove_from_object_index {}
 
 sub clear_object_index {}
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -3,7 +3,8 @@ package
 
 use strict;
 use warnings;
-use Sub::Name ();
+
+use DBIx::Class::_Util 'quote_sub';
 
 =head1 NAME
 
@@ -23,20 +24,26 @@ my %method2key = (
     args            => 'args',
 );
 
+quote_sub __PACKAGE__ . "::$_" => "\$_[0]->{$method2key{$_}}"
+  for keys %method2key;
+
 sub new {
     my($class, $args) = @_;
 
     return bless $args, $class;
 }
 
-for my $method (keys %method2key) {
-    my $key = $method2key{$method};
-    my $code = sub {
-        $_[0]->{$key};
-    };
+=head1 FURTHER QUESTIONS?
 
-    no strict 'refs';
-    *{$method} = Sub::Name::subname $method, $code;
-}
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
 
 1;
@@ -3,11 +3,11 @@ package # hide from PAUSE
 
 use strict;
 use warnings;
-use Sub::Name ();
-use base qw/Class::Data::Inheritable/;
+use base 'Class::Data::Inheritable';
 
 use Clone;
 use DBIx::Class::CDBICompat::Relationship;
+use DBIx::Class::_Util qw(quote_sub perlstring);
 
 __PACKAGE__->mk_classdata('__meta_info' => {});
 
@@ -40,6 +40,13 @@ sub _declare_has_a {
 
   my $rel_info;
 
+  # Class::DBI allows Non database has_a with implicit deflate and inflate
+  # Hopefully the following will catch Non-database tables.
+  if( !$f_class->isa('DBIx::Class::Row') and !$f_class->isa('Class::DBI::Row') ) {
+    $args{'inflate'} ||= sub { $f_class->new(shift) }; # implicit inflate by calling new
+    $args{'deflate'} ||= sub { shift() . '' }; # implicit deflate by stringification
+  }
+
   if ($args{'inflate'} || $args{'deflate'}) { # Non-database has_a
     if (!ref $args{'inflate'}) {
       my $meth = $args{'inflate'};
@@ -119,19 +126,14 @@ sub has_many {
   );
 
   if (@f_method) {
-    no strict 'refs';
-    no warnings 'redefine';
-    my $post_proc = sub { my $o = shift; $o = $o->$_ for @f_method; $o; };
-    my $name = join '::', $class, $rel;
-    *$name = Sub::Name::subname $name,
-      sub {
-        my $rs = shift->search_related($rel => @_);
-        $rs->{attrs}{record_filter} = $post_proc;
-        return (wantarray ? $rs->all : $rs);
-      };
+    quote_sub "${class}::${rel}", sprintf( <<'EOC', perlstring $rel), { '$rf' => \sub { my $o = shift; $o = $o->$_ for @f_method; $o } };
+      my $rs = shift->search_related( %s => @_);
+      $rs->{attrs}{record_filter} = $rf;
+      return (wantarray ? $rs->all : $rs);
+EOC
+
     return 1;
   }
-
 }
 
 
@@ -200,4 +202,21 @@ sub search {
   $self->next::method($where, $attrs);
 }
 
+sub new_related {
+  return shift->search_related(shift)->new_result(shift);
+}
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
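
The new block in _declare_has_a above restores a piece of Class::DBI
behaviour: a has_a() pointing at a class that is neither a DBIx::Class::Row
nor a Class::DBI::Row now defaults to inflating via $f_class->new($value)
and deflating via plain stringification. A sketch of what those defaults
amount to; the My::Film class, homepage column and URI target are
illustrative only:

    # under CDBICompat, this declaration ...
    My::Film->has_a( homepage => 'URI' );

    # ... now behaves as if written with explicit callbacks:
    My::Film->has_a( homepage => 'URI',
        inflate => sub { URI->new(shift) },   # $f_class->new($value)
        deflate => sub { shift() . '' },      # stringify the object
    );
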
@@ -100,5 +100,17 @@ sub _do_transformation {
     return 1;
 }
 
-1;
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
+=cut
+
+1;
@@ -45,9 +45,10 @@ __PACKAGE__->load_own_components(qw/
   Iterator
 /);
 
-            #DBIx::Class::ObjIndexStubs
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::CDBICompat - Class::DBI Compatibility layer.
@@ -165,13 +166,13 @@ Relationships between tables (has_a, has_many...) must be declared after all tab
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
-
-=head1 LICENSE
+=head1 FURTHER QUESTIONS?
 
-You may distribute this code under the same terms as Perl itself.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=cut
+=head1 COPYRIGHT AND LICENSE
 
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -127,6 +127,8 @@ sub unimport {
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::Carp - Provides advanced Carp::Clan-like warning functions for DBIx::Class internals
@@ -179,4 +181,15 @@ same ruleset as L</carp>).
 Like L</carp> but warns only once for the life of the perl interpreter
 (regardless of callsite).
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
 =cut
@@ -16,6 +16,8 @@ __PACKAGE__->load_components(qw/
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::Core - Core set of DBIx::Class modules
@@ -51,12 +53,13 @@ The core modules currently are:
 A better overview of the methods found in a Result class can be found
 in L<DBIx::Class::Manual::ResultClass>.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -81,4 +81,17 @@ sub all {
   return @all;
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -267,13 +267,16 @@ Alias for L</txn_rollback>
 
 =end hidden
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -40,8 +40,8 @@ overload fallback to give natural boolean/numeric values.
 This is meant for internal use by L<DBIx::Class>'s C<throw_exception>
 code, and shouldn't be used directly elsewhere.
 
-Expects a scalar exception message.  The optional argument
-C<$stacktrace> tells it to output a full trace similar to L<Carp/confess>.
+Expects a scalar exception message. The optional boolean C<$stacktrace>
+causes it to output a full trace similar to L<confess|Carp/DESCRIPTION>.
 
   DBIx::Class::Exception->throw('Foo');
   try { ... } catch { DBIx::Class::Exception->throw(shift) }
@@ -88,13 +88,16 @@ sub rethrow {
     die shift;
 }
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -2,16 +2,17 @@ package DBIx::Class::FilterColumn;
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Row/;
+use base 'DBIx::Class::Row';
+use SQL::Abstract 'is_literal_value';
+use namespace::clean;
 
 sub filter_column {
   my ($self, $col, $attrs) = @_;
 
   my $colinfo = $self->column_info($col);
 
-  $self->throw_exception('FilterColumn does not work with InflateColumn')
-    if $self->isa('DBIx::Class::InflateColumn') &&
-      defined $colinfo->{_inflate_info};
+  $self->throw_exception("FilterColumn can not be used on a column with a declared InflateColumn inflator")
+    if defined $colinfo->{_inflate_info} and $self->isa('DBIx::Class::InflateColumn');
 
   $self->throw_exception("No such column $col to filter")
     unless $self->has_column($col);
@@ -31,9 +32,9 @@ sub filter_column {
 sub _column_from_storage {
   my ($self, $col, $value) = @_;
 
-  return $value unless defined $value;
+  return $value if is_literal_value($value);
 
-  my $info = $self->column_info($col)
+  my $info = $self->result_source->column_info($col)
     or $self->throw_exception("No column info for $col");
 
   return $value unless exists $info->{_filter_info};
@@ -46,7 +47,9 @@ sub _column_from_storage {
 sub _column_to_storage {
   my ($self, $col, $value) = @_;
 
-  my $info = $self->column_info($col) or
+  return $value if is_literal_value($value);
+
+  my $info = $self->result_source->column_info($col) or
     $self->throw_exception("No column info for $col");
 
   return $value unless exists $info->{_filter_info};
@@ -60,20 +63,25 @@ sub get_filtered_column {
   my ($self, $col) = @_;
 
   $self->throw_exception("$col is not a filtered column")
-    unless exists $self->column_info($col)->{_filter_info};
+    unless exists $self->result_source->column_info($col)->{_filter_info};
 
   return $self->{_filtered_column}{$col}
     if exists $self->{_filtered_column}{$col};
 
   my $val = $self->get_column($col);
 
-  return $self->{_filtered_column}{$col} = $self->_column_from_storage($col, $val);
+  return $self->{_filtered_column}{$col} = $self->_column_from_storage(
+    $col, $val
+  );
 }
 
 sub get_column {
   my ($self, $col) = @_;
+
   if (exists $self->{_filtered_column}{$col}) {
-    return $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col});
+    return $self->{_column_data}{$col} ||= $self->_column_to_storage (
+      $col, $self->{_filtered_column}{$col}
+    );
   }
 
   return $self->next::method ($col);
@@ -83,10 +91,12 @@ sub get_column {
 sub get_columns {
   my $self = shift;
 
-  foreach my $col (keys %{$self->{_filtered_column}||{}}) {
-    $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col})
-      if exists $self->{_filtered_column}{$col};
-  }
+  $self->{_column_data}{$_} = $self->_column_to_storage (
+    $_, $self->{_filtered_column}{$_}
+  ) for grep
+    { ! exists $self->{_column_data}{$_} }
+    keys %{$self->{_filtered_column}||{}}
+  ;
 
   $self->next::method (@_);
 }
@@ -100,54 +110,65 @@ sub store_column {
   $self->next::method(@_);
 }
 
+sub has_column_loaded {
+  my ($self, $col) = @_;
+  return 1 if exists $self->{_filtered_column}{$col};
+  return $self->next::method($col);
+}
+
 sub set_filtered_column {
   my ($self, $col, $filtered) = @_;
 
-  # do not blow up the cache via set_column unless necessary
-  # (filtering may be expensive!)
-  if (exists $self->{_filtered_column}{$col}) {
-    return $filtered
-      if ($self->_eq_column_values ($col, $filtered, $self->{_filtered_column}{$col} ) );
-
-    $self->make_column_dirty ($col); # so the comparison won't run again
+  # unlike IC, FC does not need to deal with the 'filter' abomination
+  # thus we can short-curcuit filtering entirely and never call set_column
+  # in case this is already a dirty change OR the row never touched storage
+  if (
+    ! $self->in_storage
+      or
+    $self->is_column_changed($col)
+  ) {
+    $self->make_column_dirty($col);
+    delete $self->{_column_data}{$col};
   }
-
-  $self->set_column($col, $self->_column_to_storage($col, $filtered));
+  else {
+    $self->set_column($col, $self->_column_to_storage($col, $filtered));
+  };
 
   return $self->{_filtered_column}{$col} = $filtered;
 }
 
 sub update {
-  my ($self, $attrs, @rest) = @_;
+  my ($self, $data, @rest) = @_;
 
-  foreach my $key (keys %{$attrs||{}}) {
-    if (
-      $self->has_column($key)
-        &&
-      exists $self->column_info($key)->{_filter_info}
-    ) {
-      $self->set_filtered_column($key, delete $attrs->{$key});
+  my $colinfos = $self->result_source->columns_info;
+
+  foreach my $col (keys %{$data||{}}) {
+    if ( exists $colinfos->{$col}{_filter_info} ) {
+      $self->set_filtered_column($col, delete $data->{$col});
 
       # FIXME update() reaches directly into the object-hash
       # and we may *not* have a filtered value there - thus
       # the void-ctx filter-trigger
-      $self->get_column($key) unless exists $self->{_column_data}{$key};
+      $self->get_column($col) unless exists $self->{_column_data}{$col};
     }
   }
 
-  return $self->next::method($attrs, @rest);
+  return $self->next::method($data, @rest);
 }
 
 sub new {
-  my ($class, $attrs, @rest) = @_;
-  my $source = $attrs->{-result_source}
+  my ($class, $data, @rest) = @_;
+
+  my $rsrc = $data->{-result_source}
     or $class->throw_exception('Sourceless rows are not supported with DBIx::Class::FilterColumn');
 
-  my $obj = $class->next::method($attrs, @rest);
-  foreach my $key (keys %{$attrs||{}}) {
-    if ($obj->has_column($key) &&
-          exists $obj->column_info($key)->{_filter_info} ) {
-      $obj->set_filtered_column($key, $attrs->{$key});
+  my $obj = $class->next::method($data, @rest);
+
+  my $colinfos = $rsrc->columns_info;
+
+  foreach my $col (keys %{$data||{}}) {
+    if (exists $colinfos->{$col}{_filter_info} ) {
+      $obj->set_filtered_column($col, $data->{$col});
     }
   }
 
@@ -156,6 +177,8 @@ sub new {
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::FilterColumn - Automatically convert column data
@@ -240,3 +263,14 @@ and one, using code like this:-
 
 In this case the C<filter_from_storage> is not required, as just
 passing the database value through to perl does the right thing.
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
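
A recurring guard in the FilterColumn changes above is is_literal_value():
values supplied as literal SQL now pass through _column_to_storage /
_column_from_storage untouched instead of being fed to the filter
callbacks. A minimal sketch of the two literal forms this covers (the
column names and the $row handle are illustrative):

    # an ordinary value - filter_to_storage / filter_from_storage run as usual
    $row->set_filtered_column( balance => 42 );

    # literal SQL (scalar ref) - returned as-is by the filter plumbing
    $row->set_filtered_column( balance => \'balance + 1' );

    # literal SQL with binds (ref to arrayref) - likewise passed through
    $row->set_filtered_column( stamp => \[ 'COALESCE(?, NOW())', $maybe_ts ] );
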
@@ -98,7 +98,18 @@ passing the database value through to perl does the right thing.
 
 =item L<DBIx::Class::Row>
 
-L<copy|DBIx::Class::Row/copy>, L<delete|DBIx::Class::Row/delete>, L<discard_changes|DBIx::Class::Row/discard_changes>, L<get_dirty_columns|DBIx::Class::Row/get_dirty_columns>, L<get_from_storage|DBIx::Class::Row/get_from_storage>, L<get_inflated_columns|DBIx::Class::Row/get_inflated_columns>, L<has_column_loaded|DBIx::Class::Row/has_column_loaded>, L<in_storage|DBIx::Class::Row/in_storage>, L<inflate_result|DBIx::Class::Row/inflate_result>, L<insert|DBIx::Class::Row/insert>, L<insert_or_update|DBIx::Class::Row/insert_or_update>, L<is_changed|DBIx::Class::Row/is_changed>, L<is_column_changed|DBIx::Class::Row/is_column_changed>, L<make_column_dirty|DBIx::Class::Row/make_column_dirty>, L<register_column|DBIx::Class::Row/register_column>, L<result_source|DBIx::Class::Row/result_source>, L<set_column|DBIx::Class::Row/set_column>, L<set_columns|DBIx::Class::Row/set_columns>, L<set_inflated_columns|DBIx::Class::Row/set_inflated_columns>, L<throw_exception|DBIx::Class::Row/throw_exception>, L<update_or_insert|DBIx::Class::Row/update_or_insert>
+L<copy|DBIx::Class::Row/copy>, L<delete|DBIx::Class::Row/delete>, L<discard_changes|DBIx::Class::Row/discard_changes>, L<get_dirty_columns|DBIx::Class::Row/get_dirty_columns>, L<get_from_storage|DBIx::Class::Row/get_from_storage>, L<get_inflated_columns|DBIx::Class::Row/get_inflated_columns>, L<in_storage|DBIx::Class::Row/in_storage>, L<inflate_result|DBIx::Class::Row/inflate_result>, L<insert|DBIx::Class::Row/insert>, L<insert_or_update|DBIx::Class::Row/insert_or_update>, L<is_changed|DBIx::Class::Row/is_changed>, L<is_column_changed|DBIx::Class::Row/is_column_changed>, L<make_column_dirty|DBIx::Class::Row/make_column_dirty>, L<register_column|DBIx::Class::Row/register_column>, L<result_source|DBIx::Class::Row/result_source>, L<set_column|DBIx::Class::Row/set_column>, L<set_columns|DBIx::Class::Row/set_columns>, L<set_inflated_columns|DBIx::Class::Row/set_inflated_columns>, L<throw_exception|DBIx::Class::Row/throw_exception>, L<update_or_insert|DBIx::Class::Row/update_or_insert>
 
 =back
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
@@ -78,8 +78,8 @@ deflation/inflation as defined in the storage class. For instance, for
 a C<datetime> field the methods C<parse_datetime> and C<format_datetime>
 would be called on deflation/inflation. If the storage class does not
 provide a specialized inflator/deflator, C<[parse|format]_datetime> will
-be used as a fallback. See L<DateTime::Format> for more information on
-date formatting.
+be used as a fallback. See L<DateTime/Formatters And Stringification>
+for more information on date formatting.
 
 For more help with using components, see L<DBIx::Class::Manual::Component/USING>.
 
@@ -310,15 +310,13 @@ use the old way you'll see a warning - please fix your code then!
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-Matt S. Trout <mst@shadowcatsystems.co.uk>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 CONTRIBUTORS
-
-Aran Deltac <bluefeet@cpan.org>
-
-=head1 LICENSE
-
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -79,8 +79,8 @@ deflation/inflation as defined in the storage class. For instance, for
 a C<datetime> field the methods C<parse_datetime> and C<format_datetime>
 would be called on deflation/inflation. If the storage class does not
 provide a specialized inflator/deflator, C<[parse|format]_datetime> will
-be used as a fallback. See L<DateTime::Format> for more information on
-date formatting.
+be used as a fallback. See L<DateTime/Formatters And Stringification>
+for more information on date formatting.
 
 For more help with using components, see L<DBIx::Class::Manual::Component/USING>.
 
@@ -161,10 +161,6 @@ use the old way you'll see a warning - please fix your code then!
 
 =back
 
-=head1 AUTHOR
-
-Matt S. Trout <mst@shadowcatsystems.co.uk>
-
 =head1 INHERITED METHODS
 
 =over 4
@@ -179,11 +175,14 @@ L<copy|DBIx::Class::Row/copy>, L<delete|DBIx::Class::Row/delete>, L<discard_chan
 
 =back
 
-=head1 CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-Aran Deltac <bluefeet@cpan.org>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -43,10 +43,14 @@ sub register_column {
 sub _file_column_file {
     my ($self, $column, $filename) = @_;
 
-    my $column_info = $self->column_info($column);
+    my $column_info = $self->result_source->column_info($column);
 
     return unless $column_info->{is_file_column};
 
+    # DO NOT CHANGE
+    # This call to id() is generally incorrect - will not DTRT on
+    # multicolumn key. However changing this may introduce
+    # backwards-comp regressions, thus leaving as is
     my $id = $self->id || $self->throw_exception(
         'id required for filename generation'
     );
@@ -60,8 +64,10 @@ sub _file_column_file {
 sub delete {
     my ( $self, @rest ) = @_;
 
-    for ( $self->columns ) {
-        if ( $self->column_info($_)->{is_file_column} ) {
+    my $colinfos = $self->result_source->columns_info;
+
+    for ( keys %$colinfos ) {
+        if ( $colinfos->{$_}{is_file_column} ) {
             rmtree( [$self->_file_column_file($_)->dir], 0, 0 );
             last; # if we've deleted one, we've deleted them all
         }
@@ -75,9 +81,11 @@ sub insert {
 
     # cache our file columns so we can write them to the fs
     # -after- we have a PK
+    my $colinfos = $self->result_source->columns_info;
+
     my %file_column;
-    for ( $self->columns ) {
-        if ( $self->column_info($_)->{is_file_column} ) {
+    for ( keys %$colinfos ) {
+        if ( $colinfos->{$_}{is_file_column} ) {
             $file_column{$_} = $self->$_;
             $self->store_column($_ => $self->$_->{filename});
         }
@@ -206,14 +214,16 @@ Method made to be overridden for callback purposes.
 
 sub _file_column_callback {}
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-Victor Igumnov
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-This library is free software, you can redistribute it and/or modify
-it under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -3,7 +3,9 @@ package DBIx::Class::InflateColumn;
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Row/;
+use base 'DBIx::Class::Row';
+use SQL::Abstract 'is_literal_value';
+use namespace::clean;
 
 =head1 NAME
 
@@ -87,9 +89,8 @@ sub inflate_column {
 
   my $colinfo = $self->column_info($col);
 
-  $self->throw_exception("InflateColumn does not work with FilterColumn")
-    if $self->isa('DBIx::Class::FilterColumn') &&
-      defined $colinfo->{_filter_info};
+  $self->throw_exception("InflateColumn can not be used on a column with a declared FilterColumn filter")
+    if defined $colinfo->{_filter_info} and $self->isa('DBIx::Class::FilterColumn');
 
   $self->throw_exception("No such column $col to inflate")
     unless $self->has_column($col);
@@ -103,26 +104,45 @@ sub inflate_column {
 
 sub _inflated_column {
   my ($self, $col, $value) = @_;
-  return $value unless defined $value; # NULL is NULL is NULL
-  my $info = $self->column_info($col)
+
+  return $value if (
+    ! defined $value # NULL is NULL is NULL
+      or
+    is_literal_value($value) # that would be a not-yet-reloaded literal update
+  );
+
+  my $info = $self->result_source->column_info($col)
     or $self->throw_exception("No column info for $col");
+
   return $value unless exists $info->{_inflate_info};
-  my $inflate = $info->{_inflate_info}{inflate};
-  $self->throw_exception("No inflator for $col") unless defined $inflate;
-  return $inflate->($value, $self);
+
+  return (
+    $info->{_inflate_info}{inflate}
+      ||
+    $self->throw_exception("No inflator found for '$col'")
+  )->($value, $self);
 }
 
 sub _deflated_column {
   my ($self, $col, $value) = @_;
-#  return $value unless ref $value && blessed($value); # If it's not an object, don't touch it
-  ## Leave scalar refs (ala SQL::Abstract literal SQL), untouched, deflate all other refs
-  return $value unless (ref $value && ref($value) ne 'SCALAR');
-  my $info = $self->column_info($col) or
+
+  ## Deflate any refs except for literals, pass through plain values
+  return $value if (
+    ! length ref $value
+      or
+    is_literal_value($value)
+  );
+
+  my $info = $self->result_source->column_info($col) or
     $self->throw_exception("No column info for $col");
+
   return $value unless exists $info->{_inflate_info};
-  my $deflate = $info->{_inflate_info}{deflate};
-  $self->throw_exception("No deflator for $col") unless defined $deflate;
-  return $deflate->($value, $self);
+
+  return (
+    $info->{_inflate_info}{deflate}
+      ||
+    $self->throw_exception("No deflator found for '$col'")
+  )->($value, $self);
 }
 
 =head2 get_inflated_column
@@ -138,13 +158,15 @@ Throws an exception if the column requested is not an inflated column.
 
 sub get_inflated_column {
   my ($self, $col) = @_;
+
   $self->throw_exception("$col is not an inflated column")
-    unless exists $self->column_info($col)->{_inflate_info};
+    unless exists $self->result_source->column_info($col)->{_inflate_info};
+
+  # we take care of keeping things in sync
   return $self->{_inflated_column}{$col}
     if exists $self->{_inflated_column}{$col};
 
   my $val = $self->get_column($col);
-  return $val if ref $val eq 'SCALAR';  #that would be a not-yet-reloaded sclarref update
 
   return $self->{_inflated_column}{$col} = $self->_inflated_column($col, $val);
 }
@@ -159,15 +181,22 @@ analogous to L<DBIx::Class::Row/set_column>.
 =cut
 
 sub set_inflated_column {
-  my ($self, $col, $inflated) = @_;
-  $self->set_column($col, $self->_deflated_column($col, $inflated));
-#  if (blessed $inflated) {
-  if (ref $inflated && ref($inflated) ne 'SCALAR') {
-    $self->{_inflated_column}{$col} = $inflated;
-  } else {
+  my ($self, $col, $value) = @_;
+
+  # pass through deflated stuff
+  if (! length ref $value or is_literal_value($value)) {
+    $self->set_column($col, $value);
     delete $self->{_inflated_column}{$col};
   }
-  return $inflated;
+  # need to call set_column with the deflate cycle so that
+  # relationship caches are nuked if any
+  # also does the compare-for-dirtyness and change tracking dance
+  else {
+    $self->set_column($col, $self->_deflated_column($col, $value));
+    $self->{_inflated_column}{$col} = $value;
+  }
+
+  return $value;
 }
 
 =head2 store_inflated_column
@@ -180,15 +209,18 @@ as dirty. This is directly analogous to L<DBIx::Class::Row/store_column>.
 =cut
 
 sub store_inflated_column {
-  my ($self, $col, $inflated) = @_;
-#  unless (blessed $inflated) {
-  unless (ref $inflated && ref($inflated) ne 'SCALAR') {
-      delete $self->{_inflated_column}{$col};
-      $self->store_column($col => $inflated);
-      return $inflated;
+  my ($self, $col, $value) = @_;
+
+  if (! length ref $value or is_literal_value($value)) {
+    delete $self->{_inflated_column}{$col};
+    $self->store_column($col => $value);
   }
-  delete $self->{_column_data}{$col};
-  return $self->{_inflated_column}{$col} = $inflated;
+  else {
+    delete $self->{_column_data}{$col};
+    $self->{_inflated_column}{$col} = $value;
+  }
+
+  return $value;
 }
 
 =head1 SEE ALSO
@@ -201,19 +233,16 @@ sub store_inflated_column {
 
 =back
 
-=head1 AUTHOR
-
-Matt S. Trout <mst@shadowcatsystems.co.uk>
-
-=head1 CONTRIBUTORS
-
-Daniel Westermann-Clark <danieltwc@cpan.org> (documentation)
+=head1 FURTHER QUESTIONS?
 
-Jess Robinson <cpan@desert-island.demon.co.uk>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
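
For reference, a hedged sketch of the kind of C<inflate_column> declaration
this component acts on - the C<starts_at> column and the use of
L<DateTime::Format::Pg> as the inflator/deflator are only examples:

  package MyApp::Schema::Result::Event;

  use base 'DBIx::Class::Core';
  use DateTime::Format::Pg;

  __PACKAGE__->table('event');
  __PACKAGE__->add_columns(qw( id starts_at ));
  __PACKAGE__->set_primary_key('id');

  # raw timestamp string -> DateTime object on read,
  # DateTime object -> formatted string on write
  __PACKAGE__->inflate_column( starts_at => {
    inflate => sub { DateTime::Format::Pg->parse_datetime(shift) },
    deflate => sub { DateTime::Format::Pg->format_datetime(shift) },
  });

  1;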
 
@@ -116,10 +116,6 @@ as dirty. This is directly analogous to L<DBIx::Class::Row/store_column>.
 
 =back
 
-=head1 AUTHOR
-
-Matt S. Trout <mst@shadowcatsystems.co.uk>
-
 =head1 INHERITED METHODS
 
 =over 4
@@ -130,13 +126,14 @@ L<copy|DBIx::Class::Row/copy>, L<delete|DBIx::Class::Row/delete>, L<discard_chan
 
 =back
 
-=head1 CONTRIBUTORS
-
-Daniel Westermann-Clark <danieltwc@cpan.org> (documentation)
+=head1 FURTHER QUESTIONS?
 
-Jess Robinson <cpan@desert-island.demon.co.uk>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -132,6 +132,13 @@ L<DBIx::Class::Row> - Basic row methods.
 
 L<DBIx::Class::Manual::Cookbook>
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-Aran Clary Deltac <bluefeet@cpan.org>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -146,8 +146,9 @@ Next, you can execute your complex query using bind parameters like this:
   );
 
 ... and you'll get back a perfect L<DBIx::Class::ResultSet> (except, of course,
-that you cannot modify the rows it contains, e.g. cannot call L</update>,
-L</delete>, ...  on it).
+that you cannot modify the rows it contains, e.g. cannot call
+L<update|DBIx::Class::ResultSet/update> or
+L<delete|DBIx::Class::ResultSet/delete> on it).
 
 Note that you cannot have bind parameters unless is_virtual is set to true.
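
As a hedged illustration of the call shape being described (the
C<UserFriendsComplex> source name and the bind values are hypothetical):

  # the two bind values fill the placeholders declared in the
  # virtual view's SQL, in order
  my $rs = $schema->resultset('UserFriendsComplex')->search(
    {},
    { bind => [ 12345, 12345 ] },
  );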
 
@@ -448,8 +449,8 @@ See also L<SQL::Abstract/Literal SQL with placeholders and bind values
 =head2 Software Limits
 
 When your RDBMS does not have a working SQL limit mechanism (e.g. Sybase ASE)
-and L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ> is either too slow or does
-not work at all, you can try the
+and L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ> is either
+too slow or does not work at all, you can try the
 L<software_limit|DBIx::Class::ResultSet/software_limit>
 L<DBIx::Class::ResultSet> attribute, which skips over records to simulate limits
 in the Perl layer.
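
A hedged sketch of turning that attribute on for an ordinary search (the
resultset name and paging values are illustrative):

  # rows are fetched and skipped in Perl rather than in the RDBMS,
  # so reserve this for databases with no usable LIMIT dialect
  my $page = $schema->resultset('Artist')->search(
    {},
    {
      software_limit => 1,
      rows           => 10,
      offset         => 20,
    },
  );
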
@@ -1065,7 +1066,7 @@ See L<DBIx::Class::ResultSetColumn> for more documentation.
 
 Sometimes you have a (set of) result objects that you want to put into a
 resultset without the need to hit the DB again. You can do that by using the
-L<set_cache|DBIx::Class::Resultset/set_cache> method:
+L<set_cache|DBIx::Class::ResultSet/set_cache> method:
 
  my @uploadable_groups;
  while (my $group = $groups->next) {
@@ -1380,9 +1381,11 @@ row.
   }
 
 In this example it might be hard to see where the rollbacks, releases and
-commits are happening, but it works just the same as for plain L<<txn_do>>: If
-the C<try>-block around C<txn_do> fails, a rollback is issued. If the C<try>
-succeeds, the transaction is committed (or the savepoint released).
+commits are happening, but it works just the same as for plain
+L<txn_do|DBIx::Class::Storage/txn_do>: If the L<try|Try::Tiny/try>-block
+around L<txn_do|DBIx::Class::Storage/txn_do> fails, a rollback is issued.
+If the L<try|Try::Tiny/try> succeeds, the transaction is committed
+(or the savepoint released).
 
 While you can get more fine-grained control using C<svp_begin>, C<svp_release>
 and C<svp_rollback>, it is strongly recommended to use C<txn_do> with coderefs.
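
For orientation, a minimal sketch of the recommended coderef form, assuming
the connection was made with C<auto_savepoint> enabled so that the inner
C<txn_do> is guarded by a savepoint:

  $schema->txn_do(sub {
    $schema->resultset('Artist')->create({ name => 'Outer band' });

    # a failure inside this nested txn_do rolls back only to the
    # savepoint, leaving the outer transaction's work intact
    $schema->txn_do(sub {
      $schema->resultset('Artist')->create({ name => 'Inner band' });
    });
  });
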
@@ -1840,7 +1843,7 @@ See L<DBD::mysql> for further details.
 =head3 Oracle
 
 Information about Oracle support for unicode can be found in
-L<DBD::Oracle/Unicode>.
+L<DBD::Oracle/UNICODE>.
 
 =head3 PostgreSQL
 
@@ -2202,10 +2205,9 @@ classes dynamically based on the database schema then there will be a
 significant startup delay.
 
 For production use a statically defined schema (which can be generated
-using L<DBIx::Class::Schema::Loader|DBIx::Class::Schema::Loader> to dump
-the database schema once - see
+using L<DBIx::Class::Schema::Loader> to dump the database schema once - see
 L<make_schema_at|DBIx::Class::Schema::Loader/make_schema_at> and
-L<dump_directory|DBIx::Class::Schema::Loader/dump_directory> for more
+L<dump_directory|DBIx::Class::Schema::Loader::Base/dump_directory> for more
 details on creating static schemas from a database).
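
A hedged sketch of such a one-off dump, with the schema class, dump location
and connection details all being placeholders:

  use DBIx::Class::Schema::Loader 'make_schema_at';

  # writes static Result classes for MyApp::Schema under ./lib
  make_schema_at(
    'MyApp::Schema',
    { dump_directory => './lib' },
    [ 'dbi:Pg:dbname=myapp', 'db_user', 'db_pass' ],
  );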
 
 =head2 Move Common Startup into a Base Class
@@ -2251,10 +2253,11 @@ avoiding L<Module::Find|Module::Find>.
 
 =head2 Cached statements
 
-L<DBIx::Class> normally caches all statements with L<< prepare_cached()|DBI/prepare_cached >>.
-This is normally a good idea, but if too many statements are cached, the database may use too much
-memory and may eventually run out and fail entirely.  If you suspect this may be the case, you may want
-to examine DBI's L<< CachedKids|DBI/CachedKidsCachedKids_(hash_ref) >> hash:
+L<DBIx::Class> normally caches all statements with
+L<prepare_cached()|DBI/prepare_cached>. This is normally a good idea, but if
+too many statements are cached, the database may use too much memory and may
+eventually run out and fail entirely. If you suspect this may be the case,
+you may want to examine DBI's L<CachedKids|DBI/CachedKids> hash:
 
     # print all currently cached prepared statements
     print for keys %{$schema->storage->dbh->{CachedKids}};
@@ -2277,3 +2280,14 @@ You can accomplish this with L<Tie::Cache> or L<Tie::Cache::LRU>:
     });
 
 =cut
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -60,3 +60,15 @@ are used most often.
 =item L<DBIx::Class::InflateColumn> - Making objects out of your column values.
 
 =back
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
@@ -364,12 +364,15 @@ C<MyApp::Schema::ResultSet> namespace (although we created the directory
 in the directions above we did not add, or need to add, any resultset
 classes).
 
-=head1 TODO
+=head1 FURTHER QUESTIONS?
 
-=head1 AUTHOR
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-  sc_ from irc.perl.org#dbix-class
-  Kieren Diment <kd@totaldatasolution.com>
-  Nigel Metheringham <nigelm@cpan.org>
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
@@ -78,7 +78,7 @@ lot later.
 If your database server allows you to run queries across multiple
 databases at once, then so can DBIx::Class. All you need to do is make
 sure you write the database name as part of the
-L<DBIx::Class::ResultSource/table> call. Eg:
+L<table|DBIx::Class::ResultSourceProxy::Table/table> call. Eg:
 
   __PACKAGE__->table('mydb.mytablename');
 
@@ -87,10 +87,10 @@ L<DBIx::Class::Schema/load_namespaces> call.
 
 =item .. use DBIx::Class across PostgreSQL/DB2/Oracle schemas?
 
-Add the name of the schema to the L<DBIx::Class::ResultSource/table>
-as part of the name, and make sure you give the one user you are going
-to connect with has permissions to read/write all the schemas/tables as
-necessary.
+Add the name of the schema to the table name when invoking
+L<table|DBIx::Class::ResultSourceProxy::Table/table>, and make sure the user
+you are about to connect as has permissions to read/write all the
+schemas/tables as necessary.
 
 =back
 
@@ -154,7 +154,7 @@ See L<DBIx::Class::Relationship>.
 =item .. use a relationship?
 
 Use its name. An accessor is created using the name. See examples in
-L<DBIx::Class::Manual::Cookbook/Using relationships>.
+L<DBIx::Class::Manual::Cookbook/USING RELATIONSHIPS>.
 
 =back
 
@@ -451,8 +451,8 @@ what create_related() from L<DBIx::Class::Relationship::Base> does, you could
 add this to Book.pm:
 
   sub foo {
-    my ($self, $relname, $col_data) = @_;
-    return $self->related_resultset($relname)->create($col_data);
+    my ($self, $rel_name, $col_data) = @_;
+    return $self->related_resultset($rel_name)->create($col_data);
   }
 
 Invoked like this:
@@ -658,3 +658,14 @@ Taken from:
 L<http://dev.mysql.com/doc/refman/5.1/en/resetting-permissions.html>.
 
 =back
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -6,9 +6,9 @@ DBIx::Class::Manual::Features - A boatload of DBIx::Class features with links to
 
 =head2 Large Community
 
-Currently there are 88 people listed as contributors to DBIC.  That ranges
-from documentation help, to test help, to added features, to entire database
-support.
+There are L<hundreds of DBIC contributors|DBIx::Class/AUTHORS> listed in
+F<AUTHORS>. That ranges from documentation help, to test help, to added
+features, to entire database support.
 
 =head2 Active Community
 
@@ -479,7 +479,9 @@ on our system (Microsoft SQL) is:
    ) rpt_score
  WHERE rno__row__index BETWEEN 1 AND 1
 
-See: L<DBIx::Class::ResultSet/related_resultset>, L<DBIx::ClassResultSet/search_related>, and L<DBIx::Class::ResultSet/get_column>.
+See: L<DBIx::Class::ResultSet/related_resultset>,
+L<DBIx::Class::ResultSet/search_related>, and
+L<DBIx::Class::ResultSet/get_column>.
 
 =head2 bonus rel methods
 
@@ -661,5 +663,15 @@ Better:
     price => \['price + ?', [inc => $inc]],
  });
 
-See L<SQL::Abstract/Literal_SQL_with_placeholders_and_bind_values_(subqueries)>
+See L<SQL::Abstract/Literal SQL with placeholders and bind values (subqueries)>
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -66,11 +66,11 @@ relationships must be used.
 A Schema object represents your entire table collection, plus the
 connection to the database. You can create one or more schema objects,
 connected to various databases, with various users, using the same set
-of table L</Result class> definitions.
+of table L</Result Class> definitions.
 
 At least one L<DBIx::Class::Schema> class is needed per database.
 
-=head2 Result class
+=head2 Result Class
 
 A Result class defines both a source of data (usually one per table),
 and the methods that will be available in the L</Result> objects
@@ -87,7 +87,7 @@ ResultSource objects represent the source of your data, these are
 sometimes (incorrectly) called table objects.
 
 ResultSources do not need to be directly created, a ResultSource
-instance is created for each L</Result class> in your L</Schema>, by
+instance is created for each L</Result Class> in your L</Schema>, by
 the proxied methods C<table> and C<add_columns>.
 
 See also: L<DBIx::Class::ResultSource/METHODS>
@@ -148,17 +148,20 @@ to issue multiple SQL queries.
 A normalised database is a sane database. Each table contains only
 data belonging to one concept, related tables refer to the key field
 or fields of each other. Some links to webpages about normalisation
-can be found in L<DBIx::Class::Manual::FAQ|the FAQ>.
+can be found in L<the FAQ|DBIx::Class::Manual::FAQ>.
 
 =head2 Related data
 
 In SQL, related data actually refers to data that are normalised into
 the same table. (Yes. DBIC does mis-use this term.)
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -471,4 +471,13 @@ information on this can be found in L<DBIx::Class::Manual::Troubleshooting>
 
 =back
 
-=cut
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -242,7 +242,7 @@ To join two relations at the same level, use an arrayref instead:
 
 Or combine the two:
 
-  join => { room => [ 'chair', { table => 'leg' } ]
+  join => { room => [ 'chair', { table => 'leg' } ] }
 
 =head2 Table aliases
 
@@ -274,3 +274,13 @@ The aliases are: C<room> and C<room_2>.
 
 =cut
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -192,3 +192,14 @@ L<DBIx::Class::Manual::SQLHackers>.
 
 Continue with L<DBIx::Class::Tutorial> and
 L<DBIx::Class/"WHERE TO START READING">.
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -179,12 +179,13 @@ Examples and explaining paragraphs can be repeated as necessary.
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -90,11 +90,14 @@ L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::R
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -21,7 +21,7 @@ Alternatively use the C<< storage->debug >> class method:-
 
 To send the output somewhere else set debugfh:-
 
-  $schema->storage->debugfh(IO::File->new('/tmp/trace.out', 'w');
+  $schema->storage->debugfh(IO::File->new('/tmp/trace.out', 'w'));
 
 Alternatively you can do this with the environment variable, too:-
 
@@ -158,5 +158,13 @@ can grow very large.
 
 The solution is to use the smallest practical value for LongReadLen.
 
-=cut
+=head1 FURTHER QUESTIONS?
 
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -55,5 +55,13 @@ documentation. It should behave the same way.
 Existing components, and documentation and example on how to
 develop new ones.
 
-=cut
+=head1 FURTHER QUESTIONS?
 
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -36,7 +36,6 @@ my $admin_basic = {
   %$min_json_any,
   'MooseX::Types::Path::Class'    => '0.05',
   'MooseX::Types::JSON'           => '0.02',
-  'namespace::autoclean'          => '0.09',
 };
 
 my $admin_script = {
@@ -151,11 +150,11 @@ my $reqs = {
 
   deploy => {
     req => {
-      'SQL::Translator'           => '0.11016',
+      'SQL::Translator'           => '0.11018',
     },
     pod => {
       title => 'Storage::DBI::deploy()',
-      desc => 'Modules required for L<DBIx::Class::Storage::DBI/deploy> and L<DBIx::Class::Storage::DBI/deployment_statements>',
+      desc => 'Modules required for L<DBIx::Class::Storage::DBI/deployment_statements> and L<DBIx::Class::Schema/deploy>',
     },
   },
 
@@ -454,7 +453,6 @@ my $reqs = {
         ? (
           # when changing this list make sure to adjust xt/optional_deps.t
           %$rdbms_pg,
-          ($^O ne 'MSWin32' ? ('Sys::SigAction' => '0') : ()),
           'DBD::Pg'               => '2.009002',
         ) : ()
     },
@@ -623,9 +621,8 @@ my $reqs = {
     req => {
       %$test_and_dist_json_any,
       'ExtUtils::MakeMaker' => '6.64',
-      'Pod::Inherit'        => '0.90',
-      'Pod::Tree'           => '0',
-    }
+      'Pod::Inherit'        => '0.91',
+    },
   },
 
   dist_upload => {
@@ -911,10 +908,16 @@ EOD
 Returns a hashref containing the actual errors that occurred while attempting
 to load each module in the requirement group.
 EOD
-    '=head1 AUTHOR',
-    'See L<DBIx::Class/CONTRIBUTORS>.',
-    '=head1 LICENSE',
-    'You may distribute this code under the same terms as Perl itself',
+    '=head1 FURTHER QUESTIONS?',
+    'Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.',
+    '=head1 COPYRIGHT AND LICENSE',
+    <<'EOL',
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+EOL
+
   );
 
   open (my $fh, '>', $podfn) or Carp::croak "Unable to write to $podfn: $!";
@@ -18,7 +18,7 @@ Somewhere in your build-file (e.g. L<Module::Install>'s Makefile.PL):
 
   ...
 
-  configure_requires 'DBIx::Class' => '0.08270';
+  configure_requires 'DBIx::Class' => '0.082800';
 
   require DBIx::Class::Optional::Dependencies;
 
@@ -71,8 +71,6 @@ Modules required for the DBIx::Class administrative library
 
 =item * MooseX::Types::Path::Class >= 0.05
 
-=item * namespace::autoclean >= 0.09
-
 =back
 
 Requirement group: B<admin>
@@ -99,19 +97,17 @@ Modules required for the CLI DBIx::Class interface dbicadmin
 
 =item * Text::CSV >= 1.16
 
-=item * namespace::autoclean >= 0.09
-
 =back
 
 Requirement group: B<admin_script>
 
 =head2 Storage::DBI::deploy()
 
-Modules required for L<DBIx::Class::Storage::DBI/deploy> and L<DBIx::Class::Storage::DBI/deployment_statements>
+Modules required for L<DBIx::Class::Storage::DBI/deployment_statements> and L<DBIx::Class::Schema/deploy>
 
 =over
 
-=item * SQL::Translator >= 0.11016
+=item * SQL::Translator >= 0.11018
 
 =back
 
@@ -418,7 +414,7 @@ be able to use a specific feature.
 For example if some of the requirements for C<deploy> are not available,
 the returned string could look like:
 
- SQL::Translator >= 0.11016 (see DBIx::Class::Optional::Dependencies for details)
+ SQL::Translator >= 0.11018 (see DBIx::Class::Optional::Dependencies for details)
 
 The author is expected to prepend the necessary text to this message before
 returning the actual error seen by the user.
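
A hedged sketch of how an author might act on that message (the C<deploy>
group name comes from the surrounding text, the rest is illustrative):

  require DBIx::Class::Optional::Dependencies;

  if ( ! DBIx::Class::Optional::Dependencies->req_ok_for('deploy') ) {
    die 'Schema deployment is unavailable: '
      . DBIx::Class::Optional::Dependencies->req_missing_for('deploy')
      . "\n";
  }
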
@@ -451,10 +447,14 @@ Returns a hashref containing the actual errors that occurred while attempting
 to load each module in the requirement group.
 
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-See L<DBIx::Class/CONTRIBUTORS>.
+=head1 COPYRIGHT AND LICENSE
 
-=head1 LICENSE
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-You may distribute this code under the same terms as Perl itself
@@ -147,7 +147,7 @@ Returns an B<ordered> resultset of all other objects in the same
 group excluding the one you called it on.
 
 The ordering is a backwards-compatibility artifact - if you need
-a resultset with no ordering applied use L</_siblings>
+a resultset with no ordering applied use C<_siblings>
 
 =cut
 sub siblings {
@@ -367,7 +367,30 @@ sub move_to {
 
     my $position_column = $self->position_column;
 
-    if ($self->is_column_changed ($position_column) ) {
+    my $is_txn;
+    if ($is_txn = $self->result_source->schema->storage->transaction_depth) {
+      # Reload position state from storage
+      # The thinking here is that if we are in a transaction, it is
+      # *more likely* the object went out of sync due to resultset
+      # level shenanigans. Instead of always reloading (slow) - go
+      # ahead and hand-hold only in the case of higher layers
+      # requesting the safety of a txn
+
+      $self->store_column(
+        $position_column,
+        ( $self->result_source
+                ->resultset
+                 ->search($self->_storage_ident_condition, { rows => 1, columns => $position_column })
+                  ->cursor
+                   ->next
+        )[0] || $self->throw_exception(
+          sprintf "Unable to locate object '%s' in storage - object went out of sync...?",
+          $self->ID
+        ),
+      );
+      delete $self->{_dirty_columns}{$position_column};
+    }
+    elsif ($self->is_column_changed ($position_column) ) {
       # something changed our position, we need to know where we
       # used to be - use the stashed value
       $self->store_column($position_column, delete $self->{_column_data_in_storage}{$position_column});
@@ -380,7 +403,7 @@ sub move_to {
       return 0;
     }
 
-    my $guard = $self->result_source->schema->txn_scope_guard;
+    my $guard = $is_txn ? undef : $self->result_source->schema->txn_scope_guard;
 
     my ($direction, @between);
     if ( $from_position < $to_position ) {
@@ -402,7 +425,7 @@ sub move_to {
     $self->_shift_siblings ($direction, @between);
     $self->_ordered_internal_update({ $position_column => $new_pos_val });
 
-    $guard->commit;
+    $guard->commit if $guard;
     return 1;
 }
 
@@ -719,20 +742,13 @@ sub _shift_siblings {
     if (
       first { $_ eq $position_column } ( map { @$_ } (values %{{ $rsrc->unique_constraints }} ) )
     ) {
-        my $cursor = $shift_rs->search (
+        my $clean_rs = $rsrc->resultset;
+
+        for ( $shift_rs->search (
           {}, { order_by => { "-$ord", $position_column }, select => [$position_column, @pcols] }
-        )->cursor;
-        my $rs = $rsrc->resultset;
-
-        my @all_data = $cursor->all;
-        while (my $data = shift @all_data) {
-          my $pos = shift @$data;
-          my $cond;
-          for my $i (0.. $#pcols) {
-            $cond->{$pcols[$i]} = $data->[$i];
-          }
-
-          $rs->find($cond)->update ({ $position_column => $pos + ( ($op eq '+') ? 1 : -1 ) });
+        )->cursor->all ) {
+          my $pos = shift @$_;
+          $clean_rs->find(@$_)->update ({ $position_column => $pos + ( ($op eq '+') ? 1 : -1 ) });
         }
     }
     else {
@@ -861,33 +877,31 @@ will prevent such race conditions going undetected.
 
 =head2 Multiple Moves
 
-Be careful when issuing move_* methods to multiple objects.  If
-you've pre-loaded the objects then when you move one of the objects
-the position of the other object will not reflect their new value
-until you reload them from the database - see
-L<DBIx::Class::Row/discard_changes>.
+If you have multiple same-group result objects already loaded from storage,
+you need to be careful when executing C<move_*> operations on them:
+without a L</position_column> reload the L</_position_value> of the
+"siblings" will be out of sync with the underlying storage.
+
+Starting from version C<0.082800> DBIC will implicitly perform such
+reloads when the C<move_*> happens as part of a transaction
+(a good example of such a situation is C<< $ordered_resultset->delete_all >>).
 
-There are times when you will want to move objects as groups, such
-as changing the parent of several objects at once - this directly
-conflicts with this problem.  One solution is for us to write a
-ResultSet class that supports a parent() method, for example.  Another
-solution is to somehow automagically modify the objects that exist
-in the current object's result set to have the new position value.
+If it is not possible for you to wrap the entire call-chain in a transaction,
+you will need to call L<DBIx::Class::Row/discard_changes> to get an object
+up-to-date before proceeding, otherwise undefined behavior will result.
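
A hedged sketch of the non-transactional route, refreshing each object before
its move (the resultset name and the choice of C<move_last> are illustrative):

  for my $item ( $ordered_rs->all ) {
    # reload the cached position column from storage, since earlier
    # iterations of this loop may have shifted this row already
    $item->discard_changes;
    $item->move_last;
  }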
 
 =head2 Default Values
 
 Using a database defined default_value on one of your group columns
 could result in the position not being assigned correctly.
 
-=head1 AUTHOR
-
- Original code framework
-   Aran Deltac <bluefeet@cpan.org>
-
- Constraints support and code generalisation
-   Peter Rabbitson <ribasushi@cpan.org>
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::DB2 - (DEPRECATED) Automatic primary key class for DB2
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::DB2 - (DEPRECATED) Automatic primary key class for DB2
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::MSSQL - (DEPRECATED) Automatic primary key class for MSSQL
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::MSSQL - (DEPRECATED) Automatic primary key class for MSSQ
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::MySQL - (DEPRECATED) Automatic primary key class for MySQL
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::MySQL - (DEPRECATED) Automatic primary key class for MySQ
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::Oracle - (DEPRECATED) Automatic primary key class for Oracle
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::Oracle - (DEPRECATED) Automatic primary key class for Ora
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,6 +10,8 @@ __PACKAGE__->load_components(qw/PK::Auto/);
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto::SQLite - (DEPRECATED) Automatic primary key class for SQLite
@@ -18,12 +20,13 @@ DBIx::Class::PK::Auto::SQLite - (DEPRECATED) Automatic primary key class for SQL
 
 Just load PK::Auto instead; auto-inc is now handled by Storage.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -7,6 +7,8 @@ use warnings;
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::PK::Auto - Automatic primary key class
@@ -41,12 +43,13 @@ The code that was handled here is now in Row for efficiency.
 The code that was handled here is now in ResultSource, and is being proxied to
 Row as well.
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -87,7 +87,7 @@ sub ID {
 
 sub _create_ID {
   my ($self, %vals) = @_;
-  return undef unless 0 == grep { !defined } values %vals;
+  return undef if grep { !defined } values %vals;
   return join '|', ref $self || $self, $self->result_source->name,
     map { $_ . '=' . $vals{$_} } sort keys %vals;
 }
@@ -134,15 +134,17 @@ sub _mk_ident_cond {
   return \%cond;
 }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
+1;
@@ -61,11 +61,14 @@ L<copy|DBIx::Class::Row/copy>, L<delete|DBIx::Class::Row/delete>, L<discard_chan
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -3,9 +3,8 @@ package # hide from PAUSE
 
 use strict;
 use warnings;
-use Sub::Name;
 use DBIx::Class::Carp;
-use DBIx::Class::_Util 'fail_on_internal_wantarray';
+use DBIx::Class::_Util qw(quote_sub perlstring);
 use namespace::clean;
 
 our %_pod_inherit_config =
@@ -23,81 +22,91 @@ sub register_relationship {
 
 sub add_relationship_accessor {
   my ($class, $rel, $acc_type) = @_;
-  my %meth;
+
   if ($acc_type eq 'single') {
-    my $rel_info = $class->relationship_info($rel);
-    $meth{$rel} = sub {
+    quote_sub "${class}::${rel}" => sprintf(<<'EOC', perlstring $rel);
       my $self = shift;
+
       if (@_) {
-        $self->set_from_related($rel, @_);
-        return $self->{_relationship_data}{$rel} = $_[0];
-      } elsif (exists $self->{_relationship_data}{$rel}) {
-        return $self->{_relationship_data}{$rel};
-      } else {
-        my $cond = $self->result_source->_resolve_condition(
-          $rel_info->{cond}, $rel, $self, $rel
+        $self->set_from_related( %1$s => @_ );
+        return $self->{_relationship_data}{%1$s} = $_[0];
+      }
+      elsif (exists $self->{_relationship_data}{%1$s}) {
+        return $self->{_relationship_data}{%1$s};
+      }
+      else {
+        my $relcond = $self->result_source->_resolve_relationship_condition(
+          rel_name => %1$s,
+          foreign_alias => %1$s,
+          self_alias => 'me',
+          self_result_object => $self,
+        );
+
+        return undef if (
+          $relcond->{join_free_condition}
+            and
+          $relcond->{join_free_condition} ne DBIx::Class::_Util::UNRESOLVABLE_CONDITION
+            and
+          scalar grep { not defined $_ } values %%{ $relcond->{join_free_condition} || {} }
+            and
+          $self->result_source->relationship_info(%1$s)->{attrs}{undef_on_null_fk}
         );
-        if ($rel_info->{attrs}->{undef_on_null_fk}){
-          return undef unless ref($cond) eq 'HASH';
-          return undef if grep { not defined $_ } values %$cond;
-        }
-        my $val = $self->find_related($rel, {}, {});
+
+        my $val = $self->search_related( %1$s )->single;
         return $val unless $val;  # $val instead of undef so that null-objects can go through
 
-        return $self->{_relationship_data}{$rel} = $val;
+        return $self->{_relationship_data}{%1$s} = $val;
       }
-    };
-  } elsif ($acc_type eq 'filter') {
+EOC
+  }
+  elsif ($acc_type eq 'filter') {
     $class->throw_exception("No such column '$rel' to filter")
        unless $class->has_column($rel);
+
     my $f_class = $class->relationship_info($rel)->{class};
-    $class->inflate_column($rel,
-      { inflate => sub {
-          my ($val, $self) = @_;
-          return $self->find_or_new_related($rel, {}, {});
-        },
-        deflate => sub {
-          my ($val, $self) = @_;
-          $self->throw_exception("'$val' isn't a $f_class") unless $val->isa($f_class);
-
-          # MASSIVE FIXME - this code assumes we pointed at the PK, but the belongs_to
-          # helper does not check any of this
-          # fixup the code a bit to make things saner, but ideally 'filter' needs to
-          # be deprecated ASAP and removed shortly after
-          # Not doing so before 0.08250 however, too many things in motion already
-          my ($pk_col, @rest) = $val->result_source->_pri_cols_or_die;
-          $self->throw_exception(
-            "Relationship '$rel' of type 'filter' can not work with a multicolumn primary key on source '$f_class'"
-          ) if @rest;
-
-          my $pk_val = $val->get_column($pk_col);
-          carp_unique (
-            "Unable to deflate 'filter'-type relationship '$rel' (related object "
-          . "primary key not retrieved), assuming undef instead"
-          ) if ( ! defined $pk_val and $val->in_storage );
-
-          return $pk_val;
-        }
-      }
-    );
-  } elsif ($acc_type eq 'multi') {
-    $meth{$rel} = sub {
-      DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and wantarray and my $sog = fail_on_internal_wantarray($_[0]);
-      shift->search_related($rel, @_)
-    };
-    $meth{"${rel}_rs"} = sub { shift->search_related_rs($rel, @_) };
-    $meth{"add_to_${rel}"} = sub { shift->create_related($rel, @_); };
-  } else {
-    $class->throw_exception("No such relationship accessor type '$acc_type'");
+
+    $class->inflate_column($rel, {
+      inflate => sub {
+        my ($val, $self) = @_;
+        return $self->find_or_new_related($rel, {}, {});
+      },
+      deflate => sub {
+        my ($val, $self) = @_;
+        $self->throw_exception("'$val' isn't a $f_class") unless $val->isa($f_class);
+
+        # MASSIVE FIXME - this code assumes we pointed at the PK, but the belongs_to
+        # helper does not check any of this
+        # fixup the code a bit to make things saner, but ideally 'filter' needs to
+        # be deprecated ASAP and removed shortly after
+        # Not doing so before 0.08250 however, too many things in motion already
+        my ($pk_col, @rest) = $val->result_source->_pri_cols_or_die;
+        $self->throw_exception(
+          "Relationship '$rel' of type 'filter' can not work with a multicolumn primary key on source '$f_class'"
+        ) if @rest;
+
+        my $pk_val = $val->get_column($pk_col);
+        carp_unique (
+          "Unable to deflate 'filter'-type relationship '$rel' (related object "
+        . "primary key not retrieved), assuming undef instead"
+        ) if ( ! defined $pk_val and $val->in_storage );
+
+        return $pk_val;
+      },
+    });
   }
-  {
-    no strict 'refs';
-    no warnings 'redefine';
-    foreach my $meth (keys %meth) {
-      my $name = join '::', $class, $meth;
-      *$name = subname($name, $meth{$meth});
-    }
+  elsif ($acc_type eq 'multi') {
+
+    quote_sub "${class}::${rel}_rs", "shift->search_related_rs( $rel => \@_ )";
+    quote_sub "${class}::add_to_${rel}", "shift->create_related( $rel => \@_ )";
+    quote_sub "${class}::${rel}", sprintf( <<'EOC', perlstring $rel );
+      DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = DBIx::Class::_Util::fail_on_internal_wantarray;
+      shift->search_related( %s => @_ )
+EOC
+  }
+  else {
+    $class->throw_exception("No such relationship accessor type '$acc_type'");
   }
+
 }
 
 1;
@@ -7,6 +7,7 @@ use base qw/DBIx::Class/;
 
 use Scalar::Util qw/weaken blessed/;
 use Try::Tiny;
+use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION';
 use namespace::clean;
 
 =head1 NAME
@@ -38,11 +39,11 @@ methods, for predefined ones, look in L<DBIx::Class::Relationship>.
 
 =over 4
 
-=item Arguments: 'relname', 'Foreign::Class', $condition, $attrs
+=item Arguments: $rel_name, $foreign_class, $condition, $attrs
 
 =back
 
-  __PACKAGE__->add_relationship('relname',
+  __PACKAGE__->add_relationship('rel_name',
                                 'Foreign::Class',
                                 $condition, $attrs);
 
@@ -180,11 +181,32 @@ L<SQL::Abstract> and the resulting SQL will be used verbatim as the C<ON>
 clause of the C<JOIN> statement associated with this relationship.
 
 While every coderef-based condition must return a valid C<ON> clause, it may
-elect to additionally return a simplified join-free condition hashref when
-invoked as C<< $result->relationship >>, as opposed to
-C<< $rs->related_resultset('relationship') >>. In this case C<$result> is
-passed to the coderef as C<< $args->{self_rowobj} >>, so a user can do the
-following:
+elect to additionally return a simplified B<optional> join-free condition
+consisting of a hashref with B<all keys being fully qualified names of columns
+declared on the corresponding result source>. This boils down to two scenarios:
+
+=over
+
+=item *
+
+When relationship resolution is invoked after C<< $result->$rel_name >>, as
+opposed to C<< $rs->related_resultset($rel_name) >>, the C<$result> object
+is passed to the coderef as C<< $args->{self_result_object} >>.
+
+=item *
+
+Alternatively when the user-space invokes resolution via
+C<< $result->set_from_related( $rel_name => $foreign_values_or_object ) >>, the
+corresponding data is passed to the coderef as C<< $args->{foreign_values} >>,
+B<always> in the form of a hashref. If a foreign result object is supplied
+(which is valid usage of L</set_from_related>), its values will be extracted
+into hashref form by calling L<get_columns|DBIx::Class::Row/get_columns>.
+
+=back
+
+Note that the above scenarios are mutually exclusive; that is, you will be supplied
+none or only one of C<self_result_object> and C<foreign_values>. In other words, if
+you define your condition coderef as:
 
   sub {
     my $args = shift;
@@ -194,14 +216,17 @@ following:
         "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
         "$args->{foreign_alias}.year"   => { '>', "1979", '<', "1990" },
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,
+      ! $args->{self_result_object} ? () : {
+        "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid,
         "$args->{foreign_alias}.year"   => { '>', "1979", '<', "1990" },
       },
+      ! $args->{foreign_values} ? () : {
+        "$args->{self_alias}.artistid" => $args->{foreign_values}{artist},
+      }
     );
   }
 
-Now this code:
+Then this code:
 
     my $artist = $schema->resultset("Artist")->find({ id => 4 });
     $artist->cds_80s->all;
@@ -218,25 +243,46 @@ With the bind values:
 
     '4', '1990', '1979'
 
-Note that in order to be able to use
-L<< $result->create_related|DBIx::Class::Relationship::Base/create_related >>,
-the coderef must not only return as its second such a "simple" condition
-hashref which does not depend on joins being available, but the hashref must
-contain only plain values/deflatable objects, such that the result can be
-passed directly to L<DBIx::Class::Relationship::Base/set_from_related>. For
-instance the C<year> constraint in the above example prevents the relationship
-from being used to create related objects (an exception will be thrown).
+While this code:
+
+    my $cd = $schema->resultset("CD")->search({ artist => 1 }, { rows => 1 })->single;
+    my $artist = $schema->resultset("Artist")->new({});
+    $artist->set_from_related('cds_80s' => $cd);
+
+Will properly set the C<< $artist->artistid >> field of this new object to C<1>.
+
+Note that in order to be able to use L</set_from_related> (and by extension
+L<< $result->create_related|DBIx::Class::Relationship::Base/create_related >>),
+the returned join free condition B<must> contain only plain values/deflatable
+objects. For instance the C<year> constraint in the above example prevents
+the relationship from being used to create related objects using
+C<< $artist->create_related( cds_80s => { title => 'blah' } ) >> (an
+exception will be thrown).
 
 In order to allow the user to go truly crazy when generating a custom C<ON>
 clause, the C<$args> hashref passed to the subroutine contains some extra
 metadata. Currently the supplied coderef is executed as:
 
   $relationship_info->{cond}->({
-    self_alias        => The alias of the invoking resultset ('me' in case of a result object),
-    foreign_alias     => The alias of the to-be-joined resultset (often matches relname),
-    self_resultsource => The invocant's resultsource,
-    foreign_relname   => The relationship name (does *not* always match foreign_alias),
-    self_rowobj       => The invocant itself in case of a $result_object->$relationship call
+    self_resultsource   => The resultsource instance on which rel_name is registered
+    rel_name            => The relationship name (does *NOT* always match foreign_alias)
+
+    self_alias          => The alias of the invoking resultset
+    foreign_alias       => The alias of the to-be-joined resultset (does *NOT* always match rel_name)
+
+    # only one of these (or none at all) will ever be supplied to aid in the
+    # construction of a join-free condition
+
+    self_result_object  => The invocant *object* itself in case of a call like
+                           $result_object->$rel_name( ... )
+
+    foreign_values      => A *hashref* of related data: may be passed in directly or
+                           derived via ->get_columns() from a related object in case of
+                           $result_object->set_from_related( $rel_name, $foreign_result_object )
+
+    # deprecated inconsistent names, will be forever available for legacy code
+    self_rowobj         => Old deprecated slot for self_result_object
+    foreign_relname     => Old deprecated slot for rel_name
   });
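
For orientation, here is a sketch (illustrative only, not part of this
changeset) of how such a coderef condition is typically registered via one of
the relationship helpers. The class and column names are assumed to match the
Artist/CD example earlier in this section:

  MyApp::Schema::Result::Artist->has_many(
    cds_80s => 'MyApp::Schema::Result::CD',
    sub {
      my $args = shift;
      return (
        # the ON clause - always required
        { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
          "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
        },
        # the optional join-free condition described above
        ! $args->{self_result_object} ? () : {
          "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid,
          "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
        },
      );
    },
  );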
 
 =head3 attributes
@@ -288,7 +334,7 @@ Then, assuming MyApp::Schema::LinerNotes has an accessor named notes, you can do
 
 For a 'belongs_to' relationship, note the 'cascade_update':
 
-  MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd,
+  MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd',
       { proxy => ['title'], cascade_update => 1 }
   );
   $track->title('New Title');
@@ -299,7 +345,7 @@ For a 'belongs_to relationship, note the 'cascade_update':
 A hashref where each key is the accessor you want installed in the main class,
 and its value is the name of the original in the foreign class.
 
-  MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd', {
+  MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd', {
       proxy => { cd_title => 'title' },
   });
 
@@ -309,7 +355,7 @@ This will create an accessor named C<cd_title> on the C<$track> result object.
 
 NOTE: you can pass a nested struct too, for example:
 
-  MyApp::Schema::Track->belongs_to( cd => 'DBICTest::Schema::CD', 'cd', {
+  MyApp::Schema::Track->belongs_to( cd => 'MyApp::Schema::CD', 'cd', {
     proxy => [ 'year', { cd_title => 'title' } ],
   });
 
@@ -360,7 +406,7 @@ the relationship attributes.
 
 The C<belongs_to> relationship does not update across relationships
 by default, so if you have a 'proxy' attribute on a belongs_to and want to
-use 'update' on it, you muse set C<< cascade_update => 1 >>.
+use 'update' on it, you must set C<< cascade_update => 1 >>.
 
 This is not an RDBMS-style cascade update - it purely means that when
 an object has update called on it, all the related objects also
@@ -465,7 +511,9 @@ sub related_resultset {
 
   return $self->{related_resultsets}{$rel} = do {
 
-    my $rel_info = $self->relationship_info($rel)
+    my $rsrc = $self->result_source;
+
+    my $rel_info = $rsrc->relationship_info($rel)
       or $self->throw_exception( "No such relationship '$rel'" );
 
     my $attrs = (@_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {});
@@ -475,26 +523,18 @@ sub related_resultset {
       if (@_ > 1 && (@_ % 2 == 1));
     my $query = ((@_ > 1) ? {@_} : shift);
 
-    my $rsrc = $self->result_source;
-
     # condition resolution may fail if an incomplete master-object prefetch
     # is encountered - that is ok during prefetch construction (not yet in_storage)
     my ($cond, $is_crosstable) = try {
       $rsrc->_resolve_condition( $rel_info->{cond}, $rel, $self, $rel )
     }
     catch {
-      if ($self->in_storage) {
-        $self->throw_exception ($_);
-      }
-
-      $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION;  # RV
+      $self->throw_exception ($_) if $self->in_storage;
+      UNRESOLVABLE_CONDITION;  # RV, no return()
     };
 
     # keep in mind that the following if() block is part of a do{} - no return()s!!!
-    if ($is_crosstable) {
-      $self->throw_exception (
-        "A cross-table relationship condition returned for statically declared '$rel'"
-      ) unless ref $rel_info->{cond} eq 'CODE';
+    if ($is_crosstable and ref $rel_info->{cond} eq 'CODE') {
 
       # A WHOREIFFIC hack to reinvoke the entire condition resolution
       # with the correct alias. Another way of doing this involves a
@@ -506,7 +546,12 @@ sub related_resultset {
       # root alias as 'me', instead of $rel (as opposed to invoking
       # $rs->search_related)
 
-      local $rsrc->{_relationships}{me} = $rsrc->{_relationships}{$rel};  # make the fake 'me' rel
+      # make the fake 'me' rel
+      local $rsrc->{_relationships}{me} = {
+        %{ $rsrc->{_relationships}{$rel} },
+        _original_name => $rel,
+      };
+
       my $obj_table_alias = lc($rsrc->source_name) . '__row';
       $obj_table_alias =~ s/\W+/_/g;
 
@@ -519,7 +564,7 @@ sub related_resultset {
       # FIXME - this conditional doesn't seem correct - got to figure out
       # at some point what it does. Also the entire UNRESOLVABLE_CONDITION
       # business seems shady - we could simply not query *at all*
-      if ($cond eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
+      if ($cond eq UNRESOLVABLE_CONDITION) {
         my $reverse = $rsrc->reverse_relationship_info($rel);
         foreach my $rev_rel (keys %$reverse) {
           if ($reverse->{$rev_rel}{attrs}{accessor} && $reverse->{$rev_rel}{attrs}{accessor} eq 'multi') {
@@ -627,38 +672,15 @@ your storage until you call L<DBIx::Class::Row/insert> on it.
 =cut
 
 sub new_related {
-  my ($self, $rel, $values) = @_;
-
-  # FIXME - this is a bad position for this (also an identical copy in
-  # set_from_related), but I have no saner way to hook, and I absolutely
-  # want this to throw at least for coderefs, instead of the "insert a NULL
-  # when it gets hard" insanity --ribasushi
-  #
-  # sanity check - currently throw when a complex coderef rel is encountered
-  # FIXME - should THROW MOAR!
-
-  if (ref $self) {  # cdbi calls this as a class method, /me vomits
-
-    my $rsrc = $self->result_source;
-    my $rel_info = $rsrc->relationship_info($rel)
-      or $self->throw_exception( "No such relationship '$rel'" );
-    my (undef, $crosstable, $cond_targets) = $rsrc->_resolve_condition (
-      $rel_info->{cond}, $rel, $self, $rel
-    );
-
-    $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment")
-      if $crosstable;
-
-    if (my @unspecified_rel_condition_chunks = grep { ! exists $values->{$_} } @{$cond_targets||[]} ) {
-      $self->throw_exception(sprintf (
-        "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s",
-        $rel,
-        map { "'$_'" } @unspecified_rel_condition_chunks
-      ));
-    }
-  }
-
-  return $self->search_related($rel)->new_result($values);
+  my ($self, $rel, $data) = @_;
+
+  return $self->search_related($rel)->new_result( $self->result_source->_resolve_relationship_condition (
+    infer_values_based_on => $data,
+    rel_name => $rel,
+    self_result_object => $self,
+    foreign_alias => $rel,
+    self_alias => 'me',
+  )->{inferred_values} );
 }
 
 =head2 create_related
@@ -792,44 +814,21 @@ call set_from_related on the book.
 This is called internally when you pass existing objects as values to
 L<DBIx::Class::ResultSet/create>, or pass an object to a belongs_to accessor.
 
-The columns are only set in the local copy of the object, call L</update> to
-set them in the storage.
+The columns are only set in the local copy of the object; call
+L<update|DBIx::Class::Row/update> to update them in the storage.
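
A minimal usage sketch (the C<Book>/C<Author> names and the C<author>
belongs_to relationship are assumed here for illustration):

  my $author = $schema->resultset('Author')->find(1);

  # copies the author's key value(s) into $book's foreign key column(s)
  $book->set_from_related( author => $author );

  # nothing reaches the database until update() is called
  $book->update;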
 
 =cut
 
 sub set_from_related {
   my ($self, $rel, $f_obj) = @_;
 
-  my $rsrc = $self->result_source;
-  my $rel_info = $rsrc->relationship_info($rel)
-    or $self->throw_exception( "No such relationship '$rel'" );
-
-  if (defined $f_obj) {
-    my $f_class = $rel_info->{class};
-    $self->throw_exception( "Object '$f_obj' isn't a ".$f_class )
-      unless blessed $f_obj and $f_obj->isa($f_class);
-  }
-
-
-  # FIXME - this is a bad position for this (also an identical copy in
-  # new_related), but I have no saner way to hook, and I absolutely
-  # want this to throw at least for coderefs, instead of the "insert a NULL
-  # when it gets hard" insanity --ribasushi
-  #
-  # sanity check - currently throw when a complex coderef rel is encountered
-  # FIXME - should THROW MOAR!
-  my ($cond, $crosstable, $cond_targets) = $rsrc->_resolve_condition (
-    $rel_info->{cond}, $f_obj, $rel, $rel
-  );
-  $self->throw_exception("Custom relationship '$rel' does not resolve to a join-free condition fragment")
-    if $crosstable;
-  $self->throw_exception(sprintf (
-    "Custom relationship '%s' not definitive - returns conditions instead of values for column(s): %s",
-    $rel,
-    map { "'$_'" } @$cond_targets
-  )) if $cond_targets;
-
-  $self->set_columns($cond);
+  $self->set_columns( $self->result_source->_resolve_relationship_condition (
+    infer_values_based_on => {},
+    rel_name => $rel,
+    foreign_values => $f_obj,
+    foreign_alias => $rel,
+    self_alias => 'me',
+  )->{inferred_values} );
 
   return 1;
 }
@@ -986,13 +985,16 @@ Removes the link between the current object and the related object. Note that
 the related object itself won't be deleted unless you call ->delete() on
 it. This method just removes the link between the two objects.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -60,6 +60,8 @@ sub belongs_to {
   else {
     if (ref $cond eq 'HASH') { # ARRAY is also valid
       my $cond_rel;
+      # FIXME This loop is ridiculously incomplete and dangerous
+      # staving off changes until implementation of the Swindon consensus
       for (keys %$cond) {
         if (m/\./) { # Explicit join condition
           $cond_rel = $cond;
@@ -89,6 +91,7 @@ sub belongs_to {
   $class->add_relationship($rel, $f_class,
     $cond,
     {
+      is_depends_on => 1,
       accessor => $acc_type,
       $fk_columns ? ( fk_columns => $fk_columns ) : (),
       %{$attrs || {}}
@@ -98,14 +101,4 @@ sub belongs_to {
   return 1;
 }
 
-# Attempt to remove the POD so it (maybe) falls off the indexer
-
-#=head1 AUTHORS
-#
-#Alexander Hartmaier <Alexander.Hartmaier@t-systems.at>
-#
-#Matt S. Trout <mst@shadowcatsystems.co.uk>
-#
-#=cut
-
 1;
@@ -46,6 +46,7 @@ sub has_many {
     join_type => 'LEFT',
     cascade_delete => $default_cascade,
     cascade_copy => $default_cascade,
+    is_depends_on => 0,
     %{$attrs||{}}
   });
 }
@@ -77,6 +77,7 @@ sub _has_one {
    { accessor => 'single',
      cascade_update => $default_cascade,
      cascade_delete => $default_cascade,
+     is_depends_on => 0,
      ($join_type ? ('join_type' => $join_type) : ()),
      %{$attrs || {}} });
   1;
@@ -91,8 +92,9 @@ sub _validate_has_one_condition {
     my $self_id = $cond->{$foreign_id};
 
     # we can ignore a bad $self_id because add_relationship handles this
-    # warning
+    # exception
     return unless $self_id =~ /^self\.(.*)$/;
+
     my $key = $1;
     $class->throw_exception("Defining rel on ${class} that includes '$key' but no such column defined here yet")
         unless $class->has_column($key);
@@ -63,7 +63,6 @@ EOW
     *$rs_meth_name = subname $rs_meth_name, sub {
       my $self = shift;
       my $attrs = @_ > 1 && ref $_[$#_] eq 'HASH' ? pop(@_) : {};
-      my @args = ($f_rel, @_ > 0 ? @_ : undef, { %{$rel_attrs||{}}, %$attrs });
       my $rs = $self->search_related($rel)->search_related(
         $f_rel, @_ > 0 ? @_ : undef, { %{$rel_attrs||{}}, %$attrs }
       );
@@ -72,7 +71,7 @@ EOW
 
     my $meth_name = join '::', $class, $meth;
     *$meth_name = subname $meth_name, sub {
-      DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and wantarray and my $sog = fail_on_internal_wantarray($_[0]);
+      DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray;
       my $self = shift;
       my $rs = $self->$rs_meth( @_ );
       return (wantarray ? $rs->all : $rs);
@@ -139,7 +138,7 @@ EOW
       );
 
       $self->throw_exception(
-        "Custom relationship '$rel' does not resolve to a join-free condition, "
+        "Relationship '$rel' does not resolve to a join-free condition, "
        ."unable to use with the ManyToMany helper '$f_rel'"
       ) if $crosstable;
 
@@ -3,8 +3,9 @@ package # hide from PAUSE
 
 use strict;
 use warnings;
-use Sub::Name ();
-use base qw/DBIx::Class/;
+use base 'DBIx::Class';
+use DBIx::Class::_Util 'quote_sub';
+use namespace::clean;
 
 our %_pod_inherit_config =
   (
@@ -22,21 +23,17 @@ sub register_relationship {
 sub proxy_to_related {
   my ($class, $rel, $proxy_args) = @_;
   my %proxy_map = $class->_build_proxy_map_from($proxy_args);
-  no strict 'refs';
-  no warnings 'redefine';
-  foreach my $meth_name ( keys %proxy_map ) {
-    my $proxy_to_col = $proxy_map{$meth_name};
-    my $name = join '::', $class, $meth_name;
-    *$name = Sub::Name::subname $name => sub {
-      my $self = shift;
-      my $relobj = $self->$rel;
-      if (@_ && !defined $relobj) {
-        $relobj = $self->create_related($rel, { $proxy_to_col => $_[0] });
-        @_ = ();
-      }
-      return ($relobj ? $relobj->$proxy_to_col(@_) : undef);
-   }
-  }
+
+  quote_sub "${class}::$_", sprintf( <<'EOC', $rel, $proxy_map{$_} )
+    my $self = shift;
+    my $relobj = $self->%1$s;
+    if (@_ && !defined $relobj) {
+      $relobj = $self->create_related( %1$s => { %2$s => $_[0] } );
+      @_ = ();
+    }
+    $relobj ? $relobj->%2$s(@_) : undef;
+EOC
+    for keys %proxy_map
 }
 
 sub _build_proxy_map_from {
@@ -13,6 +13,10 @@ __PACKAGE__->load_own_components(qw/
   Base
 /);
 
+1;
+
+__END__
+
 =head1 NAME
 
 DBIx::Class::Relationship - Inter-table relationships
@@ -105,7 +109,7 @@ L<DBIx::Class::Relationship::Base>.
 
 All helper methods are called similar to the following template:
 
-  __PACKAGE__->$method_name('relname', 'Foreign::Class', \%cond|\@cond|\&cond?, \%attrs?);
+  __PACKAGE__->$method_name('rel_name', 'Foreign::Class', \%cond|\@cond|\&cond?, \%attrs?);
 
 Both C<cond> and C<attrs> are optional. Pass C<undef> for C<cond> if
 you want to use the default value for it, but still want to set C<attrs>.
@@ -327,7 +331,7 @@ The second is almost exactly the same as the accessor method but "_rs"
 is added to the end of the method name, eg C<$accessor_name_rs()>.
 This method works just like the normal accessor, except that it always
 returns a resultset, even in list context. The third method, named C<<
-add_to_$relname >>, will also be added to your Row items; this allows
+add_to_$rel_name >>, will also be added to your Row items; this allows
 you to insert new related items, using the same mechanism as in
 L<DBIx::Class::Relationship::Base/"create_related">.
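
As an illustration (assuming a C<has_many 'cds'> relationship on an Artist
result class; the names are not taken from this changeset), the three
generated methods look like:

  my @cds    = $artist->cds;      # plain accessor: list or resultset per context
  my $cds_rs = $artist->cds_rs;   # always a resultset, even in list context

  # same mechanism as create_related('cds', ...)
  my $new_cd = $artist->add_to_cds({ title => 'Fresh Album', year => 2010 });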
 
@@ -629,17 +633,13 @@ L<DBIx::Class::ResultSet> for a L<list of standard resultset
 attributes|DBIx::Class::ResultSet/ATTRIBUTES> which can be assigned to
 relationships as well.
 
-=cut
-
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
-
-=head1 LICENSE
+=head1 FURTHER QUESTIONS?
 
-You may distribute this code under the same terms as Perl itself.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=cut
+=head1 COPYRIGHT AND LICENSE
 
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -103,6 +103,9 @@ sub inflate_result {
   return $mk_hash->($_[2], $_[3], 'is_root');
 }
 
+1;
+
+__END__
 
 =head1 CAVEATS
 
@@ -131,6 +134,13 @@ The returned hash contains the raw database values.
 
 =back
 
-=cut
+=head1 FURTHER QUESTIONS?
 
-1;
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -6,9 +6,10 @@ use base qw/DBIx::Class/;
 use DBIx::Class::Carp;
 use DBIx::Class::ResultSetColumn;
 use Scalar::Util qw/blessed weaken reftype/;
-use DBIx::Class::_Util 'fail_on_internal_wantarray';
+use DBIx::Class::_Util qw(
+  fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION
+);
 use Try::Tiny;
-use Data::Compare (); # no imports!!! guard against insane architecture
 
 # not importing first() as it will clash with our own method
 use List::Util ();
@@ -57,7 +58,7 @@ just stores all the conditions needed to create the query.
 
 A basic ResultSet representing the data of an entire table is returned
 by calling C<resultset> on a L<DBIx::Class::Schema> and passing in a
-L<Source|DBIx::Class::Manual::Glossary/Source> name.
+L<Source|DBIx::Class::Manual::Glossary/ResultSource> name.
 
   my $users_rs = $schema->resultset('User');
 
@@ -78,34 +79,6 @@ However, if it is used in a boolean context it is B<always> true.  So if
 you want to check if a resultset has any results, you must use C<if $rs
 != 0>.
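
For example (reusing the C<$users_rs> resultset from above):

  if ( $users_rs != 0 ) {   # the numeric comparison triggers a count() query
    # at least one matching row exists
  }

  if ( $users_rs ) {        # WRONG: a plain boolean test is always true
  }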
 
-=head1 CUSTOM ResultSet CLASSES THAT USE Moose
-
-If you want to make your custom ResultSet classes with L<Moose>, use a template
-similar to:
-
-    package MyApp::Schema::ResultSet::User;
-
-    use Moose;
-    use namespace::autoclean;
-    use MooseX::NonMoose;
-    extends 'DBIx::Class::ResultSet';
-
-    sub BUILDARGS { $_[2] }
-
-    ...your code...
-
-    __PACKAGE__->meta->make_immutable;
-
-    1;
-
-The L<MooseX::NonMoose> is necessary so that the L<Moose> constructor does not
-clash with the regular ResultSet constructor. Alternatively, you can use:
-
-    __PACKAGE__->meta->make_immutable(inline_constructor => 0);
-
-The L<BUILDARGS|Moose::Manual::Construction/BUILDARGS> is necessary because the
-signature of the ResultSet C<new> is C<< ->new($source, \%args) >>.
-
 =head1 EXAMPLES
 
 =head2 Chaining resultsets
@@ -193,6 +166,93 @@ Which is the same as:
 
 See: L</search>, L</count>, L</get_column>, L</all>, L</create>.
 
+=head2 Custom ResultSet classes
+
+To add methods to your resultsets, you can subclass L<DBIx::Class::ResultSet>, similar to:
+
+  package MyApp::Schema::ResultSet::User;
+
+  use strict;
+  use warnings;
+
+  # DateTime is used by created_n_days_ago() below
+  use DateTime;
+
+  use base 'DBIx::Class::ResultSet';
+
+  sub active {
+    my $self = shift;
+    $self->search({ $self->current_source_alias . '.active' => 1 });
+  }
+
+  sub unverified {
+    my $self = shift;
+    $self->search({ $self->current_source_alias . '.verified' => 0 });
+  }
+
+  sub created_n_days_ago {
+    my ($self, $days_ago) = @_;
+    $self->search({
+      $self->current_source_alias . '.create_date' => { '<=' =>
+        $self->result_source->schema->storage->datetime_parser->format_datetime(
+          DateTime->now( time_zone => 'UTC' )->subtract( days => $days_ago )
+        )
+      }
+    });
+  }
+
+  sub users_to_warn { shift->active->unverified->created_n_days_ago(7) }
+
+  1;
+
+See L<DBIx::Class::Schema/load_namespaces> for how DBIC can discover and
+automatically attach L<Result|DBIx::Class::Manual::ResultClass>-specific
+L<ResultSet|DBIx::Class::ResultSet> classes.
+
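A minimal schema class for the layout implied above might look like this
(a sketch assuming the C<MyApp::Schema> namespace used in the example; adjust
to your own):

  package MyApp::Schema;

  use strict;
  use warnings;

  use base 'DBIx::Class::Schema';

  # with no arguments this loads MyApp::Schema::Result::* classes and pairs
  # each with a matching MyApp::Schema::ResultSet::* class when one exists
  __PACKAGE__->load_namespaces;

  1;
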
+=head3 ResultSet subclassing with Moose and similar constructor-providers
+
+Using L<Moose> or L<Moo> in your ResultSet classes is usually overkill, but
+you may find it useful if your ResultSets contain a lot of business logic
+(e.g. C<has xml_parser>, C<has json>, etc) or if you just prefer to organize
+your code via roles.
+
+In order to write custom ResultSet classes with L<Moo> you need to use the
+following template. The L<BUILDARGS|Moo/BUILDARGS> is necessary due to the
+unusual signature of the L<constructor provided by DBIC
+|DBIx::Class::ResultSet/new> C<< ->new($source, \%args) >>.
+
+  use Moo;
+  extends 'DBIx::Class::ResultSet';
+  sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_
+
+  ...your code...
+
+  1;
+
+If you want to build your custom ResultSet classes with L<Moose>, you need
+a similar, though slightly more elaborate, template in order to interface the
+inlining of the L<Moose>-provided
+L<object constructor|Moose::Manual::Construction/WHERE'S THE CONSTRUCTOR?>
+with the DBIC one.
+
+  package MyApp::Schema::ResultSet::User;
+
+  use Moose;
+  use MooseX::NonMoose;
+  extends 'DBIx::Class::ResultSet';
+
+  sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_
+
+  ...your code...
+
+  __PACKAGE__->meta->make_immutable;
+
+  1;
+
+L<MooseX::NonMoose> is necessary so that the L<Moose> constructor does not
+entirely overwrite the DBIC one (in contrast, L<Moo> handles this automatically).
+Alternatively, you can skip L<MooseX::NonMoose> and get by with just L<Moose>
+by doing:
+
+  __PACKAGE__->meta->make_immutable(inline_constructor => 0);
+
 =head1 METHODS
 
 =head2 new
@@ -240,14 +300,18 @@ creation B<will not work>. See also warning pertaining to L</create>.
 
 sub new {
   my $class = shift;
-  return $class->new_result(@_) if ref $class;
+
+  if (ref $class) {
+    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call;
+    return $class->new_result(@_);
+  }
 
   my ($source, $attrs) = @_;
   $source = $source->resolve
     if $source->isa('DBIx::Class::ResultSourceHandle');
 
   $attrs = { %{$attrs||{}} };
-  delete @{$attrs}{qw(_last_sqlmaker_alias_map _related_results_construction)};
+  delete @{$attrs}{qw(_last_sqlmaker_alias_map _simple_passthrough_construction)};
 
   if ($attrs->{page}) {
     $attrs->{rows} ||= 10;
@@ -328,7 +392,7 @@ sub search {
   my $rs = $self->search_rs( @_ );
 
   if (wantarray) {
-    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray($rs);
+    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray;
     return $rs->all;
   }
   elsif (defined wantarray) {
@@ -585,59 +649,22 @@ sub _normalize_selection {
 sub _stack_cond {
   my ($self, $left, $right) = @_;
 
-  # collapse single element top-level conditions
-  # (single pass only, unlikely to need recursion)
-  for ($left, $right) {
-    if (ref $_ eq 'ARRAY') {
-      if (@$_ == 0) {
-        $_ = undef;
-      }
-      elsif (@$_ == 1) {
-        $_ = $_->[0];
-      }
-    }
-    elsif (ref $_ eq 'HASH') {
-      my ($first, $more) = keys %$_;
-
-      # empty hash
-      if (! defined $first) {
-        $_ = undef;
-      }
-      # one element hash
-      elsif (! defined $more) {
-        if ($first eq '-and' and ref $_->{'-and'} eq 'HASH') {
-          $_ = $_->{'-and'};
-        }
-        elsif ($first eq '-or' and ref $_->{'-or'} eq 'ARRAY') {
-          $_ = $_->{'-or'};
-        }
-      }
-    }
-  }
-
-  # merge hashes with weeding out of duplicates (simple cases only)
-  if (ref $left eq 'HASH' and ref $right eq 'HASH') {
-
-    # shallow copy to destroy
-    $right = { %$right };
-    for (grep { exists $right->{$_} } keys %$left) {
-      # the use of eq_deeply here is justified - the rhs of an
-      # expression can contain a lot of twisted weird stuff
-      delete $right->{$_} if Data::Compare::Compare( $left->{$_}, $right->{$_} );
-    }
-
-    $right = undef unless keys %$right;
-  }
-
+  (
+    (ref $_ eq 'ARRAY' and !@$_)
+      or
+    (ref $_ eq 'HASH' and ! keys %$_)
+  ) and $_ = undef for ($left, $right);
 
-  if (defined $left xor defined $right) {
+  # either one of the two undef
+  if ( (defined $left) xor (defined $right) ) {
     return defined $left ? $left : $right;
   }
-  elsif (! defined $left) {
-    return undef;
+  # both undef
+  elsif ( ! defined $left ) {
+    return undef
   }
   else {
-    return { -and => [ $left, $right ] };
+    return $self->result_source->schema->storage->_collapse_cond({ -and => [$left, $right] });
   }
 }
 
@@ -784,40 +811,41 @@ sub find {
     . "corresponding to the columns of the specified unique constraint '$constraint_name'"
     ) unless @c_cols == @_;
 
-    $call_cond = {};
     @{$call_cond}{@c_cols} = @_;
   }
 
-  my %related;
+  # process relationship data if any
   for my $key (keys %$call_cond) {
     if (
-      my $keyref = ref($call_cond->{$key})
+      length ref($call_cond->{$key})
         and
       my $relinfo = $rsrc->relationship_info($key)
+        and
+      # implicitly skip has_many's (likely MC)
+      (ref (my $val = delete $call_cond->{$key}) ne 'ARRAY' )
     ) {
-      my $val = delete $call_cond->{$key};
-
-      next if $keyref eq 'ARRAY'; # has_many for multi_create
-
-      my $rel_q = $rsrc->_resolve_condition(
+      my ($rel_cond, $crosstable) = $rsrc->_resolve_condition(
         $relinfo->{cond}, $val, $key, $key
       );
-      die "Can't handle complex relationship conditions in find" if ref($rel_q) ne 'HASH';
-      @related{keys %$rel_q} = values %$rel_q;
+
+      $self->throw_exception("Complex condition via relationship '$key' is unsupported in find()")
+         if $crosstable or ref($rel_cond) ne 'HASH';
+
+      # supplement condition
+      # relationship conditions take precedence (?)
+      @{$call_cond}{keys %$rel_cond} = values %$rel_cond;
     }
   }
 
-  # relationship conditions take precedence (?)
-  @{$call_cond}{keys %related} = values %related;
-
   my $alias = exists $attrs->{alias} ? $attrs->{alias} : $self->{attrs}{alias};
   my $final_cond;
   if (defined $constraint_name) {
     $final_cond = $self->_qualify_cond_columns (
 
-      $self->_build_unique_cond (
-        $constraint_name,
-        $call_cond,
+      $self->result_source->_minimal_valueset_satisfying_constraint(
+        constraint_name => $constraint_name,
+        values => ($self->_merge_with_rscond($call_cond))[0],
+        carp_on_nulls => 1,
       ),
 
       $alias,
@@ -832,23 +860,42 @@ sub find {
     # relationship
   }
   else {
+    my (@unique_queries, %seen_column_combinations, $ci, @fc_exceptions);
+
     # no key was specified - fall down to heuristics mode:
     # run through all unique queries registered on the resultset, and
     # 'OR' all qualifying queries together
-    my (@unique_queries, %seen_column_combinations);
-    for my $c_name ($rsrc->unique_constraint_names) {
+    #
+    # always start from 'primary' if it exists at all
+    for my $c_name ( sort {
+        $a eq 'primary' ? -1
+      : $b eq 'primary' ? 1
+      : $a cmp $b
+    } $rsrc->unique_constraint_names) {
+
       next if $seen_column_combinations{
         join "\x00", sort $rsrc->unique_constraint_columns($c_name)
       }++;
 
-      push @unique_queries, try {
-        $self->_build_unique_cond ($c_name, $call_cond, 'croak_on_nulls')
-      } || ();
+      try {
+        push @unique_queries, $self->_qualify_cond_columns(
+          $self->result_source->_minimal_valueset_satisfying_constraint(
+            constraint_name => $c_name,
+            values => ($self->_merge_with_rscond($call_cond))[0],
+            columns_info => ($ci ||= $self->result_source->columns_info),
+          ),
+          $alias
+        );
+      }
+      catch {
+        push @fc_exceptions, $_ if $_ =~ /\bFilterColumn\b/;
+      };
     }
 
-    $final_cond = @unique_queries
-      ? [ map { $self->_qualify_cond_columns($_, $alias) } @unique_queries ]
-      : $self->_non_unique_find_fallback ($call_cond, $attrs)
+    $final_cond =
+        @unique_queries   ? \@unique_queries
+      : @fc_exceptions    ? $self->throw_exception(join "; ", map { $_ =~ /(.*) at .+ line \d+$/s } @fc_exceptions )
+      :                     $self->_non_unique_find_fallback ($call_cond, $attrs)
     ;
   }
 
@@ -901,51 +948,20 @@ sub _qualify_cond_columns {
 }
 
 sub _build_unique_cond {
-  my ($self, $constraint_name, $extra_cond, $croak_on_null) = @_;
-
-  my @c_cols = $self->result_source->unique_constraint_columns($constraint_name);
-
-  # combination may fail if $self->{cond} is non-trivial
-  my ($final_cond) = try {
-    $self->_merge_with_rscond ($extra_cond)
-  } catch {
-    +{ %$extra_cond }
-  };
-
-  # trim out everything not in $columns
-  $final_cond = { map {
-    exists $final_cond->{$_}
-      ? ( $_ => $final_cond->{$_} )
-      : ()
-  } @c_cols };
-
-  if (my @missing = grep
-    { ! ($croak_on_null ? defined $final_cond->{$_} : exists $final_cond->{$_}) }
-    (@c_cols)
-  ) {
-    $self->throw_exception( sprintf ( "Unable to satisfy requested constraint '%s', no values for column(s): %s",
-      $constraint_name,
-      join (', ', map { "'$_'" } @missing),
-    ) );
-  }
-
-  if (
-    !$croak_on_null
-      and
-    !$ENV{DBIC_NULLABLE_KEY_NOWARN}
-      and
-    my @undefs = sort grep { ! defined $final_cond->{$_} } (keys %$final_cond)
-  ) {
-    carp_unique ( sprintf (
-      "NULL/undef values supplied for requested unique constraint '%s' (NULL "
-    . 'values in column(s): %s). This is almost certainly not what you wanted, '
-    . 'though you can set DBIC_NULLABLE_KEY_NOWARN to disable this warning.',
-      $constraint_name,
-      join (', ', map { "'$_'" } @undefs),
-    ));
-  }
-
-  return $final_cond;
+  carp_unique sprintf
+    '_build_unique_cond is a private method, and moreover is about to go '
+  . 'away. Please contact the development team at %s if you believe you '
+  . 'have a genuine use for this method, in order to discuss alternatives.',
+    DBIx::Class::_ENV_::HELP_URL,
+  ;
+
+  my ($self, $constraint_name, $cond, $croak_on_null) = @_;
+
+  $self->result_source->_minimal_valueset_satisfying_constraint(
+    constraint_name => $constraint_name,
+    values => $cond,
+    carp_on_nulls => !$croak_on_null
+  );
 }
 
 =head2 search_related
@@ -1090,39 +1106,6 @@ sub single {
   $self->_construct_results->[0];
 }
 
-
-# _collapse_query
-#
-# Recursively collapse the query, accumulating values for each column.
-
-sub _collapse_query {
-  my ($self, $query, $collapsed) = @_;
-
-  $collapsed ||= {};
-
-  if (ref $query eq 'ARRAY') {
-    foreach my $subquery (@$query) {
-      next unless ref $subquery;  # -or
-      $collapsed = $self->_collapse_query($subquery, $collapsed);
-    }
-  }
-  elsif (ref $query eq 'HASH') {
-    if (keys %$query and (keys %$query)[0] eq '-and') {
-      foreach my $subquery (@{$query->{-and}}) {
-        $collapsed = $self->_collapse_query($subquery, $collapsed);
-      }
-    }
-    else {
-      foreach my $col (keys %$query) {
-        my $value = $query->{$col};
-        $collapsed->{$col}{$value}++;
-      }
-    }
-  }
-
-  return $collapsed;
-}
-
 =head2 get_column
 
 =over 4
@@ -1164,7 +1147,7 @@ You most likely want to use L</search> with specific operators.
 
 For more information, see L<DBIx::Class::Manual::Cookbook>.
 
-This method is deprecated and will be removed in 0.09. Use L</search()>
+This method is deprecated and will be removed in 0.09. Use L<search()|/search>
 instead. An example conversion is:
 
   ->search_like({ foo => 'bar' });
@@ -1323,7 +1306,7 @@ sub _construct_results {
           and
         $rsrc->schema
               ->storage
-               ->_main_source_order_by_portion_is_stable($rsrc, $attrs->{order_by}, $attrs->{where})
+               ->_extract_colinfo_of_stable_main_source_order_by_portion($attrs)
       ) ? 1 : 0
     ) unless defined $attrs->{_ordered_for_collapse};
 
@@ -1407,8 +1390,8 @@ sub _construct_results {
   ) ? 1 : 0 ) unless defined $self->{_result_inflator}{is_hri};
 
 
-  if (! $attrs->{_related_results_construction}) {
-    # construct a much simpler array->hash folder for the one-table cases right here
+  if ($attrs->{_simple_passthrough_construction}) {
+    # construct a much simpler array->hash folder for the one-table HRI cases right here
     if ($self->{_result_inflator}{is_hri}) {
       for my $r (@$rows) {
         $r = { map { $infmap->[$_] => $r->[$_] } 0..$#$infmap };
@@ -1421,15 +1404,19 @@ sub _construct_results {
     #
     # crude unscientific benchmarking indicated the shortcut eval is not worth it for
     # this particular resultset size
-    elsif (@$rows < 60) {
+    elsif ( $self->{_result_inflator}{is_core_row} and @$rows < 60 ) {
       for my $r (@$rows) {
         $r = $inflator_cref->($res_class, $rsrc, { map { $infmap->[$_] => $r->[$_] } (0..$#$infmap) } );
       }
     }
     else {
       eval sprintf (
-        '$_ = $inflator_cref->($res_class, $rsrc, { %s }) for @$rows',
-        join (', ', map { "\$infmap->[$_] => \$_->[$_]" } 0..$#$infmap )
+        ( $self->{_result_inflator}{is_core_row}
+          ? '$_ = $inflator_cref->($res_class, $rsrc, { %s }) for @$rows'
+          # a custom inflator may be a multiplier/reductor - put it in direct list ctx
+          : '@$rows = map { $inflator_cref->($res_class, $rsrc, { %s } ) } @$rows'
+        ),
+        ( join (', ', map { "\$infmap->[$_] => \$_->[$_]" } 0..$#$infmap ) )
       );
     }
   }
@@ -1502,10 +1489,15 @@ EOS
       $next_cref ? ( $next_cref, $self->{_stashed_rows} = [] ) : (),
     );
 
-    # Special-case multi-object HRI - there is no $inflator_cref pass
-    unless ($self->{_result_inflator}{is_hri}) {
+    # simple in-place substitution, does not regrow $rows
+    if ($self->{_result_inflator}{is_core_row}) {
       $_ = $inflator_cref->($res_class, $rsrc, @$_) for @$rows
     }
+    # Special-case multi-object HRI - there is no $inflator_cref pass at all
+    elsif ( ! $self->{_result_inflator}{is_hri} ) {
+      # the inflator may be a multiplier/reductor - put it in list ctx
+      @$rows = map { $inflator_cref->($res_class, $rsrc, @$_) } @$rows;
+    }
   }
 
   # The @$rows check seems odd at first - why wouldn't we want to warn
@@ -1550,8 +1542,8 @@ L<"table"|DBIx::Class::Manual::Glossary/"ResultSource"> class.
 
 Note that changing the result_class will also remove any components
 that were originally loaded in the source class via
-L<DBIx::Class::ResultSource/load_components>. Any overloaded methods
-in the original source class will not run.
+L<load_components|Class::C3::Componentised/load_components( @comps )>.
+Any overloaded methods in the original source class will not run.
 
 =cut
 
@@ -2003,7 +1995,6 @@ sub _rs_update_delete {
 
       $guard = $storage->txn_scope_guard;
 
-      $cond = [];
       for my $row ($subrs->cursor->all) {
         push @$cond, { map
           { $idcols->[$_] => $row->[$_] }
@@ -2013,11 +2004,11 @@ sub _rs_update_delete {
     }
   }
 
-  my $res = $storage->$op (
+  my $res = $cond ? $storage->$op (
     $rsrc,
     $op eq 'update' ? $values : (),
     $cond,
-  );
+  ) : '0E0';
 
   $guard->commit if $guard;
 
@@ -2227,127 +2218,275 @@ case there are obviously no benefits to using this method over L</create>.
 sub populate {
   my $self = shift;
 
-  # cruft placed in standalone method
-  my $data = $self->_normalize_populate_args(@_);
+  # this is naive and just a quick check
+  # the types will need to be checked more thoroughly when the
+  # multi-source populate gets added
+  my $data = (
+    ref $_[0] eq 'ARRAY'
+      and
+    ( @{$_[0]} or return )
+      and
+    ( ref $_[0][0] eq 'HASH' or ref $_[0][0] eq 'ARRAY' )
+      and
+    $_[0]
+  ) or $self->throw_exception('Populate expects an arrayref of hashrefs or arrayref of arrayrefs');
 
-  return unless @$data;
+  # FIXME - no cref handling
+  # At this point assume either hashes or arrays
 
   if(defined wantarray) {
-    my @created = map { $self->create($_) } @$data;
-    return wantarray ? @created : \@created;
-  }
-  else {
-    my $first = $data->[0];
+    my (@results, $guard);
 
-    # if a column is a registered relationship, and is a non-blessed hash/array, consider
-    # it relationship data
-    my (@rels, @columns);
-    my $rsrc = $self->result_source;
-    my $rels = { map { $_ => $rsrc->relationship_info($_) } $rsrc->relationships };
-    for (keys %$first) {
-      my $ref = ref $first->{$_};
-      $rels->{$_} && ($ref eq 'ARRAY' or $ref eq 'HASH')
-        ? push @rels, $_
-        : push @columns, $_
+    if (ref $data->[0] eq 'ARRAY') {
+      # column names only, nothing to do
+      return if @$data == 1;
+
+      $guard = $self->result_source->schema->storage->txn_scope_guard
+        if @$data > 2;
+
+      @results = map
+        { my $vals = $_; $self->new_result({ map { $data->[0][$_] => $vals->[$_] } 0..$#{$data->[0]} })->insert }
+        @{$data}[1 .. $#$data]
       ;
     }
+    else {
 
-    my @pks = $rsrc->primary_columns;
+      $guard = $self->result_source->schema->storage->txn_scope_guard
+        if @$data > 1;
 
-    ## do the belongs_to relationships
-    foreach my $index (0..$#$data) {
+      @results = map { $self->new_result($_)->insert } @$data;
+    }
+
+    $guard->commit if $guard;
+    return wantarray ? @results : \@results;
+  }
 
-      # delegate to create() for any dataset without primary keys with specified relationships
-      if (grep { !defined $data->[$index]->{$_} } @pks ) {
-        for my $r (@rels) {
-          if (grep { ref $data->[$index]{$r} eq $_ } qw/HASH ARRAY/) {  # a related set must be a HASH or AoH
-            my @ret = $self->populate($data);
-            return;
+  # we have to deal with *possibly incomplete* related data
+  # this means we have to walk the data structure twice
+  # whether we want this or not
+  # jnap, I hate you ;)
+  my $rsrc = $self->result_source;
+  my $rel_info = { map { $_ => $rsrc->relationship_info($_) } $rsrc->relationships };
+
+  my ($colinfo, $colnames, $slices_with_rels);
+  my $data_start = 0;
+
+  DATA_SLICE:
+  for my $i (0 .. $#$data) {
+
+    my $current_slice_seen_rel_infos;
+
+### Determine/Supplement collists
+### BEWARE - This is a hot piece of code, a lot of weird idioms were used
+    if( ref $data->[$i] eq 'ARRAY' ) {
+
+      # positional(!) explicit column list
+      if ($i == 0) {
+        # column names only, nothing to do
+        return if @$data == 1;
+
+        $colinfo->{$data->[0][$_]} = { pos => $_, name => $data->[0][$_] } and push @$colnames, $data->[0][$_]
+          for 0 .. $#{$data->[0]};
+
+        $data_start = 1;
+
+        next DATA_SLICE;
+      }
+      else {
+        for (values %$colinfo) {
+          if ($_->{is_rel} ||= (
+            $rel_info->{$_->{name}}
+              and
+            (
+              ref $data->[$i][$_->{pos}] eq 'ARRAY'
+                or
+              ref $data->[$i][$_->{pos}] eq 'HASH'
+                or
+              ( defined blessed $data->[$i][$_->{pos}] and $data->[$i][$_->{pos}]->isa('DBIx::Class::Row') )
+            )
+              and
+            1
+          )) {
+
+            # moar sanity check... sigh
+            for ( ref $data->[$i][$_->{pos}] eq 'ARRAY' ? @{$data->[$i][$_->{pos}]} : $data->[$i][$_->{pos}] ) {
+              if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) {
+                carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()");
+                return my $throwaway = $self->populate(@_);
+              }
+            }
+
+            push @$current_slice_seen_rel_infos, $rel_info->{$_->{name}};
           }
         }
       }
 
-      foreach my $rel (@rels) {
-        next unless ref $data->[$index]->{$rel} eq "HASH";
-        my $result = $self->related_resultset($rel)->create($data->[$index]->{$rel});
-        my ($reverse_relname, $reverse_relinfo) = %{$rsrc->reverse_relationship_info($rel)};
-        my $related = $result->result_source->_resolve_condition(
-          $reverse_relinfo->{cond},
-          $self,
-          $result,
-          $rel,
-        );
-
-        delete $data->[$index]->{$rel};
-        $data->[$index] = {%{$data->[$index]}, %$related};
+      if ($current_slice_seen_rel_infos) {
+        push @$slices_with_rels, { map { $colnames->[$_] => $data->[$i][$_] } 0 .. $#$colnames };
 
-        push @columns, keys %$related if $index == 0;
+        # this is needed further down to decide whether or not to fallback to create()
+        $colinfo->{$colnames->[$_]}{seen_null} ||= ! defined $data->[$i][$_]
+          for 0 .. $#$colnames;
       }
     }
+    elsif( ref $data->[$i] eq 'HASH' ) {
 
-    ## inherit the data locked in the conditions of the resultset
-    my ($rs_data) = $self->_merge_with_rscond({});
-    delete @{$rs_data}{@columns};
-
-    ## do bulk insert on current row
-    $rsrc->storage->insert_bulk(
-      $rsrc,
-      [@columns, keys %$rs_data],
-      [ map { [ @$_{@columns}, values %$rs_data ] } @$data ],
-    );
+      for ( sort keys %{$data->[$i]} ) {
 
-    ## do the has_many relationships
-    foreach my $item (@$data) {
+        $colinfo->{$_} ||= do {
 
-      my $main_row;
+          $self->throw_exception("Column '$_' must be present in supplied explicit column list")
+            if $data_start; # it will be 0 on AoH, 1 on AoA
 
-      foreach my $rel (@rels) {
-        next unless ref $item->{$rel} eq "ARRAY" && @{ $item->{$rel} };
+          push @$colnames, $_;
 
-        $main_row ||= $self->new_result({map { $_ => $item->{$_} } @pks});
+          # RV
+          { pos => $#$colnames, name => $_ }
+        };
 
-        my $child = $main_row->$rel;
+        if ($colinfo->{$_}{is_rel} ||= (
+          $rel_info->{$_}
+            and
+          (
+            ref $data->[$i]{$_} eq 'ARRAY'
+              or
+            ref $data->[$i]{$_} eq 'HASH'
+              or
+            ( defined blessed $data->[$i]{$_} and $data->[$i]{$_}->isa('DBIx::Class::Row') )
+          )
+            and
+          1
+        )) {
+
+          # moar sanity check... sigh
+          for ( ref $data->[$i]{$_} eq 'ARRAY' ? @{$data->[$i]{$_}} : $data->[$i]{$_} ) {
+            if ( defined blessed $_ and $_->isa('DBIx::Class::Row' ) ) {
+              carp_unique("Fast-path populate() with supplied related objects is not possible - falling back to regular create()");
+              return my $throwaway = $self->populate(@_);
+            }
+          }
 
-        my $related = $child->result_source->_resolve_condition(
-          $rels->{$rel}{cond},
-          $child,
-          $main_row,
-          $rel,
-        );
+          push @$current_slice_seen_rel_infos, $rel_info->{$_};
+        }
+      }
 
-        my @rows_to_add = ref $item->{$rel} eq 'ARRAY' ? @{$item->{$rel}} : ($item->{$rel});
-        my @populate = map { {%$_, %$related} } @rows_to_add;
+      if ($current_slice_seen_rel_infos) {
+        push @$slices_with_rels, $data->[$i];
 
-        $child->populate( \@populate );
+        # this is needed further down to decide whether or not to fallback to create()
+        $colinfo->{$_}{seen_null} ||= ! defined $data->[$i]{$_}
+          for keys %{$data->[$i]};
       }
     }
+    else {
+      $self->throw_exception('Unexpected populate() data structure member type: ' . ref $data->[$i] );
+    }
+
+    if ( grep
+      { $_->{attrs}{is_depends_on} }
+      @{ $current_slice_seen_rel_infos || [] }
+    ) {
+      carp_unique("Fast-path populate() of belongs_to relationship data is not possible - falling back to regular create()");
+      return my $throwaway = $self->populate(@_);
+    }
   }
-}
 
+  if( $slices_with_rels ) {
 
-# populate() arguments went over several incarnations
-# What we ultimately support is AoH
-sub _normalize_populate_args {
-  my ($self, $arg) = @_;
+    # need to exclude the rel "columns"
+    $colnames = [ grep { ! $colinfo->{$_}{is_rel} } @$colnames ];
 
-  if (ref $arg eq 'ARRAY') {
-    if (!@$arg) {
-      return [];
-    }
-    elsif (ref $arg->[0] eq 'HASH') {
-      return $arg;
+    # extra sanity check - ensure the main source is in fact identifiable
+    # the localizing of nullability is insane, but oh well... the use-case is legit
+    my $ci = $rsrc->columns_info($colnames);
+
+    $ci->{$_} = { %{$ci->{$_}}, is_nullable => 0 }
+      for grep { ! $colinfo->{$_}{seen_null} } keys %$ci;
+
+    unless( $rsrc->_identifying_column_set($ci) ) {
+      carp_unique("Fast-path populate() of non-uniquely identifiable rows with related data is not possible - falling back to regular create()");
+      return my $throwaway = $self->populate(@_);
     }
-    elsif (ref $arg->[0] eq 'ARRAY') {
-      my @ret;
-      my @colnames = @{$arg->[0]};
-      foreach my $values (@{$arg}[1 .. $#$arg]) {
-        push @ret, { map { $colnames[$_] => $values->[$_] } (0 .. $#colnames) };
+  }
+
+### inherit the data locked in the conditions of the resultset
+  my ($rs_data) = $self->_merge_with_rscond({});
+  delete @{$rs_data}{@$colnames};  # passed-in stuff takes precedence
+
+  # if anything left - decompose rs_data
+  my $rs_data_vals;
+  if (keys %$rs_data) {
+     push @$rs_data_vals, $rs_data->{$_}
+      for sort keys %$rs_data;
+  }
+
+### start work
+  my $guard;
+  $guard = $rsrc->schema->storage->txn_scope_guard
+    if $slices_with_rels;
+
+### main source data
+  # FIXME - need to switch entirely to a coderef-based thing,
+  # so that large sets aren't copied several times... I think
+  $rsrc->storage->_insert_bulk(
+    $rsrc,
+    [ @$colnames, sort keys %$rs_data ],
+    [ map {
+      ref $data->[$_] eq 'ARRAY'
+      ? (
+          $slices_with_rels ? [ @{$data->[$_]}[0..$#$colnames], @{$rs_data_vals||[]} ]  # the collist changed
+        : $rs_data_vals     ? [ @{$data->[$_]}, @$rs_data_vals ]
+        :                     $data->[$_]
+      )
+      : [ @{$data->[$_]}{@$colnames}, @{$rs_data_vals||[]} ]
+    } $data_start .. $#$data ],
+  );
+
+### do the children relationships
+  if ( $slices_with_rels ) {
+    my @rels = grep { $colinfo->{$_}{is_rel} } keys %$colinfo
+      or die 'wtf... please report a bug with DBIC_TRACE=1 output (stacktrace)';
+
+    for my $sl (@$slices_with_rels) {
+
+      my ($main_proto, $main_proto_rs);
+      for my $rel (@rels) {
+        next unless defined $sl->{$rel};
+
+        $main_proto ||= {
+          %$rs_data,
+          (map { $_ => $sl->{$_} } @$colnames),
+        };
+
+        unless (defined $colinfo->{$rel}{rs}) {
+
+          $colinfo->{$rel}{rs} = $rsrc->related_source($rel)->resultset;
+
+          $colinfo->{$rel}{fk_map} = { reverse %{ $rsrc->_resolve_relationship_condition(
+            rel_name => $rel,
+            self_alias => "\xFE", # irrelevant
+            foreign_alias => "\xFF", # irrelevant
+          )->{identity_map} || {} } };
+
+        }
+
+        $colinfo->{$rel}{rs}->search({ map # only so that we inherit them values properly, no actual search
+          {
+            $_ => { '=' =>
+              ( $main_proto_rs ||= $rsrc->resultset->search($main_proto) )
+                ->get_column( $colinfo->{$rel}{fk_map}{$_} )
+                 ->as_query
+            }
+          }
+          keys %{$colinfo->{$rel}{fk_map}}
+        })->populate( ref $sl->{$rel} eq 'ARRAY' ? $sl->{$rel} : [ $sl->{$rel} ] );
+
+        1;
       }
-      return \@ret;
     }
   }
 
-  $self->throw_exception('Populate expects an arrayref of hashrefs or arrayref of arrayrefs');
+  $guard->commit if $guard;
 }
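
A brief usage sketch to accompany the rewrite above (the C<Artist> source and
its C<artistid>/C<name> columns are assumed for illustration):

  # void context: fast path via _insert_bulk, arrayref-of-arrayrefs form
  # with a leading row of column names
  $schema->resultset('Artist')->populate([
    [ qw(artistid name) ],
    [ 1, 'Caterwauler McCrae' ],
    [ 2, 'Random Boy Band'    ],
  ]);

  # list context: each row goes through new_result()->insert and the
  # resulting row objects are returned
  my @artists = $schema->resultset('Artist')->populate([
    { name => 'Dead Salmon' },
    { name => 'Live Tuna'   },
  ]);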
 
 =head2 pager
@@ -2443,7 +2582,7 @@ sub new_result {
   $self->throw_exception( "new_result takes only one argument - a hashref of values" )
     if @_ > 2;
 
-  $self->throw_exception( "new_result expects a hashref" )
+  $self->throw_exception( "Result object instantiation requires a hashref as argument" )
     unless (ref $values eq 'HASH');
 
   my ($merged_cond, $cols_from_relations) = $self->_merge_with_rscond($values);
@@ -2482,51 +2621,33 @@ sub new_result {
 sub _merge_with_rscond {
   my ($self, $data) = @_;
 
-  my (%new_data, @cols_from_relations);
+  my ($implied_data, @cols_from_relations);
 
   my $alias = $self->{attrs}{alias};
 
   if (! defined $self->{cond}) {
     # just massage $data below
   }
-  elsif ($self->{cond} eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
-    %new_data = %{ $self->{attrs}{related_objects} || {} };  # nothing might have been inserted yet
-    @cols_from_relations = keys %new_data;
-  }
-  elsif (ref $self->{cond} ne 'HASH') {
-    $self->throw_exception(
-      "Can't abstract implicit construct, resultset condition not a hash"
-    );
+  elsif ($self->{cond} eq UNRESOLVABLE_CONDITION) {
+    $implied_data = $self->{attrs}{related_objects};  # nothing might have been inserted yet
+    @cols_from_relations = keys %{ $implied_data || {} };
   }
   else {
-    # precedence must be given to passed values over values inherited from
-    # the cond, so the order here is important.
-    my $collapsed_cond = $self->_collapse_cond($self->{cond});
-    my %implied = %{$self->_remove_alias($collapsed_cond, $alias)};
-
-    while ( my($col, $value) = each %implied ) {
-      my $vref = ref $value;
-      if (
-        $vref eq 'HASH'
-          and
-        keys(%$value) == 1
-          and
-        (keys %$value)[0] eq '='
-      ) {
-        $new_data{$col} = $value->{'='};
-      }
-      elsif( !$vref or $vref eq 'SCALAR' or blessed($value) ) {
-        $new_data{$col} = $value;
-      }
-    }
+    my $eqs = $self->result_source->schema->storage->_extract_fixed_condition_columns($self->{cond}, 'consider_nulls');
+    $implied_data = { map {
+      ( ($eqs->{$_}||'') eq UNRESOLVABLE_CONDITION ) ? () : ( $_ => $eqs->{$_} )
+    } keys %$eqs };
   }
 
-  %new_data = (
-    %new_data,
-    %{ $self->_remove_alias($data, $alias) },
+  return (
+    { map
+      { %{ $self->_remove_alias($_, $alias) } }
+      # precedence must be given to passed values over values inherited from
+      # the cond, so the order here is important.
+      ( $implied_data||(), $data)
+    },
+    \@cols_from_relations
   );
-
-  return (\%new_data, \@cols_from_relations);
 }
 
 # _has_resolved_attr
@@ -2582,38 +2703,6 @@ sub _has_resolved_attr {
   return 0;
 }
 
-# _collapse_cond
-#
-# Recursively collapse the condition.
-
-sub _collapse_cond {
-  my ($self, $cond, $collapsed) = @_;
-
-  $collapsed ||= {};
-
-  if (ref $cond eq 'ARRAY') {
-    foreach my $subcond (@$cond) {
-      next unless ref $subcond;  # -or
-      $collapsed = $self->_collapse_cond($subcond, $collapsed);
-    }
-  }
-  elsif (ref $cond eq 'HASH') {
-    if (keys %$cond and (keys %$cond)[0] eq '-and') {
-      foreach my $subcond (@{$cond->{-and}}) {
-        $collapsed = $self->_collapse_cond($subcond, $collapsed);
-      }
-    }
-    else {
-      foreach my $col (keys %$cond) {
-        my $value = $cond->{$col};
-        $collapsed->{$col} = $value;
-      }
-    }
-  }
-
-  return $collapsed;
-}
-
 # _remove_alias
 #
 # Remove the specified alias from the specified query hash. A copy is made so
@@ -2794,10 +2883,9 @@ L</new>.
 =cut
 
 sub create {
-  my ($self, $col_data) = @_;
-  $self->throw_exception( "create needs a hashref" )
-    unless ref $col_data eq 'HASH';
-  return $self->new_result($col_data)->insert;
+  #my ($self, $col_data) = @_;
+  DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and fail_on_internal_call;
+  return shift->new_result(shift)->insert;
 }
 
 =head2 find_or_create
@@ -2879,7 +2967,7 @@ sub find_or_create {
   if (keys %$hash and my $row = $self->find($hash, $attrs) ) {
     return $row;
   }
-  return $self->create($hash);
+  return $self->new_result($hash)->insert;
 }
 
 =head2 update_or_create
@@ -2949,7 +3037,7 @@ sub update_or_create {
     return $row;
   }
 
-  return $self->create($cond);
+  return $self->new_result($cond)->insert;
 }
 
 =head2 update_or_new
@@ -3186,11 +3274,11 @@ sub related_resultset {
 
     if (my $cache = $self->get_cache) {
       my @related_cache = map
-        { @{$_->related_resultset($rel)->get_cache||[]} }
+        { $_->related_resultset($rel)->get_cache || () }
         @$cache
       ;
 
-      $new->set_cache(\@related_cache) if @related_cache;
+      $new->set_cache([ map @$_, @related_cache ]) if @related_cache == @$cache;
     }
 
     $new;
@@ -3422,7 +3510,7 @@ sub _resolved_attrs {
   return $self->{_attrs} if $self->{_attrs};
 
   my $attrs  = { %{ $self->{attrs} || {} } };
-  my $source = $self->result_source;
+  my $source = $attrs->{result_source} = $self->result_source;
   my $alias  = $attrs->{alias};
 
   $self->throw_exception("Specifying distinct => 1 in conjunction with collapse => 1 is unsupported")
@@ -3526,18 +3614,18 @@ sub _resolved_attrs {
       ];
   }
 
-  if ( defined $attrs->{order_by} ) {
-    $attrs->{order_by} = (
-      ref( $attrs->{order_by} ) eq 'ARRAY'
-      ? [ @{ $attrs->{order_by} } ]
-      : [ $attrs->{order_by} || () ]
-    );
-  }
+  for my $attr (qw(order_by group_by)) {
 
-  if ($attrs->{group_by} and ref $attrs->{group_by} ne 'ARRAY') {
-    $attrs->{group_by} = [ $attrs->{group_by} ];
-  }
+    if ( defined $attrs->{$attr} ) {
+      $attrs->{$attr} = (
+        ref( $attrs->{$attr} ) eq 'ARRAY'
+        ? [ @{ $attrs->{$attr} } ]
+        : [ $attrs->{$attr} || () ]
+      );
 
+      delete $attrs->{$attr} unless @{$attrs->{$attr}};
+    }
+  }
 
   # generate selections based on the prefetch helper
   my ($prefetch, @prefetch_select, @prefetch_as);
@@ -3654,10 +3742,11 @@ sub _resolved_attrs {
   push @{$attrs->{select}}, @prefetch_select;
   push @{$attrs->{as}}, @prefetch_as;
 
-  # whether we can get away with the dumbest (possibly DBI-internal) collapser
-  if ( List::Util::first { $_ =~ /\./ } @{$attrs->{as}} ) {
-    $attrs->{_related_results_construction} = 1;
-  }
+  $attrs->{_simple_passthrough_construction} = !(
+    $attrs->{collapse}
+      or
+    grep { $_ =~ /\./ } @{$attrs->{as}}
+  );
 
   # if both page and offset are specified, produce a combined offset
   # even though it doesn't make much sense, this is what pre 081xx has
@@ -3977,32 +4066,65 @@ syntax as outlined above.
 Shortcut to request a particular set of columns to be retrieved. Each
 column spec may be a string (a table column name), or a hash (in which
 case the key is the C<as> value, and the value is used as the C<select>
-expression). Adds C<me.> onto the start of any column without a C<.> in
+expression). Adds the L</current_source_alias> onto the start of any column without a C<.> in
 it and sets C<select> from that, then auto-populates C<as> from
 C<select> as normal. (You may also use the C<cols> attribute, as in
-earlier versions of DBIC, but this is deprecated.)
+earlier versions of DBIC, but this is deprecated)
 
 Essentially C<columns> does the same as L</select> and L</as>.
 
-    columns => [ 'foo', { bar => 'baz' } ]
+    columns => [ 'some_column', { dbic_slot => 'another_column' } ]
 
 is the same as
 
-    select => [qw/foo baz/],
-    as => [qw/foo bar/]
+    select => [qw(some_column another_column)],
+    as     => [qw(some_column dbic_slot)]
+
+If you want to individually retrieve related columns (in essence performing a
+manual prefetch), you have to make sure to specify the correct inflation slot
+chain so that it matches existing relationships:
+
+    my $rs = $schema->resultset('Artist')->search({}, {
+        # required to tell DBIC to collapse has_many relationships
+        collapse => 1,
+        join     => { cds => 'tracks'},
+        '+columns'  => {
+          'cds.cdid'         => 'cds.cdid',
+          'cds.tracks.title' => 'tracks.title',
+        },
+    });
+
+Like elsewhere, literal SQL or literal values can be included by using a
+scalar reference or a literal bind value, and these values will be available
+in the result with C<get_column> (see also
+L<SQL::Abstract/Literal SQL and value type operators>):
+
+    # equivalent SQL: SELECT 1, 'a string', IF(my_column,?,?) ...
+    # bind values: $true_value, $false_value
+    columns => [
+        {
+            foo => \1,
+            bar => \q{'a string'},
+            baz => \[ 'IF(my_column,?,?)', $true_value, $false_value ],
+        }
+    ]
 
 =head2 +columns
 
+B<NOTE:> You B<MUST> explicitly quote C<'+columns'> when using this attribute.
+Not doing so causes Perl to incorrectly interpret C<+columns> as a bareword
+with a unary plus operator before it, which is the same as simply C<columns>.
+
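+For instance, a minimal sketch of the difference (the C<artist.name> selector
+is purely illustrative):
+
+  # correct - the attribute name is quoted
+  '+columns' => [ 'artist.name' ],
+
+  # wrong - parsed as a unary plus on a bareword, i.e. plain 'columns',
+  # silently replacing the default selection instead of extending it
+  +columns => [ 'artist.name' ],
+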
 =over 4
 
-=item Value: \@columns
+=item Value: \@extra_columns
 
 =back
 
 Indicates additional columns to be selected from storage. Works the same as
-L</columns> but adds columns to the selection. (You may also use the
+L</columns> but adds columns to the current selection. (You may also use the
 C<include_columns> attribute, as in earlier versions of DBIC, but this is
-deprecated). For example:-
+deprecated)
 
   $schema->resultset('CD')->search(undef, {
     '+columns' => ['artist.name'],
@@ -4014,20 +4136,6 @@ passed to object inflation. Note that the 'artist' is the name of the
 column (or relationship) accessor, and 'name' is the name of the column
 accessor in the related table.
 
-B<NOTE:> You need to explicitly quote '+columns' when defining the attribute.
-Not doing so causes Perl to incorrectly interpret +columns as a bareword with a
-unary plus operator before it.
-
-=head2 include_columns
-
-=over 4
-
-=item Value: \@columns
-
-=back
-
-Deprecated.  Acts as a synonym for L</+columns> for backward compatibility.
-
 =head2 select
 
 =over 4
@@ -4053,25 +4161,28 @@ names:
 
 B<NOTE:> You will almost always need a corresponding L</as> attribute when you
 use L</select>, to instruct DBIx::Class how to store the result of the column.
-Also note that the L</as> attribute has nothing to do with the SQL-side 'AS'
-identifier aliasing. You can however alias a function, so you can use it in
-e.g. an C<ORDER BY> clause. This is done via the C<-as> B<select function
-attribute> supplied as shown in the example above.
 
-B<NOTE:> You need to explicitly quote '+select'/'+as' when defining the attributes.
-Not doing so causes Perl to incorrectly interpret them as a bareword with a
-unary plus operator before it.
+Also note that the L</as> attribute has B<nothing to do> with the SQL-side
+C<AS> identifier aliasing. You B<can> alias a function (so you can use it e.g.
+in an C<ORDER BY> clause), however this is done via the C<-as> B<select
+function attribute> supplied as shown in the example above.
 
 =head2 +select
 
+B<NOTE:> You B<MUST> explicitly quote C<'+select'> when using this attribute.
+Not doing so causes Perl to incorrectly interpret C<+select> as a bareword
+with a unary plus operator before it, which is the same as simply C<select>.
+
 =over 4
 
-Indicates additional columns to be selected from storage.  Works the same as
-L</select> but adds columns to the default selection, instead of specifying
-an explicit list.
+=item Value: \@extra_select_columns
 
 =back
 
+Indicates additional columns to be selected from storage.  Works the same as
+L</select> but adds columns to the current selection, instead of specifying
+a new explicit list.
+
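+For example (an illustrative sketch - the aggregate and the C<artist_count>
+alias are hypothetical):
+
+  $rs = $schema->resultset('CD')->search({}, {
+    '+select' => [ { count => 'artist', -as => 'artist_count' } ],
+    '+as'     => [ 'artist_count' ],
+  });
+
+The count is then available via C<< $row->get_column('artist_count') >>.
+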
 =head2 as
 
 =over 4
@@ -4080,12 +4191,14 @@ an explicit list.
 
 =back
 
-Indicates column names for object inflation. That is L</as> indicates the
+Indicates DBIC-side names for object inflation. That is, L</as> indicates the
 slot name in which the column value will be stored within the
 L<Row|DBIx::Class::Row> object. The value will then be accessible via this
 identifier by the C<get_column> method (or via the object accessor B<if one
-with the same name already exists>) as shown below. The L</as> attribute has
-B<nothing to do> with the SQL-side C<AS>. See L</select> for details.
+with the same name already exists>) as shown below.
+
+The L</as> attribute has B<nothing to do> with the SQL-side identifier
+aliasing C<AS>. See L</select> for details.
 
   $rs = $schema->resultset('Employee')->search(undef, {
     select => [
@@ -4116,12 +4229,18 @@ L<DBIx::Class::Manual::Cookbook> for details.
 
 =head2 +as
 
+B<NOTE:> You B<MUST> explicitly quote C<'+as'> when using this attribute.
+Not doing so causes Perl to incorrectly interpret C<+as> as a bareword
+with a unary plus operator before it, which is the same as simply C<as>.
+
 =over 4
 
-Indicates additional column names for those added via L</+select>. See L</as>.
+=item Value: \@extra_inflation_names
 
 =back
 
+Indicates additional inflation names for selectors added via L</+select>. See L</as>.
+
 =head2 join
 
 =over 4
@@ -4255,8 +4374,10 @@ For a more in-depth discussion, see L</PREFETCHING>.
 
 This attribute is a shorthand for specifying a L</join> spec, adding all
 columns from the joined related sources as L</+columns> and setting
-L</collapse> to a true value. For example, the following two queries are
-equivalent:
+L</collapse> to a true value. It can be thought of as a rough B<superset>
+of the L</join> attribute.
+
+For example, the following two queries are equivalent:
 
   my $rs = $schema->resultset('Artist')->search({}, {
     prefetch => { cds => ['genre', 'tracks' ] },
@@ -4433,15 +4554,20 @@ A arrayref of columns to group by. Can include columns of joined tables.
 
 =back
 
-HAVING is a select statement attribute that is applied between GROUP BY and
-ORDER BY. It is applied to the after the grouping calculations have been
-done.
+The HAVING operator specifies a B<secondary> condition applied to the set
+after the grouping calculations have been done. In other words it is a
+constraint just like L</where> (and accepting the same
+L<SQL::Abstract syntax|SQL::Abstract/WHERE CLAUSES>) applied to the data
+as it exists after GROUP BY has taken place. Specifying L</having> without
+L</group_by> is a logical mistake, and a fatal error on most RDBMS engines.
+
+E.g.
 
   having => { 'count_employee' => { '>=', 100 } }
 
 or with an in-place function in which case literal SQL is required:
 
-  having => \[ 'count(employee) >= ?', [ count => 100 ] ]
+  having => \[ 'count(employee) >= ?', 100 ]
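+
+A fuller sketch pairing L</group_by> with L</having> (the column and alias
+names are hypothetical):
+
+  $rs = $schema->resultset('Employee')->search({}, {
+    select   => [ 'dept', { count => 'employee_id', -as => 'count_employee' } ],
+    as       => [ 'dept', 'count_employee' ],
+    group_by => [ 'dept' ],
+    having   => { count_employee => { '>=' => 100 } },
+  });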
 
 =head2 distinct
 
@@ -4683,11 +4809,15 @@ supported:
   [ undef,   $val ] === [ {}, $val ]
   $val              === [ {}, $val ]
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
+=cut
@@ -406,7 +406,7 @@ sub func {
   my $cursor = $self->func_rs($function)->cursor;
 
   if( wantarray ) {
-    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray($self);
+    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_WANTARRAY and my $sog = fail_on_internal_wantarray;
     return map { $_->[ 0 ] } $cursor->all;
   }
 
@@ -487,14 +487,14 @@ sub _resultset {
       unless( $cols{$select} ) {
         carp_unique(
           'Use of distinct => 1 while selecting anything other than a column '
-        . 'declared on the primary ResultSource is deprecated - please supply '
-        . 'an explicit group_by instead'
+        . 'declared on the primary ResultSource is deprecated (you selected '
+        . "'$self->{_as}') - please supply an explicit group_by instead"
         );
 
         # collapse the selector to a literal so that it survives the distinct parse
         # if it turns out to be an aggregate - at least the user will get a proper exception
         # instead of silent drop of the group_by altogether
-        $select = \ $rsrc->storage->sql_maker->_recurse_fields($select);
+        $select = \[ $rsrc->storage->sql_maker->_recurse_fields($select) ];
       }
     }
 
@@ -504,14 +504,18 @@ sub _resultset {
   };
 }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
@@ -88,4 +88,17 @@ sub _register_resultset_class {
     }
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -5,7 +5,7 @@ use strict;
 use warnings;
 
 use List::Util 'first';
-use B 'perlstring';
+use DBIx::Class::_Util 'perlstring';
 
 use constant HAS_DOR => ( $] < 5.010 ? 0 : 1 );
 
@@ -267,7 +267,7 @@ sub __visit_infmap_collapse {
     my $parent_attach_slot = sprintf( '$collapse_idx[%d]%s%s{%s}',
       @{$args}{qw/-parent_node_idx -parent_node_key/},
       $args->{hri_style} ? '' : '[1]',
-      perlstring($args->{-node_relname}),
+      perlstring($args->{-node_rel_name}),
     );
 
     if ($args->{collapse_map}->{-is_single}) {
@@ -300,7 +300,7 @@ sub __visit_infmap_collapse {
       collapse_map => $relinfo,
       -parent_node_idx => $cur_node_idx,
       -parent_node_key => $node_key,
-      -node_relname => $rel,
+      -node_rel_name => $rel,
     });
 
     my $rel_src_pos = $#src + 1;
@@ -28,15 +28,17 @@ Returns the FROM entry for the table (i.e. the table name)
 
 sub from { shift->name; }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
+1;
@@ -34,11 +34,14 @@ L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::R
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -171,15 +171,17 @@ sub new {
     return $new;
 }
 
-1;
-
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
+1;
@@ -159,11 +159,14 @@ L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::R
 
 =back
 
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -9,6 +9,8 @@ use DBIx::Class::ResultSet;
 use DBIx::Class::ResultSourceHandle;
 
 use DBIx::Class::Carp;
+use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION';
+use SQL::Abstract 'is_literal_value';
 use Devel::GlobalDestruction;
 use Try::Tiny;
 use List::Util 'first';
@@ -75,7 +77,7 @@ More specifically, the L<DBIx::Class::Core> base class pulls in the
 L<DBIx::Class::ResultSourceProxy::Table> component, which defines
 the L<table|DBIx::Class::ResultSourceProxy::Table/table> method.
 When called, C<table> creates and stores an instance of
-L<DBIx::Class::ResultSoure::Table>. Luckily, to use tables as result
+L<DBIx::Class::ResultSource::Table>. Luckily, to use tables as result
 sources, you don't need to remember any of this.
 
 Result sources representing select queries, or views, can also be
@@ -84,7 +86,8 @@ created, see L<DBIx::Class::ResultSource::View> for full details.
 =head2 Finding result source objects
 
 As mentioned above, a result source instance is created and stored for
-you when you define a L<result class|DBIx::Class::Manual::Glossary/Result class>.
+you when you define a
+L<Result Class|DBIx::Class::Manual::Glossary/Result Class>.
 
 You can retrieve the result source at runtime in the following ways:
 
@@ -106,7 +109,13 @@ You can retrieve the result source at runtime in the following ways:
 
 =head1 METHODS
 
-=pod
+=head2 new
+
+  $class->new();
+
+  $class->new({attribute_name => value});
+
+Creates a new ResultSource object.  Not normally called directly by end users.
 
 =cut
 
@@ -141,6 +150,11 @@ sub new {
 
   $source->add_columns('col1' => \%col1_info, 'col2' => \%col2_info, ...);
 
+  $source->add_columns(
+    'col1' => { data_type => 'integer', is_nullable => 1, ... },
+    'col2' => { data_type => 'text',    is_auto_increment => 1, ... },
+  );
+
 Adds columns to the result source. If supplied colname => hashref
 pairs, uses the hashref as the L</column_info> for that column. Repeated
 calls of this method will add more columns, not replace them.
@@ -201,7 +215,7 @@ schema, see L<DBIx::Class::Schema/deploy>.
 
    { is_nullable => 1 }
 
-Set this to a true value for a columns that is allowed to contain NULL
+Set this to a true value for a column that is allowed to contain NULL
 values, default is false. This is currently only used to create tables
 from your schema, see L<DBIx::Class::Schema/deploy>.
 
@@ -575,7 +589,7 @@ sub remove_column { shift->remove_columns(@_); } # DO NOT CHANGE THIS TO GLOB
 Defines one or more columns as primary key for this source. Must be
 called after L</add_columns>.
 
-Additionally, defines a L<unique constraint|add_unique_constraint>
+Additionally, defines a L<unique constraint|/add_unique_constraint>
 named C<primary>.
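+
+For example (a sketch, assuming an C<id> column was already added via
+L</add_columns>):
+
+  __PACKAGE__->set_primary_key('id');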
 
 Note: you normally do want to define a primary key on your sources
@@ -639,7 +653,7 @@ sub _pri_cols_or_die {
 }
 
 # same as above but mandating single-column PK (used by relationship condition
-# inferrence)
+# inference)
 sub _single_pri_col_or_die {
   my $self = shift;
   my ($pri, @too_many) = $self->_pri_cols_or_die;
@@ -829,6 +843,7 @@ sub name_unique_constraint {
 
   my $name = $self->name;
   $name = $$name if (ref $name eq 'SCALAR');
+  $name =~ s/ ^ [^\.]+ \. //x;  # strip possible schema qualifier
 
   return join '_', $name, @$cols;
 }
@@ -1172,6 +1187,17 @@ clause contents.
 
 sub from { die 'Virtual method!' }
 
+=head2 source_info
+
+Stores a hashref of per-source metadata.  No specific key names
+have yet been standardized, the examples below are purely hypothetical
+and don't actually accomplish anything on their own:
+
+  __PACKAGE__->source_info({
+    "_tablespace" => 'fast_disk_array_3',
+    "_engine" => 'InnoDB',
+  });
+
 =head2 schema
 
 =over 4
@@ -1313,10 +1339,11 @@ sub add_relationship {
 
   # Check foreign and self are right in cond
   if ( (ref $cond ||'') eq 'HASH') {
-    for (keys %$cond) {
-      $self->throw_exception("Keys of condition should be of form 'foreign.col', not '$_'")
-        if /\./ && !/^foreign\./;
-    }
+    $_ =~ /^foreign\./ or $self->throw_exception("Malformed relationship condition key '$_': must be prefixed with 'foreign.'")
+      for keys %$cond;
+
+    $_ =~ /^self\./ or $self->throw_exception("Malformed relationship condition value '$_': must be prefixed with 'self.'")
+      for values %$cond;
   }
 
   my %rels = %{ $self->_relationships };
@@ -1362,7 +1389,7 @@ sub add_relationship {
 
 =back
 
-  my @relnames = $source->relationships();
+  my @rel_names = $source->relationships();
 
 Returns all relationship names for this source.
 
@@ -1545,6 +1572,67 @@ sub _identifying_column_set {
   return undef;
 }
 
+sub _minimal_valueset_satisfying_constraint {
+  my $self = shift;
+  my $args = { ref $_[0] eq 'HASH' ? %{ $_[0] } : @_ };
+
+  $args->{columns_info} ||= $self->columns_info;
+
+  my $vals = $self->storage->_extract_fixed_condition_columns(
+    $args->{values},
+    ($args->{carp_on_nulls} ? 'consider_nulls' : undef ),
+  );
+
+  my $cols;
+  for my $col ($self->unique_constraint_columns($args->{constraint_name}) ) {
+    if( ! exists $vals->{$col} or ( $vals->{$col}||'' ) eq UNRESOLVABLE_CONDITION ) {
+      $cols->{missing}{$col} = undef;
+    }
+    elsif( ! defined $vals->{$col} ) {
+      $cols->{$args->{carp_on_nulls} ? 'undefined' : 'missing'}{$col} = undef;
+    }
+    else {
+      # we need to inject back the '=' as _extract_fixed_condition_columns
+      # will strip it from literals and values alike, resulting in an invalid
+      # condition in the end
+      $cols->{present}{$col} = { '=' => $vals->{$col} };
+    }
+
+    $cols->{fc}{$col} = 1 if (
+      ( ! $cols->{missing} or ! exists $cols->{missing}{$col} )
+        and
+      keys %{ $args->{columns_info}{$col}{_filter_info} || {} }
+    );
+  }
+
+  $self->throw_exception( sprintf ( "Unable to satisfy requested constraint '%s', missing values for column(s): %s",
+    $args->{constraint_name},
+    join (', ', map { "'$_'" } sort keys %{$cols->{missing}} ),
+  ) ) if $cols->{missing};
+
+  $self->throw_exception( sprintf (
+    "Unable to satisfy requested constraint '%s', FilterColumn values not usable for column(s): %s",
+    $args->{constraint_name},
+    join (', ', map { "'$_'" } sort keys %{$cols->{fc}}),
+  )) if $cols->{fc};
+
+  if (
+    $cols->{undefined}
+      and
+    !$ENV{DBIC_NULLABLE_KEY_NOWARN}
+  ) {
+    carp_unique ( sprintf (
+      "NULL/undef values supplied for requested unique constraint '%s' (NULL "
+    . 'values in column(s): %s). This is almost certainly not what you wanted, '
+    . 'though you can set DBIC_NULLABLE_KEY_NOWARN to disable this warning.',
+      $args->{constraint_name},
+      join (', ', map { "'$_'" } sort keys %{$cols->{undefined}}),
+    ));
+  }
+
+  return { map { %{ $cols->{$_}||{} } } qw(present undefined) };
+}
+
 # Returns the {from} structure used to express JOIN conditions
 sub _resolve_join {
   my ($self, $join, $alias, $seen, $jpath, $parent_force_left) = @_;
@@ -1670,150 +1758,466 @@ sub _pk_depends_on {
 
 sub resolve_condition {
   carp 'resolve_condition is a private method, stop calling it';
-  my $self = shift;
-  $self->_resolve_condition (@_);
+  shift->_resolve_condition (@_);
 }
 
-our $UNRESOLVABLE_CONDITION = \ '1 = 0';
-
-# Resolves the passed condition to a concrete query fragment and a flag
-# indicating whether this is a cross-table condition. Also an optional
-# list of non-trivial values (normally conditions) returned as a part
-# of a joinfree condition hash
 sub _resolve_condition {
-  my ($self, $cond, $as, $for, $rel_name) = @_;
+#  carp_unique sprintf
+#    '_resolve_condition is a private method, and moreover is about to go '
+#  . 'away. Please contact the development team at %s if you believe you '
+#  . 'have a genuine use for this method, in order to discuss alternatives.',
+#    DBIx::Class::_ENV_::HELP_URL,
+#  ;
+
+#######################
+### API Design? What's that...? (a backwards compatible shim, kill me now)
+
+  my ($self, $cond, @res_args, $rel_name);
+
+  # we *SIMPLY DON'T KNOW YET* which arg is which, yay
+  ($self, $cond, $res_args[0], $res_args[1], $rel_name) = @_;
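+  # (historically invoked as ->_resolve_condition($cond, $as, $for, $rel_name),
+  #  where either $as or $for may be an alias string, a result object or a hashref)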
+
+  # assume that an undef is an object-like unset (set_from_related(undef))
+  my @is_objlike = map { ! defined $_ or length ref $_ } (@res_args);
+
+  # turn objlike into proper objects for saner code further down
+  for (0,1) {
+    next unless $is_objlike[$_];
+
+    if ( defined blessed $res_args[$_] ) {
+
+      # but wait - there is more!!! WHAT THE FUCK?!?!?!?!
+      if ($res_args[$_]->isa('DBIx::Class::ResultSet')) {
+        carp('Passing a resultset for relationship resolution makes no sense - invoking __gremlins__');
+        $is_objlike[$_] = 0;
+        $res_args[$_] = '__gremlins__';
+      }
+    }
+    else {
+      $res_args[$_] ||= {};
+
+      # hate everywhere - have to pass in as a plain hash
+      # pretending to be an object at least for now
+      $self->throw_exception("Unsupported object-like structure encountered: $res_args[$_]")
+        unless ref $res_args[$_] eq 'HASH';
+    }
+  }
+
+  my $args = {
+    condition => $cond,
+
+    # where-is-waldo block guesses relname, then further down we override it if available
+    (
+      $is_objlike[1] ? ( rel_name => $res_args[0], self_alias => $res_args[0], foreign_alias => 'me',         self_result_object  => $res_args[1] )
+    : $is_objlike[0] ? ( rel_name => $res_args[1], self_alias => 'me',         foreign_alias => $res_args[1], foreign_values      => $res_args[0] )
+    :                  ( rel_name => $res_args[0], self_alias => $res_args[1], foreign_alias => $res_args[0]                                      )
+    ),
+
+    ( $rel_name ? ( rel_name => $rel_name ) : () ),
+  };
+#######################
+
+  # now it's fucking easy isn't it?!
+  my $rc = $self->_resolve_relationship_condition( $args );
+
+  my @res = (
+    ( $rc->{join_free_condition} || $rc->{condition} ),
+    ! $rc->{join_free_condition},
+  );
+
+  # _resolve_relationship_condition always returns qualified cols even in the
+  # case of join_free_condition, but nothing downstream expects this
+  if ($rc->{join_free_condition} and ref $res[0] eq 'HASH') {
+    $res[0] = { map
+      { ($_ =~ /\.(.+)/) => $res[0]{$_} }
+      keys %{$res[0]}
+    };
+  }
+
+  # and more legacy
+  return wantarray ? @res : $res[0];
+}
+
+# Keep this indefinitely. There is evidence of both CPAN and
+# darkpan using it, and there isn't much harm in an extra var
+# anyway.
+our $UNRESOLVABLE_CONDITION = UNRESOLVABLE_CONDITION;
+# YES I KNOW THIS IS EVIL
+# it is there to save darkpan from themselves, since internally
+# we are moving to a constant
+Internals::SvREADONLY($UNRESOLVABLE_CONDITION => 1);
+
+# Resolves the passed condition to a concrete query fragment and extra
+# metadata
+#
+## self-explanatory API, modeled on the custom cond coderef:
+# rel_name              => (scalar)
+# foreign_alias         => (scalar)
+# foreign_values        => (either not supplied, or a hashref, or a foreign ResultObject (to be ->get_columns()ed), or plain undef )
+# self_alias            => (scalar)
+# self_result_object    => (either not supplied or a result object)
+# require_join_free_condition => (boolean, throws on failure to construct a JF-cond)
+# infer_values_based_on => (either not supplied or a hashref, implies require_join_free_condition)
+# condition             => (sqla cond struct, optional, defaults to the value from $self->rel_info(rel_name)->{cond})
+#
+## returns a hash
+# condition           => (a valid *likely fully qualified* sqla cond structure)
+# identity_map        => (a hashref of foreign-to-self *unqualified* column equality names)
+# join_free_condition => (a valid *fully qualified* sqla cond structure, maybe unset)
+# inferred_values     => (in case of an available join_free condition, this is a hashref of
+#                         *unqualified* column/value *EQUALITY* pairs, representing an amalgamation
+#                         of the JF-cond parse and infer_values_based_on
+#                         always either complete or unset)
+#
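+## An illustrative call shape (a sketch only - the rel name and aliases below are hypothetical):
+#
+#   my $rc = $rsrc->_resolve_relationship_condition(
+#     rel_name      => 'cds',
+#     self_alias    => 'me',
+#     foreign_alias => 'cds',
+#   );
+#
+#   # then inspect $rc->{condition}, $rc->{join_free_condition},
+#   # $rc->{identity_map} and $rc->{inferred_values}
+#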
+sub _resolve_relationship_condition {
+  my $self = shift;
+
+  my $args = { ref $_[0] eq 'HASH' ? %{ $_[0] } : @_ };
+
+  for ( qw( rel_name self_alias foreign_alias ) ) {
+    $self->throw_exception("Mandatory argument '$_' to _resolve_relationship_condition() is not a plain string")
+      if !defined $args->{$_} or length ref $args->{$_};
+  }
+
+  $self->throw_exception("Arguments 'self_alias' and 'foreign_alias' may not be identical")
+    if $args->{self_alias} eq $args->{foreign_alias};
+
+# TEMP
+  my $exception_rel_id = "relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}'";
+
+  my $rel_info = $self->relationship_info($args->{rel_name})
+# TEMP
+#    or $self->throw_exception( "No such $exception_rel_id" );
+    or carp_unique("Requesting resolution on non-existent relationship '$args->{rel_name}' on source '@{[ $self->source_name ]}': fix your code *soon*, as it will break with the next major version");
+
+# TEMP
+  $exception_rel_id = "relationship '$rel_info->{_original_name}' on source '@{[ $self->source_name ]}'"
+    if $rel_info and exists $rel_info->{_original_name};
+
+  $self->throw_exception("No practical way to resolve $exception_rel_id between two data structures")
+    if exists $args->{self_result_object} and exists $args->{foreign_values};
+
+  $self->throw_exception( "Argument to infer_values_based_on must be a hash" )
+    if exists $args->{infer_values_based_on} and ref $args->{infer_values_based_on} ne 'HASH';
+
+  $args->{require_join_free_condition} ||= !!$args->{infer_values_based_on};
+
+  $args->{condition} ||= $rel_info->{cond};
+
+  $self->throw_exception( "Argument 'self_result_object' must be an object of class '@{[ $self->result_class ]}'" )
+    if (
+      exists $args->{self_result_object}
+        and
+      ( ! defined blessed $args->{self_result_object} or ! $args->{self_result_object}->isa($self->result_class) )
+    )
+  ;
+
+#TEMP
+  my $rel_rsrc;# = $self->related_source($args->{rel_name});
+
+  if (exists $args->{foreign_values}) {
+# TEMP
+    $rel_rsrc ||= $self->related_source($args->{rel_name});
+
+    if (defined blessed $args->{foreign_values}) {
+
+      $self->throw_exception( "Objects supplied as 'foreign_values' ($args->{foreign_values}) must inherit from DBIx::Class::Row" )
+        unless $args->{foreign_values}->isa('DBIx::Class::Row');
+
+      carp_unique(
+        "Objects supplied as 'foreign_values' ($args->{foreign_values}) "
+      . "usually should inherit from the related ResultClass ('@{[ $rel_rsrc->result_class ]}'), "
+      . "perhaps you've made a mistake invoking the condition resolver?"
+      ) unless $args->{foreign_values}->isa($rel_rsrc->result_class);
+
+      $args->{foreign_values} = { $args->{foreign_values}->get_columns };
+    }
+    elsif (! defined $args->{foreign_values} or ref $args->{foreign_values} eq 'HASH') {
+      my $ri = { map { $_ => 1 } $rel_rsrc->relationships };
+      my $ci = $rel_rsrc->columns_info;
+      ! exists $ci->{$_} and ! exists $ri->{$_} and $self->throw_exception(
+        "Key '$_' supplied as 'foreign_values' is not a column on related source '@{[ $rel_rsrc->source_name ]}'"
+      ) for keys %{ $args->{foreign_values} ||= {} };
+    }
+    else {
+      $self->throw_exception(
+        "Argument 'foreign_values' must be either an object inheriting from '@{[ $rel_rsrc->result_class ]}', "
+      . "or a hash reference, or undef"
+      );
+    }
+  }
 
-  my $obj_rel = defined blessed $for;
+  my $ret;
 
-  if (ref $cond eq 'CODE') {
-    my $relalias = $obj_rel ? 'me' : $as;
+  if (ref $args->{condition} eq 'CODE') {
 
-    my ($crosstable_cond, $joinfree_cond) = $cond->({
-      self_alias => $obj_rel ? $as : $for,
-      foreign_alias => $relalias,
+    my $cref_args = {
+      rel_name => $args->{rel_name},
       self_resultsource => $self,
-      foreign_relname => $rel_name || ($obj_rel ? $as : $for),
-      self_rowobj => $obj_rel ? $for : undef
-    });
+      self_alias => $args->{self_alias},
+      foreign_alias => $args->{foreign_alias},
+      ( map
+        { (exists $args->{$_}) ? ( $_ => $args->{$_} ) : () }
+        qw( self_result_object foreign_values )
+      ),
+    };
+
+    # legacy - never remove these!!!
+    $cref_args->{foreign_relname} = $cref_args->{rel_name};
+
+    $cref_args->{self_rowobj} = $cref_args->{self_result_object}
+      if exists $cref_args->{self_result_object};
 
-    my $cond_cols;
-    if ($joinfree_cond) {
+    ($ret->{condition}, $ret->{join_free_condition}, my @extra) = $args->{condition}->($cref_args);
+
+    # sanity check
+    $self->throw_exception("A custom condition coderef can return at most 2 conditions, but $exception_rel_id returned extra values: @extra")
+      if @extra;
+
+    if (my $jfc = $ret->{join_free_condition}) {
+
+      $self->throw_exception (
+        "The join-free condition returned for $exception_rel_id must be a hash reference"
+      ) unless ref $jfc eq 'HASH';
+
+# TEMP
+      $rel_rsrc ||= $self->related_source($args->{rel_name});
+
+      my ($joinfree_alias, $joinfree_source);
+      if (defined $args->{self_result_object}) {
+        $joinfree_alias = $args->{foreign_alias};
+        $joinfree_source = $rel_rsrc;
+      }
+      elsif (defined $args->{foreign_values}) {
+        $joinfree_alias = $args->{self_alias};
+        $joinfree_source = $self;
+      }
 
       # FIXME sanity check until things stabilize, remove at some point
       $self->throw_exception (
-        "A join-free condition returned for relationship '$rel_name' without a row-object to chain from"
-      ) unless $obj_rel;
-
-      # FIXME another sanity check
-      if (
-        ref $joinfree_cond ne 'HASH'
-          or
-        first { $_ !~ /^\Q$relalias.\E.+/ } keys %$joinfree_cond
-      ) {
+        "A join-free condition returned for $exception_rel_id without a result object to chain from"
+      ) unless $joinfree_alias;
+
+      my $fq_col_list = { map
+        { ( "$joinfree_alias.$_" => 1 ) }
+        $joinfree_source->columns
+      };
+
+      exists $fq_col_list->{$_} or $self->throw_exception (
+        "The join-free condition returned for $exception_rel_id may only "
+      . 'contain keys that are fully qualified column names of the corresponding source '
+      . "(it returned '$_')"
+      ) for keys %$jfc;
+
+      (
+        length ref $_
+          and
+        defined blessed($_)
+          and
+        $_->isa('DBIx::Class::Row')
+          and
         $self->throw_exception (
-          "The join-free condition returned for relationship '$rel_name' must be a hash "
-         .'reference with all keys being valid columns on the related result source'
-        );
-      }
+          "The join-free condition returned for $exception_rel_id may not "
+        . 'contain result objects as values - perhaps instead of invoking '
+        . '->$something you meant to return ->get_column($something)'
+        )
+      ) for values %$jfc;
 
-      # normalize
-      for (values %$joinfree_cond) {
-        $_ = $_->{'='} if (
-          ref $_ eq 'HASH'
-            and
-          keys %$_ == 1
-            and
-          exists $_->{'='}
-        );
-      }
+    }
+  }
+  elsif (ref $args->{condition} eq 'HASH') {
 
-      # see which parts of the joinfree cond are conditionals
-      my $relcol_list = { map { $_ => 1 } $self->related_source($rel_name)->columns };
+    # the condition is static - use parallel arrays
+    # for a "pivot" depending on which side of the
+    # rel did we get as an object
+    my (@f_cols, @l_cols);
+    for my $fc (keys %{$args->{condition}}) {
+      my $lc = $args->{condition}{$fc};
 
-      for my $c (keys %$joinfree_cond) {
-        my ($colname) = $c =~ /^ (?: \Q$relalias.\E )? (.+)/x;
+      # FIXME STRICTMODE should probably check these are valid columns
+      $fc =~ s/^foreign\.// ||
+        $self->throw_exception("Invalid rel cond key '$fc'");
 
-        unless ($relcol_list->{$colname}) {
-          push @$cond_cols, $colname;
-          next;
-        }
+      $lc =~ s/^self\.// ||
+        $self->throw_exception("Invalid rel cond val '$lc'");
 
-        if (
-          ref $joinfree_cond->{$c}
-            and
-          ref $joinfree_cond->{$c} ne 'SCALAR'
-            and
-          ref $joinfree_cond->{$c} ne 'REF'
-        ) {
-          push @$cond_cols, $colname;
-          next;
+      push @f_cols, $fc;
+      push @l_cols, $lc;
+    }
+
+    # construct the crosstable condition and the identity map
+    for  (0..$#f_cols) {
+      $ret->{condition}{"$args->{foreign_alias}.$f_cols[$_]"} = { -ident => "$args->{self_alias}.$l_cols[$_]" };
+      $ret->{identity_map}{$l_cols[$_]} = $f_cols[$_];
+    };
+
+    if ($args->{foreign_values}) {
+      $ret->{join_free_condition}{"$args->{self_alias}.$l_cols[$_]"} = $args->{foreign_values}{$f_cols[$_]}
+        for 0..$#f_cols;
+    }
+    elsif (defined $args->{self_result_object}) {
+
+      for my $i (0..$#l_cols) {
+        if ( $args->{self_result_object}->has_column_loaded($l_cols[$i]) ) {
+          $ret->{join_free_condition}{"$args->{foreign_alias}.$f_cols[$i]"} = $args->{self_result_object}->get_column($l_cols[$i]);
+        }
+        else {
+          $self->throw_exception(sprintf
+            "Unable to resolve relationship '%s' from object '%s': column '%s' not "
+          . 'loaded from storage (or not passed to new() prior to insert()). You '
+          . 'probably need to call ->discard_changes to get the server-side defaults '
+          . 'from the database.',
+            $args->{rel_name},
+            $args->{self_result_object},
+            $l_cols[$i],
+          ) if $args->{self_result_object}->in_storage;
+
+          # FIXME - temporarily force-override
+          delete $args->{require_join_free_condition};
+          $ret->{join_free_condition} = UNRESOLVABLE_CONDITION;
+          last;
         }
       }
-
-      return wantarray ? ($joinfree_cond, 0, $cond_cols) : $joinfree_cond;
+    }
+  }
+  elsif (ref $args->{condition} eq 'ARRAY') {
+    if (@{$args->{condition}} == 0) {
+      $ret = {
+        condition => UNRESOLVABLE_CONDITION,
+        join_free_condition => UNRESOLVABLE_CONDITION,
+      };
+    }
+    elsif (@{$args->{condition}} == 1) {
+      $ret = $self->_resolve_relationship_condition({
+        %$args,
+        condition => $args->{condition}[0],
+      });
     }
     else {
-      return wantarray ? ($crosstable_cond, 1) : $crosstable_cond;
+      # we are discarding inferred values here... likely incorrect...
+      # then again - the entire thing is an OR, so we *can't* use them anyway
+      for my $subcond ( map
+        { $self->_resolve_relationship_condition({ %$args, condition => $_ }) }
+        @{$args->{condition}}
+      ) {
+        $self->throw_exception('Either all or none of the OR-condition members must resolve to a join-free condition')
+          if ( $ret and ( $ret->{join_free_condition} xor $subcond->{join_free_condition} ) );
+
+        $subcond->{$_} and push @{$ret->{$_}}, $subcond->{$_} for (qw(condition join_free_condition));
+      }
     }
   }
-  elsif (ref $cond eq 'HASH') {
-    my %ret;
-    foreach my $k (keys %{$cond}) {
-      my $v = $cond->{$k};
-      # XXX should probably check these are valid columns
-      $k =~ s/^foreign\.// ||
-        $self->throw_exception("Invalid rel cond key ${k}");
-      $v =~ s/^self\.// ||
-        $self->throw_exception("Invalid rel cond val ${v}");
-      if (ref $for) { # Object
-        #warn "$self $k $for $v";
-        unless ($for->has_column_loaded($v)) {
-          if ($for->in_storage) {
-            $self->throw_exception(sprintf
-              "Unable to resolve relationship '%s' from object %s: column '%s' not "
-            . 'loaded from storage (or not passed to new() prior to insert()). You '
-            . 'probably need to call ->discard_changes to get the server-side defaults '
-            . 'from the database.',
-              $as,
-              $for,
-              $v,
-            );
-          }
-          return $UNRESOLVABLE_CONDITION;
+  else {
+    $self->throw_exception ("Can't handle condition $args->{condition} for $exception_rel_id yet :(");
+  }
+
+  $self->throw_exception(ucfirst "$exception_rel_id does not resolve to a join-free condition fragment") if (
+    $args->{require_join_free_condition}
+      and
+    ( ! $ret->{join_free_condition} or $ret->{join_free_condition} eq UNRESOLVABLE_CONDITION )
+  );
+
+  my $storage = $self->schema->storage;
+
+  # we got something back - sanity check and infer values if we can
+  my @nonvalues;
+  if ( my $jfc = $ret->{join_free_condition} and $ret->{join_free_condition} ne UNRESOLVABLE_CONDITION ) {
+
+    my $jfc_eqs = $storage->_extract_fixed_condition_columns($jfc, 'consider_nulls');
+
+    if (keys %$jfc_eqs) {
+
+      for (keys %$jfc) {
+        # $jfc is fully qualified by definition
+        my ($col) = $_ =~ /\.(.+)/;
+
+        if (exists $jfc_eqs->{$_} and ($jfc_eqs->{$_}||'') ne UNRESOLVABLE_CONDITION) {
+          $ret->{inferred_values}{$col} = $jfc_eqs->{$_};
+        }
+        elsif ( !$args->{infer_values_based_on} or ! exists $args->{infer_values_based_on}{$col} ) {
+          push @nonvalues, $col;
         }
-        $ret{$k} = $for->get_column($v);
-        #$ret{$k} = $for->get_column($v) if $for->has_column_loaded($v);
-        #warn %ret;
-      } elsif (!defined $for) { # undef, i.e. "no object"
-        $ret{$k} = undef;
-      } elsif (ref $as eq 'HASH') { # reverse hashref
-        $ret{$v} = $as->{$k};
-      } elsif (ref $as) { # reverse object
-        $ret{$v} = $as->get_column($k);
-      } elsif (!defined $as) { # undef, i.e. "no reverse object"
-        $ret{$v} = undef;
-      } else {
-        $ret{"${as}.${k}"} = { -ident => "${for}.${v}" };
       }
+
+      # all or nothing
+      delete $ret->{inferred_values} if @nonvalues;
     }
+  }
 
-    return wantarray
-      ? ( \%ret, ($obj_rel || !defined $as || ref $as) ? 0 : 1 )
-      : \%ret
-    ;
+  # did the user explicitly ask
+  if ($args->{infer_values_based_on}) {
+
+    $self->throw_exception(sprintf (
+      "Unable to complete value inference - custom $exception_rel_id returns conditions instead of values for column(s): %s",
+      map { "'$_'" } @nonvalues
+    )) if @nonvalues;
+
+    $ret->{inferred_values} ||= {};
+
+    $ret->{inferred_values}{$_} = $args->{infer_values_based_on}{$_}
+      for keys %{$args->{infer_values_based_on}};
   }
-  elsif (ref $cond eq 'ARRAY') {
-    my (@ret, $crosstable);
-    for (@$cond) {
-      my ($cond, $crosstab) = $self->_resolve_condition($_, $as, $for, $rel_name);
-      push @ret, $cond;
-      $crosstable ||= $crosstab;
+
+  # add the identities based on the main condition
+  # (may already be there, since easy to calculate on the fly in the HASH case)
+  if ( ! $ret->{identity_map} ) {
+
+    my $col_eqs = $storage->_extract_fixed_condition_columns($ret->{condition});
+
+    my $colinfos;
+    for my $lhs (keys %$col_eqs) {
+
+      next if $col_eqs->{$lhs} eq UNRESOLVABLE_CONDITION;
+
+# TEMP
+      $rel_rsrc ||= $self->related_source($args->{rel_name});
+
+      # there is no way to know who is right and who is left in a cref
+      # therefore a full blown resolution call, and figure out the
+      # direction a bit further below
+      $colinfos ||= $storage->_resolve_column_info([
+        { -alias => $args->{self_alias}, -rsrc => $self },
+        { -alias => $args->{foreign_alias}, -rsrc => $rel_rsrc },
+      ]);
+
+      next unless $colinfos->{$lhs};  # someone is engaging in witchcraft
+
+      if ( my $rhs_ref = is_literal_value( $col_eqs->{$lhs} ) ) {
+
+        if (
+          $colinfos->{$rhs_ref->[0]}
+            and
+          $colinfos->{$lhs}{-source_alias} ne $colinfos->{$rhs_ref->[0]}{-source_alias}
+        ) {
+          ( $colinfos->{$lhs}{-source_alias} eq $args->{self_alias} )
+            ? ( $ret->{identity_map}{$colinfos->{$lhs}{-colname}} = $colinfos->{$rhs_ref->[0]}{-colname} )
+            : ( $ret->{identity_map}{$colinfos->{$rhs_ref->[0]}{-colname}} = $colinfos->{$lhs}{-colname} )
+          ;
+        }
+      }
+      elsif (
+        $col_eqs->{$lhs} =~ /^ ( \Q$args->{self_alias}\E \. .+ ) /x
+          and
+        ($colinfos->{$1}||{})->{-result_source} == $rel_rsrc
+      ) {
+        my ($lcol, $rcol) = map
+          { $colinfos->{$_}{-colname} }
+          ( $lhs, $1 )
+        ;
+        carp_unique(
+          "The $exception_rel_id specifies equality of column '$lcol' and the "
+        . "*VALUE* '$rcol' (you did not use the { -ident => ... } operator)"
+        );
+      }
     }
-    return wantarray ? (\@ret, $crosstable) : \@ret;
-  }
-  else {
-    $self->throw_exception ("Can't handle condition $cond for relationship '$rel_name' yet :(");
   }
+
+  # FIXME - temporary, to fool the idiotic check in SQLMaker::_join_condition
+  $ret->{condition} = { -and => [ $ret->{condition} ] }
+    unless $ret->{condition} eq UNRESOLVABLE_CONDITION;
+
+  $ret;
 }
 
 =head2 related_source
@@ -1969,25 +2373,6 @@ sub throw_exception {
   ;
 }
 
-=head2 source_info
-
-Stores a hashref of per-source metadata.  No specific key names
-have yet been standardized, the examples below are purely hypothetical
-and don't actually accomplish anything on their own:
-
-  __PACKAGE__->source_info({
-    "_tablespace" => 'fast_disk_array_3',
-    "_engine" => 'InnoDB',
-  });
-
-=head2 new
-
-  $class->new();
-
-  $class->new({attribute_name => value});
-
-Creates a new ResultSource object.  Not normally called directly by end users.
-
 =head2 column_info_from_storage
 
 =over
@@ -2004,14 +2389,16 @@ Enables the on-demand automatic loading of the above column
 metadata from storage as necessary.  This is *deprecated*, and
 should not be used.  It will be removed before 1.0.
 
+=head1 FURTHER QUESTIONS?
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -68,7 +68,7 @@ sub resolve {
     # vague error message as this is never supposed to happen
     "Unable to resolve moniker '%s' - please contact the dev team at %s",
     $_[0]->source_moniker,
-    'http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT',
+    DBIx::Class::_ENV_::HELP_URL,
   ), 'full_stacktrace');
 }
 
@@ -128,9 +128,16 @@ sub STORABLE_thaw {
   }
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-Ash Berlin C<< <ash@cpan.org> >>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -110,16 +110,12 @@ sub table {
 
 Gets or sets the table class used for construction and validation.
 
-=cut
-
 =head2 has_column
 
   if ($obj->has_column($col)) { ... }
 
 Returns 1 if the class has a column of this name, 0 otherwise.
 
-=cut
-
 =head2 column_info
 
   my $info = $obj->column_info($col);
@@ -128,23 +124,23 @@ Returns the column metadata hashref for a column. For a description of
 the various types of column data in this hashref, see
 L<DBIx::Class::ResultSource/add_column>
 
-=cut
-
 =head2 columns
 
   my @column_names = $obj->columns;
 
-=cut
+=head1 FURTHER QUESTIONS?
 
-1;
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 COPYRIGHT AND LICENSE
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-=head1 LICENSE
+=cut
 
-You may distribute this code under the same terms as Perl itself.
+1;
 
-=cut
 
@@ -67,11 +67,14 @@ L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::R
 
 =back
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
@@ -4,9 +4,10 @@ package # hide from PAUSE
 use strict;
 use warnings;
 
-use base qw/DBIx::Class/;
-use Scalar::Util qw/blessed/;
-use Sub::Name qw/subname/;
+use base 'DBIx::Class';
+
+use Scalar::Util 'blessed';
+use DBIx::Class::_Util 'quote_sub';
 use namespace::clean;
 
 __PACKAGE__->mk_group_accessors('inherited_ro_instance' => 'source_name');
@@ -80,10 +81,11 @@ for my $method_to_proxy (qw/
   relationship_info
   has_relationship
 /) {
-  no strict qw/refs/;
-  *{__PACKAGE__."::$method_to_proxy"} = subname $method_to_proxy => sub {
-    shift->result_source_instance->$method_to_proxy (@_);
-  };
+  quote_sub __PACKAGE__."::$method_to_proxy", sprintf( <<'EOC', $method_to_proxy );
+    DBIx::Class::_ENV_::ASSERT_NO_INTERNAL_INDIRECT_CALLS and DBIx::Class::_Util::fail_on_internal_call;
+    shift->result_source_instance->%s (@_);
+EOC
+
 }
 
 1;
@@ -9,6 +9,7 @@ use Scalar::Util 'blessed';
 use List::Util 'first';
 use Try::Tiny;
 use DBIx::Class::Carp;
+use SQL::Abstract 'is_literal_value';
 
 ###
 ### Internal method
@@ -51,7 +52,7 @@ All "Row objects" derived from a Schema-attached L<DBIx::Class::ResultSet>
 object (such as a typical C<< L<search|DBIx::Class::ResultSet/search>->
 L<next|DBIx::Class::ResultSet/next> >> call) are actually Result
 instances, based on your application's
-L<Result class|DBIx::Class::Manual::Glossary/Result_class>.
+L<Result Class|DBIx::Class::Manual::Glossary/Result Class>.
 
 L<DBIx::Class::Row> implements most of the row-based communication with the
 underlying storage, but a Result class B<should not inherit from it directly>.
@@ -125,26 +126,26 @@ with NULL as the default, and save yourself a SELECT.
 ## tests!
 
 sub __new_related_find_or_new_helper {
-  my ($self, $relname, $values) = @_;
+  my ($self, $rel_name, $values) = @_;
 
   my $rsrc = $self->result_source;
 
   # create a mock-object so all new/set_column component overrides will run:
-  my $rel_rs = $rsrc->related_source($relname)->resultset;
+  my $rel_rs = $rsrc->related_source($rel_name)->resultset;
   my $new_rel_obj = $rel_rs->new_result($values);
   my $proc_data = { $new_rel_obj->get_columns };
 
-  if ($self->__their_pk_needs_us($relname)) {
-    MULTICREATE_DEBUG and print STDERR "MC $self constructing $relname via new_result\n";
+  if ($self->__their_pk_needs_us($rel_name)) {
+    MULTICREATE_DEBUG and print STDERR "MC $self constructing $rel_name via new_result\n";
     return $new_rel_obj;
   }
-  elsif ($rsrc->_pk_depends_on($relname, $proc_data )) {
+  elsif ($rsrc->_pk_depends_on($rel_name, $proc_data )) {
     if (! keys %$proc_data) {
       # there is nothing to search for - blind create
-      MULTICREATE_DEBUG and print STDERR "MC $self constructing default-insert $relname\n";
+      MULTICREATE_DEBUG and print STDERR "MC $self constructing default-insert $rel_name\n";
     }
     else {
-      MULTICREATE_DEBUG and print STDERR "MC $self constructing $relname via find_or_new\n";
+      MULTICREATE_DEBUG and print STDERR "MC $self constructing $rel_name via find_or_new\n";
       # this is not *really* find or new, as we don't want to double-new the
       # data (thus potentially double encoding or whatever)
       my $exists = $rel_rs->find ($proc_data);
@@ -155,17 +156,17 @@ sub __new_related_find_or_new_helper {
   else {
     my $us = $rsrc->source_name;
     $self->throw_exception (
-      "Unable to determine relationship '$relname' direction from '$us', "
-    . "possibly due to a missing reverse-relationship on '$relname' to '$us'."
+      "Unable to determine relationship '$rel_name' direction from '$us', "
+    . "possibly due to a missing reverse-relationship on '$rel_name' to '$us'."
     );
   }
 }
 
 sub __their_pk_needs_us { # this should maybe be in resultsource.
-  my ($self, $relname) = @_;
+  my ($self, $rel_name) = @_;
   my $rsrc = $self->result_source;
-  my $reverse = $rsrc->reverse_relationship_info($relname);
-  my $rel_source = $rsrc->related_source($relname);
+  my $reverse = $rsrc->reverse_relationship_info($rel_name);
+  my $rel_source = $rsrc->related_source($rel_name);
   my $us = { $self->get_columns };
   foreach my $key (keys %$reverse) {
     # if their primary key depends on us, then we have to
@@ -199,7 +200,7 @@ sub new {
     my ($related,$inflated);
 
     foreach my $key (keys %$attrs) {
-      if (ref $attrs->{$key}) {
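+      # literal SQL values (scalar refs and \[ ... ] constructs) fall through
+      # to the plain store_column() call below - only other references are
+      # treated as related or inflatable data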
+      if (ref $attrs->{$key} and ! is_literal_value($attrs->{$key}) ) {
         ## Can we extract this lot to use with update(_or .. ) ?
         $new->throw_exception("Can't do multi-create without result source")
           unless $rsrc;
@@ -256,14 +257,16 @@ sub new {
           }
           $inflated->{$key} = $rel_obj;
           next;
-        } elsif ($class->has_column($key)
-            && $class->column_info($key)->{_inflate_info}) {
+        }
+        elsif (
+          $rsrc->has_column($key)
+            and
+          $rsrc->column_info($key)->{_inflate_info}
+        ) {
           $inflated->{$key} = $attrs->{$key};
           next;
         }
       }
-      $new->throw_exception("No such column '$key' on $class")
-        unless $class->has_column($key);
       $new->store_column($key => $attrs->{$key});
     }
 
@@ -351,27 +354,27 @@ sub insert {
 
   # insert what needs to be inserted before us
   my %pre_insert;
-  for my $relname (keys %related_stuff) {
-    my $rel_obj = $related_stuff{$relname};
+  for my $rel_name (keys %related_stuff) {
+    my $rel_obj = $related_stuff{$rel_name};
 
-    if (! $self->{_rel_in_storage}{$relname}) {
+    if (! $self->{_rel_in_storage}{$rel_name}) {
       next unless (blessed $rel_obj && $rel_obj->isa('DBIx::Class::Row'));
 
       next unless $rsrc->_pk_depends_on(
-                    $relname, { $rel_obj->get_columns }
+                    $rel_name, { $rel_obj->get_columns }
                   );
 
       # The guard will save us if we blow out of this scope via die
       $rollback_guard ||= $storage->txn_scope_guard;
 
-      MULTICREATE_DEBUG and print STDERR "MC $self pre-reconstructing $relname $rel_obj\n";
+      MULTICREATE_DEBUG and print STDERR "MC $self pre-reconstructing $rel_name $rel_obj\n";
 
       my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_columns };
       my $existing;
 
       # if there are no keys - nothing to search for
       if (keys %$them and $existing = $self->result_source
-                                           ->related_source($relname)
+                                           ->related_source($rel_name)
                                            ->resultset
                                            ->find($them)
       ) {
@@ -381,11 +384,11 @@ sub insert {
         $rel_obj->insert;
       }
 
-      $self->{_rel_in_storage}{$relname} = 1;
+      $self->{_rel_in_storage}{$rel_name} = 1;
     }
 
-    $self->set_from_related($relname, $rel_obj);
-    delete $related_stuff{$relname};
+    $self->set_from_related($rel_name, $rel_obj);
+    delete $related_stuff{$rel_name};
   }
 
   # start a transaction here if not started yet and there is more stuff
@@ -426,25 +429,25 @@ sub insert {
   $self->{_dirty_columns} = {};
   $self->{related_resultsets} = {};
 
-  foreach my $relname (keys %related_stuff) {
-    next unless $rsrc->has_relationship ($relname);
+  foreach my $rel_name (keys %related_stuff) {
+    next unless $rsrc->has_relationship ($rel_name);
 
-    my @cands = ref $related_stuff{$relname} eq 'ARRAY'
-      ? @{$related_stuff{$relname}}
-      : $related_stuff{$relname}
+    my @cands = ref $related_stuff{$rel_name} eq 'ARRAY'
+      ? @{$related_stuff{$rel_name}}
+      : $related_stuff{$rel_name}
     ;
 
     if (@cands && blessed $cands[0] && $cands[0]->isa('DBIx::Class::Row')
     ) {
-      my $reverse = $rsrc->reverse_relationship_info($relname);
+      my $reverse = $rsrc->reverse_relationship_info($rel_name);
       foreach my $obj (@cands) {
         $obj->set_from_related($_, $self) for keys %$reverse;
-        if ($self->__their_pk_needs_us($relname)) {
-          if (exists $self->{_ignore_at_insert}{$relname}) {
-            MULTICREATE_DEBUG and print STDERR "MC $self skipping post-insert on $relname\n";
+        if ($self->__their_pk_needs_us($rel_name)) {
+          if (exists $self->{_ignore_at_insert}{$rel_name}) {
+            MULTICREATE_DEBUG and print STDERR "MC $self skipping post-insert on $rel_name\n";
           }
           else {
-            MULTICREATE_DEBUG and print STDERR "MC $self inserting $relname $obj\n";
+            MULTICREATE_DEBUG and print STDERR "MC $self inserting $rel_name $obj\n";
             $obj->insert;
           }
         } else {
@@ -477,8 +480,8 @@ sub insert {
 
 Indicates whether the object exists as a row in the database or
 not. This is set to true when L<DBIx::Class::ResultSet/find>,
-L<DBIx::Class::ResultSet/create> or L<DBIx::Class::ResultSet/insert>
-are used.
+L<DBIx::Class::ResultSet/create> or L<DBIx::Class::Row/insert>
+are invoked.
 
 Creating a result object using L<DBIx::Class::ResultSet/new_result>, or
 calling L</delete> on one, sets it to false.
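A quick sketch of that lifecycle (the C<$schema> handle and the C<Artist> source are assumed purely for illustration):

  my $artist = $schema->resultset('Artist')->new_result({ name => 'Shiny' });
  $artist->in_storage;    # false - nothing has been written yet

  $artist->insert;        # or $rs->create({ ... }), which news and inserts in one go
  $artist->in_storage;    # true - the object is now backed by a database row

  $artist->delete;
  $artist->in_storage;    # false again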
@@ -661,12 +664,20 @@ To retrieve all loaded column values as a hash, use L</get_columns>.
 sub get_column {
   my ($self, $column) = @_;
   $self->throw_exception( "Can't fetch data as class method" ) unless ref $self;
-  return $self->{_column_data}{$column} if exists $self->{_column_data}{$column};
+
+  return $self->{_column_data}{$column}
+    if exists $self->{_column_data}{$column};
+
   if (exists $self->{_inflated_column}{$column}) {
-    return $self->store_column($column,
-      $self->_deflated_column($column, $self->{_inflated_column}{$column}));
+    # deflate+return cycle
+    return $self->store_column($column, $self->_deflated_column(
+      $column, $self->{_inflated_column}{$column}
+    ));
   }
-  $self->throw_exception( "No such column '${column}'" ) unless $self->has_column($column);
+
+  $self->throw_exception( "No such column '${column}' on " . ref $self )
+    unless $self->result_source->has_column($column);
+
   return undef;
 }
 
@@ -692,8 +703,12 @@ database (or set locally).
 sub has_column_loaded {
   my ($self, $column) = @_;
   $self->throw_exception( "Can't call has_column data as class method" ) unless ref $self;
-  return 1 if exists $self->{_inflated_column}{$column};
-  return exists $self->{_column_data}{$column};
+
+  return (
+    exists $self->{_inflated_column}{$column}
+      or
+    exists $self->{_column_data}{$column}
+  ) ? 1 : 0;
 }
 
 =head2 get_columns
@@ -718,6 +733,7 @@ See L</get_inflated_columns> to get the inflated values.
 sub get_columns {
   my $self = shift;
   if (exists $self->{_inflated_column}) {
+    # deflate cycle for each inflation, including filter rels
     foreach my $col (keys %{$self->{_inflated_column}}) {
       unless (exists $self->{_column_data}{$col}) {
 
@@ -787,8 +803,8 @@ really changed.
 sub make_column_dirty {
   my ($self, $column) = @_;
 
-  $self->throw_exception( "No such column '${column}'" )
-    unless exists $self->{_column_data}{$column} || $self->has_column($column);
+  $self->throw_exception( "No such column '${column}' on " . ref $self )
+    unless exists $self->{_column_data}{$column} || $self->result_source->has_column($column);
 
   # the entire clean/dirty code relies on exists, not on true/false
   return 1 if exists $self->{_dirty_columns}{$column};
@@ -830,9 +846,9 @@ See L<DBIx::Class::InflateColumn> for how to setup inflation.
 sub get_inflated_columns {
   my $self = shift;
 
-  my $loaded_colinfo = $self->columns_info ([
-    grep { $self->has_column_loaded($_) } $self->columns
-  ]);
+  my $loaded_colinfo = $self->result_source->columns_info;
+  $self->has_column_loaded($_) or delete $loaded_colinfo->{$_}
+    for keys %$loaded_colinfo;
 
   my %cols_to_return = ( %{$self->{_column_data}}, %$loaded_colinfo );
 
@@ -874,8 +890,11 @@ sub get_inflated_columns {
 }
 
 sub _is_column_numeric {
-   my ($self, $column) = @_;
-    my $colinfo = $self->column_info ($column);
+    my ($self, $column) = @_;
+
+    return undef unless $self->result_source->has_column($column);
+
+    my $colinfo = $self->result_source->column_info ($column);
 
     # cache for speed (the object may *not* have a resultsource instance)
     if (
@@ -919,17 +938,17 @@ sub set_column {
   my ($self, $column, $new_value) = @_;
 
   my $had_value = $self->has_column_loaded($column);
-  my ($old_value, $in_storage) = ($self->get_column($column), $self->in_storage)
-    if $had_value;
+  my $old_value = $self->get_column($column);
 
   $new_value = $self->store_column($column, $new_value);
 
   my $dirty =
     $self->{_dirty_columns}{$column}
       ||
-    $in_storage # no point tracking dirtyness on uninserted data
+    ( $self->in_storage # no point tracking dirtyness on uninserted data
       ? ! $self->_eq_column_values ($column, $old_value, $new_value)
       : 1
+    )
   ;
 
   if ($dirty) {
@@ -940,20 +959,20 @@ sub set_column {
     #
     # FIXME - this is a quick *largely incorrect* hack, pending a more
     # serious rework during the merge of single and filter rels
-    my $relnames = $self->result_source->{_relationships};
-    for my $relname (keys %$relnames) {
+    my $rel_names = $self->result_source->{_relationships};
+    for my $rel_name (keys %$rel_names) {
 
-      my $acc = $relnames->{$relname}{attrs}{accessor} || '';
+      my $acc = $rel_names->{$rel_name}{attrs}{accessor} || '';
 
-      if ( $acc eq 'single' and $relnames->{$relname}{attrs}{fk_columns}{$column} ) {
-        delete $self->{related_resultsets}{$relname};
-        delete $self->{_relationship_data}{$relname};
-        #delete $self->{_inflated_column}{$relname};
+      if ( $acc eq 'single' and $rel_names->{$rel_name}{attrs}{fk_columns}{$column} ) {
+        delete $self->{related_resultsets}{$rel_name};
+        delete $self->{_relationship_data}{$rel_name};
+        #delete $self->{_inflated_column}{$rel_name};
       }
-      elsif ( $acc eq 'filter' and $relname eq $column) {
-        delete $self->{related_resultsets}{$relname};
-        #delete $self->{_relationship_data}{$relname};
-        delete $self->{_inflated_column}{$relname};
+      elsif ( $acc eq 'filter' and $rel_name eq $column) {
+        delete $self->{related_resultsets}{$rel_name};
+        #delete $self->{_relationship_data}{$rel_name};
+        delete $self->{_inflated_column}{$rel_name};
       }
     }
 
@@ -962,7 +981,7 @@ sub set_column {
       $had_value
         and
       # no storage - no storage-value
-      $in_storage
+      $self->in_storage
         and
       # no value already stored (multiple changes before commit to storage)
       ! exists $self->{_column_data_in_storage}{$column}
@@ -985,6 +1004,13 @@ sub _eq_column_values {
   elsif (not defined $old) {  # both undef
     return 1;
   }
+  elsif (
+    is_literal_value $old
+      or
+    is_literal_value $new
+  ) {
+    return 0;
+  }
   elsif ($old eq $new) {
     return 1;
   }
@@ -1000,7 +1026,7 @@ sub _eq_column_values {
 # value tracked between column changes and commitment to storage
 sub _track_storage_value {
   my ($self, $col) = @_;
-  return defined first { $col eq $_ } ($self->primary_columns);
+  return defined first { $col eq $_ } ($self->result_source->primary_columns);
 }
 
 =head2 set_columns
@@ -1029,7 +1055,7 @@ sub set_columns {
 
 =head2 set_inflated_columns
 
-  $result->set_inflated_columns({ $col => $val, $relname => $obj, ... });
+  $result->set_inflated_columns({ $col => $val, $rel_name => $obj, ... });
 
 =over
 
@@ -1062,10 +1088,13 @@ See also L<DBIx::Class::Relationship::Base/set_from_related>.
 
 sub set_inflated_columns {
   my ( $self, $upd ) = @_;
+  my $rsrc;
   foreach my $key (keys %$upd) {
     if (ref $upd->{$key}) {
-      my $info = $self->relationship_info($key);
+      $rsrc ||= $self->result_source;
+      my $info = $rsrc->relationship_info($key);
       my $acc_type = $info->{attrs}{accessor} || '';
+
       if ($acc_type eq 'single') {
         my $rel_obj = delete $upd->{$key};
         $self->set_from_related($key => $rel_obj);
@@ -1076,7 +1105,11 @@ sub set_inflated_columns {
           "Recursive update is not supported over relationships of type '$acc_type' ($key)"
         );
       }
-      elsif ($self->has_column($key) && exists $self->column_info($key)->{_inflate_info}) {
+      elsif (
+        $rsrc->has_column($key)
+          and
+        exists $rsrc->column_info($key)->{_inflate_info}
+      ) {
         $self->set_inflated_column($key, delete $upd->{$key});
       }
     }
@@ -1114,41 +1147,41 @@ is set by default on C<has_many> relationships and unset on all others.
 sub copy {
   my ($self, $changes) = @_;
   $changes ||= {};
-  my $col_data = { %{$self->{_column_data}} };
+  my $col_data = { $self->get_columns };
 
-  my $colinfo = $self->columns_info([ keys %$col_data ]);
+  my $rsrc = $self->result_source;
+
+  my $colinfo = $rsrc->columns_info;
   foreach my $col (keys %$col_data) {
     delete $col_data->{$col}
-      if $colinfo->{$col}{is_auto_increment};
+      if ( ! $colinfo->{$col} or $colinfo->{$col}{is_auto_increment} );
   }
 
   my $new = { _column_data => $col_data };
   bless $new, ref $self;
 
-  $new->result_source($self->result_source);
+  $new->result_source($rsrc);
   $new->set_inflated_columns($changes);
   $new->insert;
 
   # Its possible we'll have 2 relations to the same Source. We need to make
   # sure we don't try to insert the same row twice else we'll violate unique
   # constraints
-  my $relnames_copied = {};
+  my $rel_names_copied = {};
 
-  foreach my $relname ($self->result_source->relationships) {
-    my $rel_info = $self->result_source->relationship_info($relname);
+  foreach my $rel_name ($rsrc->relationships) {
+    my $rel_info = $rsrc->relationship_info($rel_name);
 
     next unless $rel_info->{attrs}{cascade_copy};
 
-    my $resolved = $self->result_source->_resolve_condition(
-      $rel_info->{cond}, $relname, $new, $relname
+    my $resolved = $rsrc->_resolve_condition(
+      $rel_info->{cond}, $rel_name, $new, $rel_name
     );
 
-    my $copied = $relnames_copied->{ $rel_info->{source} } ||= {};
-    foreach my $related ($self->search_related($relname)->all) {
-      my $id_str = join("\0", $related->id);
-      next if $copied->{$id_str};
-      $copied->{$id_str} = 1;
-      my $rel_copy = $related->copy($resolved);
+    my $copied = $rel_names_copied->{ $rel_info->{source} } ||= {};
+    foreach my $related ($self->search_related($rel_name)->all) {
+      $related->copy($resolved)
+        unless $copied->{$related->ID}++;
     }
 
   }
@@ -1178,8 +1211,8 @@ extend this method to catch all data setting methods.
 
 sub store_column {
   my ($self, $column, $value) = @_;
-  $self->throw_exception( "No such column '${column}'" )
-    unless exists $self->{_column_data}{$column} || $self->has_column($column);
+  $self->throw_exception( "No such column '${column}' on " . ref $self )
+    unless exists $self->{_column_data}{$column} || $self->result_source->has_column($column);
   $self->throw_exception( "set_column called for ${column} without value" )
     if @_ < 3;
   return $self->{_column_data}{$column} = $value;
@@ -1220,61 +1253,59 @@ sub inflate_result {
   ;
 
   if ($prefetch) {
-    for my $relname ( keys %$prefetch ) {
+    for my $rel_name ( keys %$prefetch ) {
 
-      my $relinfo = $rsrc->relationship_info($relname) or do {
+      my $relinfo = $rsrc->relationship_info($rel_name) or do {
         my $err = sprintf
           "Inflation into non-existent relationship '%s' of '%s' requested",
-          $relname,
+          $rel_name,
           $rsrc->source_name,
         ;
-        if (my ($colname) = sort { length($a) <=> length ($b) } keys %{$prefetch->{$relname}[0] || {}} ) {
+        if (my ($colname) = sort { length($a) <=> length ($b) } keys %{$prefetch->{$rel_name}[0] || {}} ) {
           $err .= sprintf ", check the inflation specification (columns/as) ending in '...%s.%s'",
-          $relname,
+          $rel_name,
           $colname,
         }
 
         $rsrc->throw_exception($err);
       };
 
-      $class->throw_exception("No accessor type declared for prefetched relationship '$relname'")
+      $class->throw_exception("No accessor type declared for prefetched relationship '$rel_name'")
         unless $relinfo->{attrs}{accessor};
 
+      my $rel_rs = $new->related_resultset($rel_name);
+
       my @rel_objects;
       if (
-        $prefetch->{$relname}
+        @{ $prefetch->{$rel_name} || [] }
           and
-        @{$prefetch->{$relname}}
-          and
-        ref($prefetch->{$relname}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class
+        ref($prefetch->{$rel_name}) ne $DBIx::Class::ResultSource::RowParser::Util::null_branch_class
       ) {
 
-        my $rel_rs = $new->related_resultset($relname);
-
-        if (ref $prefetch->{$relname}[0] eq 'ARRAY') {
+        if (ref $prefetch->{$rel_name}[0] eq 'ARRAY') {
           my $rel_rsrc = $rel_rs->result_source;
           my $rel_class = $rel_rs->result_class;
           my $rel_inflator = $rel_class->can('inflate_result');
           @rel_objects = map
             { $rel_class->$rel_inflator ( $rel_rsrc, @$_ ) }
-            @{$prefetch->{$relname}}
+            @{$prefetch->{$rel_name}}
           ;
         }
         else {
           @rel_objects = $rel_rs->result_class->inflate_result(
-            $rel_rs->result_source, @{$prefetch->{$relname}}
+            $rel_rs->result_source, @{$prefetch->{$rel_name}}
           );
         }
       }
 
       if ($relinfo->{attrs}{accessor} eq 'single') {
-        $new->{_relationship_data}{$relname} = $rel_objects[0];
+        $new->{_relationship_data}{$rel_name} = $rel_objects[0];
       }
       elsif ($relinfo->{attrs}{accessor} eq 'filter') {
-        $new->{_inflated_column}{$relname} = $rel_objects[0];
+        $new->{_inflated_column}{$rel_name} = $rel_objects[0];
       }
 
-      $new->related_resultset($relname)->set_cache(\@rel_objects);
+      $rel_rs->set_cache(\@rel_objects);
     }
   }
 
@@ -1294,7 +1325,7 @@ sub inflate_result {
 
 =back
 
-L</Update>s the object if it's already in the database, according to
+L</update>s the object if it's already in the database, according to
 L</in_storage>, else L</insert>s it.
 
 =head2 insert_or_update
@@ -1492,11 +1523,12 @@ $attrs, if supplied, is expected to be a hashref of attributes suitable for pass
 second argument to C<< $resultset->search($cond, $attrs) >>;
 
 Note: If you are using L<DBIx::Class::Storage::DBI::Replicated> as your
-storage, please kept in mind that if you L</discard_changes> on a row that you
-just updated or created, you should wrap the entire bit inside a transaction.
-Otherwise you run the risk that you insert or update to the master database
-but read from a replicant database that has not yet been updated from the
-master.  This will result in unexpected results.
+storage, a default of
+L<< C<< { force_pool => 'master' } >>
+|DBIx::Class::Storage::DBI::Replicated/SYNOPSIS >>  is automatically set for
+you. Prior to C<< DBIx::Class 0.08109 >> (before 2010) one would have been
+required to explicitly wrap the entire operation in a transaction to guarantee
+that up-to-date results are read from the master database.
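A minimal sketch of what the new default means in practice ($row is assumed to belong to a Replicated-backed schema; the explicit attribute form is shown only for completeness):

  $row->update({ rating => 10 });
  $row->discard_changes;    # the re-read now goes to the 'master' pool by default

  # the attrs hashref described above still allows spelling the pool out
  $row->discard_changes({ force_pool => 'master' });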
 
 =cut
 
@@ -1556,13 +1588,16 @@ sub throw_exception {
 Returns the primary key(s) for a row. Can't be called as a class method.
 Actually implemented in L<DBIx::Class::PK>
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -61,7 +61,7 @@ sub _LimitOffset {
 
 =head2 LimitXY
 
- SELECT ... LIMIT $offset $limit
+ SELECT ... LIMIT $offset, $limit
 
 Supported by B<MySQL> and any L<SQL::Statement> based DBD
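On the user side this dialect is driven by the ordinary C<rows>/C<offset> resultset attributes; a minimal sketch (the source and column names are assumptions):

  # against a MySQL-backed schema this renders roughly as
  #   SELECT me.* FROM track me ORDER BY me.position LIMIT 20, 10
  my @page = $schema->resultset('Track')->search(undef, {
    rows     => 10,
    offset   => 20,
    order_by => 'me.position',
  })->all;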
 
@@ -221,7 +221,7 @@ sub _FirstSkip {
 Depending on the resultset attributes one of:
 
  SELECT * FROM (
-  SELECT *, ROWNUM rownum__index FROM (
+  SELECT *, ROWNUM AS rownum__index FROM (
    SELECT ...
   ) WHERE ROWNUM <= ($limit+$offset)
  ) WHERE rownum__index >= ($offset+1)
@@ -229,7 +229,7 @@ Depending on the resultset attributes one of:
 or
 
  SELECT * FROM (
-  SELECT *, ROWNUM rownum__index FROM (
+  SELECT *, ROWNUM AS rownum__index FROM (
     SELECT ...
   )
  ) WHERE rownum__index BETWEEN ($offset+1) AND ($limit+$offset)
@@ -278,7 +278,7 @@ EOS
   if (
     $rs_attrs->{order_by}
       and
-    $rs_attrs->{_rsroot_rsrc}->storage->_order_by_is_stable(
+    $rs_attrs->{result_source}->storage->_order_by_is_stable(
       @{$rs_attrs}{qw/from order_by where/}
     )
   ) {
@@ -286,7 +286,7 @@ EOS
 
     return <<EOS;
 SELECT $sq_attrs->{selection_outer} FROM (
-  SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM (
+  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
     SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
   ) $qalias WHERE ROWNUM <= ?
 ) $qalias WHERE $idx_name >= ?
@@ -297,7 +297,7 @@ EOS
 
     return <<EOS;
 SELECT $sq_attrs->{selection_outer} FROM (
-  SELECT $sq_attrs->{selection_outer}, ROWNUM $idx_name FROM (
+  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
     SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
   ) $qalias
 ) $qalias WHERE $idx_name BETWEEN ? AND ?
@@ -331,7 +331,7 @@ sub _prep_for_skimming_limit {
     if ($sq_attrs->{order_by_requested}) {
       $self->throw_exception (
         'Unable to safely perform "skimming type" limit with supplied unstable order criteria'
-      ) unless ($rs_attrs->{_rsroot_rsrc}->schema->storage->_order_by_is_stable(
+      ) unless ($rs_attrs->{result_source}->schema->storage->_order_by_is_stable(
         $rs_attrs->{from},
         $requested_order,
         $rs_attrs->{where},
@@ -343,11 +343,11 @@ sub _prep_for_skimming_limit {
       $inner_order = [ map
         { "$rs_attrs->{alias}.$_" }
         ( @{
-          $rs_attrs->{_rsroot_rsrc}->_identifying_column_set
+          $rs_attrs->{result_source}->_identifying_column_set
             ||
           $self->throw_exception(sprintf(
             'Unable to auto-construct stable order criteria for "skimming type" limit '
-          . "dialect based on source '%s'", $rs_attrs->{_rsroot_rsrc}->name) );
+          . "dialect based on source '%s'", $rs_attrs->{result_source}->name) );
         } )
       ];
     }
@@ -532,29 +532,37 @@ Currently used by B<Sybase ASE>, due to lack of any other option.
 sub _GenericSubQ {
   my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
-  my $root_rsrc = $rs_attrs->{_rsroot_rsrc};
+  my $main_rsrc = $rs_attrs->{result_source};
 
   # Explicitly require an order_by
   # GenSubQ is slow enough as it is, just emulating things
   # like in other cases is not wise - make the user work
   # to shoot their DBA in the foot
-  my $supplied_order = delete $rs_attrs->{order_by} or $self->throw_exception (
+  $self->throw_exception (
     'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, '
-  . 'root-table-based order criteria.'
+  . 'main-table-based order criteria.'
+  ) unless $rs_attrs->{order_by};
+
+  my $usable_order_colinfo = $main_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion(
+    $rs_attrs
   );
 
-  my $usable_order_ci = $root_rsrc->storage->_main_source_order_by_portion_is_stable(
-    $root_rsrc,
-    $supplied_order,
-    $rs_attrs->{where},
-  ) or $self->throw_exception(
-    'Generic Subquery Limit can not work with order criteria based on sources other than the current one'
+  $self->throw_exception(
+    'Generic Subquery Limit can not work with order criteria based on sources other than the main one'
+  ) if (
+    ! keys %{$usable_order_colinfo||{}}
+      or
+    grep
+      { $_->{-source_alias} ne $rs_attrs->{alias} }
+      (values %$usable_order_colinfo)
   );
 
 ###
 ###
 ### we need to know the directions after we figured out the above - reextract *again*
 ### this is eyebleed - trying to get it to work at first
+  my $supplied_order = delete $rs_attrs->{order_by};
+
   my @order_bits = do {
     local $self->{quote_char};
     local $self->{order_bind};
@@ -562,20 +570,20 @@ sub _GenericSubQ {
   };
 
   # truncate to what we'll use
-  $#order_bits = ( (keys %$usable_order_ci) - 1 );
+  $#order_bits = ( (keys %$usable_order_colinfo) - 1 );
 
   # @order_bits likely will come back quoted (due to how the prefetch
   # rewriter operates
   # Hence supplement the column_info lookup table with quoted versions
   if ($self->quote_char) {
-    $usable_order_ci->{$self->_quote($_)} = $usable_order_ci->{$_}
-      for keys %$usable_order_ci;
+    $usable_order_colinfo->{$self->_quote($_)} = $usable_order_colinfo->{$_}
+      for keys %$usable_order_colinfo;
   }
 
 # calculate the condition
   my $count_tbl_alias = 'rownum__emulation';
-  my $root_alias = $rs_attrs->{alias};
-  my $root_tbl_name = $root_rsrc->name;
+  my $main_alias = $rs_attrs->{alias};
+  my $main_tbl_name = $main_rsrc->name;
 
   my (@unqualified_names, @qualified_names, @is_desc, @new_order_by);
 
@@ -584,17 +592,17 @@ sub _GenericSubQ {
     ($bit, my $is_desc) = $self->_split_order_chunk($bit);
 
     push @is_desc, $is_desc;
-    push @unqualified_names, $usable_order_ci->{$bit}{-colname};
-    push @qualified_names, $usable_order_ci->{$bit}{-fq_colname};
+    push @unqualified_names, $usable_order_colinfo->{$bit}{-colname};
+    push @qualified_names, $usable_order_colinfo->{$bit}{-fq_colname};
 
-    push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_ci->{$bit}{-fq_colname} };
+    push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_colinfo->{$bit}{-fq_colname} };
   };
 
   my (@where_cond, @skip_colpair_stack);
   for my $i (0 .. $#order_bits) {
-    my $ci = $usable_order_ci->{$order_bits[$i]};
+    my $ci = $usable_order_colinfo->{$order_bits[$i]};
 
-    my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $root_alias);
+    my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $main_alias);
     my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } };
 
     push @skip_colpair_stack, [
@@ -683,7 +691,7 @@ WHERE ( SELECT COUNT(*) FROM %s %s $counted_where ) $rownum_cond
 $inner_order_sql
   ", map { $self->_quote ($_) } (
     $rs_attrs->{alias},
-    $root_tbl_name,
+    $main_tbl_name,
     $count_tbl_alias,
   ));
 }
@@ -693,7 +701,7 @@ $inner_order_sql
 #
 # Generates inner/outer select lists for various limit dialects
 # which result in one or more subqueries (e.g. RNO, Top, RowNum)
-# Any non-root-table columns need to have their table qualifier
+# Any non-main-table columns need to have their table qualifier
 # turned into a column alias (otherwise names in subqueries clash
 # and/or lose their source table)
 #
@@ -725,23 +733,22 @@ sub _subqueried_limit_attrs {
 
   my ($re_sep, $re_alias) = map { quotemeta $_ } ( $self->{name_sep}, $rs_attrs->{alias} );
 
-  # insulate from the multiple _recurse_fields calls below
-  local $self->{select_bind};
-
   # correlate select and as, build selection index
   my (@sel, $in_sel_index);
   for my $i (0 .. $#{$rs_attrs->{select}}) {
 
     my $s = $rs_attrs->{select}[$i];
-    my $sql_sel = $self->_recurse_fields ($s);
     my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef;
 
+    # we throw away the @bind here deliberately
+    my ($sql_sel) = $self->_recurse_fields ($s);
+
     push @sel, {
       arg => $s,
       sql => $sql_sel,
       unquoted_sql => do {
         local $self->{quote_char};
-        $self->_recurse_fields ($s);
+        ($self->_recurse_fields ($s))[0]; # ignore binds again
       },
       as =>
         $sql_alias
@@ -822,14 +829,17 @@ sub _unqualify_colname {
   return $fqcn;
 }
 
-1;
-
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
@@ -80,6 +80,19 @@ sub _recurse_oracle_joins {
         && $jt !~ /inner/i;
     }
 
+    # FIXME - the code below *UTTERLY* doesn't work with custom conds... sigh
+    # for the time being do not do any processing with the likes of _collapse_cond
+    # instead only unroll the -and hack if present
+    $on = $on->{-and}[0] if (
+      ref $on eq 'HASH'
+        and
+      keys %$on == 1
+        and
+      ref $on->{-and} eq 'ARRAY'
+        and
+      @{$on->{-and}} == 1
+    );
+
     # sadly SQLA treats where($scalar) as literal, so we need to jump some hoops
     push @where, map { \sprintf ('%s%s = %s%s',
       ref $_ ? $self->_recurse_where($_) : $self->_quote($_),
@@ -94,7 +107,7 @@ sub _recurse_oracle_joins {
 
 1;
 
-=pod
+__END__
 
 =head1 NAME
 
@@ -152,17 +165,13 @@ Does not support full outer joins (however neither really does DBIC itself)
 
 =back
 
-=head1 AUTHOR
-
-Justin Wheeler C<< <jwheeler@datademons.com> >>
-
-=head1 CONTRIBUTORS
-
-David Jack Olrik C<< <djo@cpan.org> >>
-
-=head1 LICENSE
+=head1 FURTHER QUESTIONS?
 
-This module is licensed under the same terms as Perl itself.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=cut
+=head1 COPYRIGHT AND LICENSE
 
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -0,0 +1,87 @@
+=for comment POD_DERIVED_INDEX_GENERATED
+The following documentation is automatically generated.  Please do not edit
+this file, but rather the original, inline with DBIx::Class::SQLMaker::OracleJoins
+at lib/DBIx/Class/SQLMaker/OracleJoins.pm
+(on the system that originally ran this).
+If you do edit this file, and don't want your changes to be removed, make
+sure you change the first line.
+
+=cut
+
+=head1 NAME
+
+DBIx::Class::SQLMaker::OracleJoins - Pre-ANSI Joins-via-Where-Clause Syntax
+
+=head1 PURPOSE
+
+This module is used with Oracle < 9.0 due to lack of support for standard
+ANSI join syntax.
+
+=head1 SYNOPSIS
+
+Not intended for use directly; used as the sql_maker_class for schemas and components.
+
+=head1 DESCRIPTION
+
+Implements pre-ANSI joins specified in the where clause.  Instead of:
+
+    SELECT x FROM y JOIN z ON y.id = z.id
+
+It will write:
+
+    SELECT x FROM y, z WHERE y.id = z.id
+
+It should properly support left and right joins. Full outer joins are not
+possible, because Oracle requires the entire query to be rewritten as a union
+of a left and a right join, and by the time this module is called to build the
+WHERE clause and table list of the SQL query, it is already too late to do so.
+
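As the SYNOPSIS notes, this maker is not used directly; it is normally reached through the storage class listed in SEE ALSO below. A sketch of wiring it up explicitly (the schema class name is hypothetical, and on most setups the Oracle storage detection makes the explicit call unnecessary):

  package MyApp::Schema;
  use base 'DBIx::Class::Schema';

  # selects the storage that uses DBIx::Class::SQLMaker::OracleJoins
  # as its sql_maker_class
  __PACKAGE__->storage_type('::DBI::Oracle::WhereJoins');

  __PACKAGE__->load_namespaces;

  1;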
+=head1 METHODS
+
+=over 4
+
+=item select
+
+Overrides DBIx::Class::SQLMaker's select() method: the override calls
+_oracle_joins() to rewrite the column and table list, then hands off to next::method().
+
+=back
+
+=head1 BUGS
+
+Does not support full outer joins (however neither really does DBIC itself)
+
+=head1 SEE ALSO
+
+=over 4
+
+=item L<DBIx::Class::Storage::DBI::Oracle::WhereJoins> - Storage class using this
+
+=item L<DBIx::Class::SQLMaker> - Parent module
+
+=item L<DBIx::Class> - Duh
+
+=back
+
+=head1 INHERITED METHODS
+
+=over 4
+
+=item L<SQL::Abstract>
+
+L<is_literal_value|SQL::Abstract/is_literal_value>, L<is_plain_value|SQL::Abstract/is_plain_value>
+
+=back
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
@@ -44,8 +44,16 @@ use namespace::clean;
 
 __PACKAGE__->mk_group_accessors (simple => qw/quote_char name_sep limit_dialect/);
 
+sub _quoting_enabled {
+  ( defined $_[0]->{quote_char} and length $_[0]->{quote_char} ) ? 1 : 0
+}
+
 # for when I need a normalized l/r pair
 sub _quote_chars {
+
+  # in case we are called in the old !!$sm->_quote_chars fashion
+  return () if !wantarray and ( ! defined $_[0]->{quote_char} or ! length $_[0]->{quote_char} );
+
   map
     { defined $_ ? $_ : '' }
     ( ref $_[0]->{quote_char} ? (@{$_[0]->{quote_char}}) : ( ($_[0]->{quote_char}) x 2 ) )
@@ -110,7 +118,7 @@ sub select {
   my ($self, $table, $fields, $where, $rs_attrs, $limit, $offset) = @_;
 
 
-  $fields = $self->_recurse_fields($fields);
+  ($fields, @{$self->{select_bind}}) = $self->_recurse_fields($fields);
 
   if (defined $offset) {
     $self->throw_exception('A supplied offset must be a non-negative integer')
@@ -231,42 +239,47 @@ sub _recurse_fields {
   return $$fields if $ref eq 'SCALAR';
 
   if ($ref eq 'ARRAY') {
-    return join(', ', map { $self->_recurse_fields($_) } @$fields);
+    my (@select, @bind);
+    for my $field (@$fields) {
+      my ($select, @new_bind) = $self->_recurse_fields($field);
+      push @select, $select;
+      push @bind, @new_bind;
+    }
+    return (join(', ', @select), @bind);
   }
   elsif ($ref eq 'HASH') {
     my %hash = %$fields;  # shallow copy
 
     my $as = delete $hash{-as};   # if supplied
 
-    my ($func, $args, @toomany) = %hash;
+    my ($func, $rhs, @toomany) = %hash;
 
     # there should be only one pair
     if (@toomany) {
       $self->throw_exception( "Malformed select argument - too many keys in hash: " . join (',', keys %$fields ) );
     }
 
-    if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) {
+    if (lc ($func) eq 'distinct' && ref $rhs eq 'ARRAY' && @$rhs > 1) {
       $self->throw_exception (
         'The select => { distinct => ... } syntax is not supported for multiple columns.'
-       .' Instead please use { group_by => [ qw/' . (join ' ', @$args) . '/ ] }'
-       .' or { select => [ qw/' . (join ' ', @$args) . '/ ], distinct => 1 }'
+       .' Instead please use { group_by => [ qw/' . (join ' ', @$rhs) . '/ ] }'
+       .' or { select => [ qw/' . (join ' ', @$rhs) . '/ ], distinct => 1 }'
       );
     }
 
+    my ($rhs_sql, @rhs_bind) = $self->_recurse_fields($rhs);
     my $select = sprintf ('%s( %s )%s',
       $self->_sqlcase($func),
-      $self->_recurse_fields($args),
+      $rhs_sql,
       $as
         ? sprintf (' %s %s', $self->_sqlcase('as'), $self->_quote ($as) )
         : ''
     );
 
-    return $select;
+    return ($select, @rhs_bind);
   }
-  # Is the second check absolutely necessary?
   elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) {
-    push @{$self->{select_bind}}, @{$$fields}[1..$#$$fields];
-    return $$fields->[0];
+    return @{$$fields};
   }
   else {
     $self->throw_exception( $ref . qq{ unexpected in _recurse_fields()} );
@@ -288,11 +301,9 @@ sub _parse_rs_attrs {
   my $sql = '';
 
   if ($arg->{group_by}) {
-    # horrible horrible, waiting for refactor
-    local $self->{select_bind};
-    if (my $g = $self->_recurse_fields($arg->{group_by}) ) {
-      $sql .= $self->_sqlcase(' group by ') . $g;
-      push @{$self->{group_bind} ||= []}, @{$self->{select_bind}||[]};
+    if ( my ($group_sql, @group_bind) = $self->_recurse_fields($arg->{group_by}) ) {
+      $sql .= $self->_sqlcase(' group by ') . $group_sql;
+      push @{$self->{group_bind}}, @group_bind;
     }
   }
 
@@ -441,8 +452,6 @@ sub _join_condition {
 
   # Backcompat for the old days when a plain hashref
   # { 't1.col1' => 't2.col2' } meant ON t1.col1 = t2.col2
-  # Once things settle we should start warning here so that
-  # folks unroll their hacks
   if (
     ref $cond eq 'HASH'
       and
@@ -452,6 +461,12 @@ sub _join_condition {
       and
     ! ref ( (values %$cond)[0] )
   ) {
+    carp_unique(
+      "ResultSet {from} structures with conditions not conforming to the "
+    . "SQL::Abstract syntax are deprecated: you either need to stop abusing "
+    . "{from} altogether, or express the condition properly using the "
+    . "{ -ident => ... } operator"
+    );
     $cond = { keys %$cond => { -ident => values %$cond } }
   }
   elsif ( ref $cond eq 'ARRAY' ) {
@@ -518,14 +533,17 @@ sub _where_op_multicolumn_in {
   \[ join( ' IN ', shift @$$lhs, shift @$$rhs ), @$$lhs, @$$rhs ];
 }
 
-1;
-
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
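For readers hitting the deprecation warning introduced above, a sketch of the two spellings of a hand-rolled C<{from}> join condition (the table and column names are made up):

  # deprecated - a bare pair is silently treated as column equality
  { 'artist.id' => 'cd.artist_id' }

  # preferred - declare the right-hand side as an identifier explicitly
  { 'artist.id' => { -ident => 'cd.artist_id' } }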
@@ -0,0 +1,57 @@
+=for comment POD_DERIVED_INDEX_GENERATED
+The following documentation is automatically generated.  Please do not edit
+this file, but rather the original, inline with DBIx::Class::SQLMaker
+at lib/DBIx/Class/SQLMaker.pm
+(on the system that originally ran this).
+If you do edit this file, and don't want your changes to be removed, make
+sure you change the first line.
+
+=cut
+
+=head1 NAME
+
+DBIx::Class::SQLMaker - An SQL::Abstract-based SQL maker class
+
+=head1 DESCRIPTION
+
+This module is a subclass of L<SQL::Abstract> and includes a number of
+DBIC-specific workarounds, not yet suitable for inclusion into the
+L<SQL::Abstract> core. It also provides all (and more than) the functionality
+of L<SQL::Abstract::Limit>, see L<DBIx::Class::SQLMaker::LimitDialects> for
+more info.
+
+Currently the enhancements to L<SQL::Abstract> are:
+
+=over 4
+
+=item * Support for C<JOIN> statements (via extended C<table/from> support)
+
+=item * Support of functions in C<SELECT> lists
+
+=item * C<GROUP BY>/C<HAVING> support (via extensions to the order_by parameter)
+
+=item * Support of C<...FOR UPDATE> type of select statement modifiers
+
+=back
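These enhancements are reached through ordinary resultset attributes rather than by calling the maker directly; a minimal sketch (the source and column names are assumptions):

  my $rs = $schema->resultset('CD')->search(undef, {
    join     => 'tracks',
    select   => [
      'me.artist_id',
      { count => 'tracks.id', -as => 'track_count' },   # function in the SELECT list
    ],
    as       => [ 'artist_id', 'track_count' ],
    group_by => [ 'me.artist_id' ],                      # GROUP BY via attributes
    having   => { track_count => { '>' => 10 } },        # HAVING via attributes
    for      => 'update',                                # SELECT ... FOR UPDATE
  });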
+
+=head1 INHERITED METHODS
+
+=over 4
+
+=item L<SQL::Abstract>
+
+L<is_literal_value|SQL::Abstract/is_literal_value>, L<is_plain_value|SQL::Abstract/is_plain_value>
+
+=back
+
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
@@ -204,6 +204,7 @@ use base 'DBIx::Class::Schema';
 use DBIx::Class::Carp;
 use Time::HiRes qw/gettimeofday/;
 use Try::Tiny;
+use Scalar::Util 'weaken';
 use namespace::clean;
 
 __PACKAGE__->mk_classdata('_filedata');
@@ -238,7 +239,7 @@ Call this to initialise a previously unversioned database. The table 'dbix_class
 
 Takes one argument which should be the version that the database is currently at. Defaults to the return value of L</schema_version>.
 
-See L</getting_started> for more details.
+See L</GETTING STARTED> for more details.
 
 =cut
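A minimal sketch of the bootstrap call (the connection details and version string are placeholders):

  my $schema = MyApp::Schema->connect($dsn, $user, $pass);

  # stamp an existing, previously unversioned database
  $schema->install('1.0');

  # or let the version default to schema_version()
  $schema->install;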
 
@@ -589,9 +590,10 @@ sub _on_connect
 {
   my ($self) = @_;
 
-  my $conn_info = $self->storage->connect_info;
-  $self->{vschema} = DBIx::Class::Version->connect(@$conn_info);
-  my $conn_attrs = $self->{vschema}->storage->_dbic_connect_attributes || {};
+  weaken (my $w_self = $self );
+
+  $self->{vschema} = DBIx::Class::Version->connect(sub { $w_self->storage->dbh });
+  my $conn_attrs = $self->storage->_dbic_connect_attributes || {};
 
   my $vtable = $self->{vschema}->resultset('Table');
 
@@ -600,10 +602,10 @@ sub _on_connect
 
   # check for legacy versions table and move to new if exists
   unless ($self->_source_exists($vtable)) {
-    my $vtable_compat = DBIx::Class::VersionCompat->connect(@$conn_info)->resultset('TableCompat');
+    my $vtable_compat = DBIx::Class::VersionCompat->connect(sub { $w_self->storage->dbh })->resultset('TableCompat');
     if ($self->_source_exists($vtable_compat)) {
       $self->{vschema}->deploy;
-      map { $vtable->create({ installed => $_->Installed, version => $_->Version }) } $vtable_compat->all;
+      map { $vtable->new_result({ installed => $_->Installed, version => $_->Version })->insert } $vtable_compat->all;
       $self->storage->_get_dbh->do("DROP TABLE " . $vtable_compat->result_source->from);
     }
   }
@@ -710,7 +712,7 @@ sub _set_db_version {
   # formatted by this new function will sort _after_ any existing 200... strings.
   my @tm = gettimeofday();
   my @dt = gmtime ($tm[0]);
-  my $o = $vtable->create({
+  my $o = $vtable->new_result({
     version => $version,
     installed => sprintf("v%04d%02d%02d_%02d%02d%02d.%03.0f",
       $dt[5] + 1900,
@@ -721,7 +723,7 @@ sub _set_db_version {
       $dt[0],
       int($tm[1] / 1000), # convert to millisecs
     ),
-  });
+  })->insert;
 }
 
 sub _read_sql_file {
@@ -754,13 +756,17 @@ sub _source_exists
     };
 }
 
-1;
+=head1 FURTHER QUESTIONS?
 
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 COPYRIGHT AND LICENSE
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-=head1 LICENSE
+=cut
 
-You may distribute this code under the same terms as Perl itself.
+1;
@@ -8,8 +8,7 @@ use base 'DBIx::Class';
 use DBIx::Class::Carp;
 use Try::Tiny;
 use Scalar::Util qw/weaken blessed/;
-use DBIx::Class::_Util 'refcount';
-use Sub::Name 'subname';
+use DBIx::Class::_Util qw(refcount quote_sub);
 use Devel::GlobalDestruction;
 use namespace::clean;
 
@@ -109,11 +108,12 @@ are no matching Result classes like this:
 
   load_namespaces found ResultSet class $classname with no corresponding Result class
 
-If a Result class is found to already have a ResultSet class set using
-L</resultset_class> to some other class, you will be warned like this:
+If a ResultSource instance is found to already have its
+L<resultset_class|DBIx::Class::ResultSource/resultset_class> set to some
+other class, you will be warned like this:
 
-  We found ResultSet class '$rs_class' for '$result', but it seems
-  that you had already set '$result' to use '$rs_set' instead
+  We found ResultSet class '$rs_class' for '$result_class', but it seems
+  that you had already set '$result_class' to use '$rs_set' instead
 
 =head3 Examples
 
@@ -897,7 +897,6 @@ sub compose_namespace {
     local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO;
     use warnings qw/redefine/;
 
-    no strict qw/refs/;
     foreach my $source_name ($self->sources) {
       my $orig_source = $self->source($source_name);
 
@@ -919,11 +918,8 @@ sub compose_namespace {
       }
     }
 
-    foreach my $meth (qw/class source resultset/) {
-      no warnings 'redefine';
-      *{"${target}::${meth}"} = subname "${target}::${meth}" =>
-        sub { shift->schema->$meth(@_) };
-    }
+    quote_sub "${target}::${_}" => "shift->schema->$_(\@_)"
+      for qw(class source resultset);
   }
 
   Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
@@ -1122,8 +1118,8 @@ sub deploy {
 
 A convenient shortcut to
 C<< $self->storage->deployment_statements($self, @args) >>.
-Returns the SQL statements used by L</deploy> and
-L<DBIx::Class::Schema::Storage/deploy>.
+Returns the statements used by L</deploy> and
+L<DBIx::Class::Storage/deploy>.
 
 =cut
 
@@ -1223,8 +1219,8 @@ sub thaw {
 
 =head2 freeze
 
-This doesn't actually do anything more than call L<Storable/nfreeze>, it is just
-provided here for symmetry.
+This doesn't actually do anything beyond calling L<nfreeze|Storable/SYNOPSIS>;
+it is just provided here for symmetry.
 
 =cut
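A sketch of the round trip (the result class is assumed):

  my $frozen = $schema->freeze( $schema->resultset('Artist')->find(1) );

  # ... later, possibly after shipping $frozen somewhere else ...
  my $artist = $schema->thaw($frozen);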
 
@@ -1497,11 +1493,7 @@ sub compose_connection {
   }
 
   my $schema = $self->compose_namespace($target, $base);
-  {
-    no strict 'refs';
-    my $name = join '::', $target, 'schema';
-    *$name = subname $name, sub { $schema };
-  }
+  quote_sub "${target}::schema", '$s', { '$s' => \$schema };
 
   $schema->connection(@info);
   foreach my $source_name ($schema->sources) {
@@ -1515,14 +1507,17 @@ sub compose_connection {
   return $schema;
 }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
@@ -73,12 +73,13 @@ method.
 
 The deserializing hook called on the object during deserialization.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -3,6 +3,10 @@ package DBIx::Class::StartupCheck;
 use strict;
 use warnings;
 
+1;
+
+__END__
+
 =head1 NAME
 
 DBIx::Class::StartupCheck - Run environment checks on startup
@@ -30,22 +34,13 @@ warning message on startup sent to STDERR, explaining what to do about
 it and how to suppress the message. If you don't see any messages, you
 have nothing to worry about.
 
-=head1 CONTRIBUTORS
-
-Nigel Metheringham
-
-Brandon Black
+=head1 FURTHER QUESTIONS?
 
-Matt S. Trout
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 AUTHOR
+=head1 COPYRIGHT AND LICENSE
 
-Jon Schutz
-
-=head1 LICENSE
-
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-
-1;
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -1,22 +1,36 @@
 package # hide from pause until we figure it all out
   DBIx::Class::Storage::BlockRunner;
 
+use warnings;
 use strict;
 
 use DBIx::Class::Exception;
 use DBIx::Class::Carp;
 use Context::Preserve 'preserve_context';
-use DBIx::Class::_Util 'is_exception';
+use DBIx::Class::_Util qw(is_exception qsub);
 use Scalar::Util qw(weaken blessed reftype);
 use Try::Tiny;
 
 # DO NOT edit away without talking to riba first, he will just put it back
+# BEGIN pre-Moo2 import block
 BEGIN {
+  my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all};
+
   local $ENV{PERL_STRICTURES_EXTRA} = 0;
-  require Moo; Moo->import;
-  require Sub::Quote; Sub::Quote->import('quote_sub');
+  # load all of these now, so that lazy-loading does not escape
+  # the current PERL_STRICTURES_EXTRA setting
+  require Sub::Quote;
+  require Sub::Defer;
+  require Moo;
+  require Moo::Object;
+  require Method::Generate::Accessor;
+  require Method::Generate::Constructor;
+
+  Moo->import;
+  ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} );
 }
-use warnings NONFATAL => 'all';
+# END pre-Moo2 import block
+
 use namespace::clean;
 
 =head1 NAME
@@ -43,16 +57,16 @@ has wrap_txn => (
 has retry_handler => (
   is => 'ro',
   required => 1,
-  isa => quote_sub( q{
+  isa => qsub q{
     (Scalar::Util::reftype($_[0])||'') eq 'CODE'
       or DBIx::Class::Exception->throw('retry_handler must be a CODE reference')
-  }),
+  },
 );
 
 has retry_debug => (
   is => 'rw',
   # use a sub - to be evaluated on the spot lazily
-  default => quote_sub( '$ENV{DBIC_STORAGE_RETRY_DEBUG}' ),
+  default => qsub '$ENV{DBIC_STORAGE_RETRY_DEBUG}',
   lazy => 1,
 );
 
@@ -67,19 +81,19 @@ has failed_attempt_count => (
   writer => '_set_failed_attempt_count',
   default => 0,
   lazy => 1,
-  trigger => quote_sub(q{
+  trigger => qsub q{
     $_[0]->throw_exception( sprintf (
       'Reached max_attempts amount of %d, latest exception: %s',
       $_[0]->max_attempts, $_[0]->last_exception
     )) if $_[0]->max_attempts <= ($_[1]||0);
-  }),
+  },
 );
 
 has exception_stack => (
   is => 'ro',
   init_arg => undef,
   clearer => '_reset_exception_stack',
-  default => quote_sub(q{ [] }),
+  default => qsub q{ [] },
   lazy => 1,
 );
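BlockRunner is an internal helper (note the hide-from-PAUSE package declaration), so the attributes above are normally exercised indirectly; a sketch of the user-facing path that ends up in this retry machinery (the work inside the coderef is invented):

  $schema->txn_do(sub {
    # may be re-run by the retry logic if the connection turns out to be gone
    $schema->resultset('Artist')->create({ name => 'Retried' });
  });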
 
@@ -144,7 +158,7 @@ sub _run {
     my $storage = $self->storage;
     my $cur_depth = $storage->transaction_depth;
 
-    if (defined $txn_init_depth and $run_err eq '') {
+    if (defined $txn_init_depth and ! is_exception $run_err) {
       my $delta_txn = (1 + $txn_init_depth) - $cur_depth;
 
       if ($delta_txn) {
@@ -219,13 +233,16 @@ sub _run {
   };
 }
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -122,15 +122,19 @@ sub _exec_svp_rollback {
   $self->_exec_txn_rollback;
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -64,16 +64,19 @@ sub all {
   return @rows;
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
+1;
+
 # vim:sts=2 sw=2:
@@ -141,15 +141,19 @@ sub format_datetime {
   return $datetime_parser->format_datetime(shift);
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -87,16 +87,19 @@ sub all {
   return @rows;
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
+1;
+
 # vim:sts=2 sw=2:
@@ -60,7 +60,7 @@ size of the bind sizes in the first prepare call:
 
 L<https://rt.cpan.org/Ticket/Display.html?id=52048>
 
-The C<ado_size> workaround is used (see L<DBD::ADO/"ADO Providers">) with the
+The C<ado_size> workaround is used (see L<DBD::ADO/ADO providers>) with the
 approximate maximum size of the data_type of the bound column, or 8000 (maximum
 VARCHAR size) if the data_type is not available.
 
@@ -189,9 +189,9 @@ sub _dbi_attrs_for_bind {
   return $attrs;
 }
 
-# Can't edit all the binds in _dbi_attrs_for_bind for insert_bulk, so we take
+# Can't edit all the binds in _dbi_attrs_for_bind for _insert_bulk, so we take
 # care of those GUIDs here.
-sub insert_bulk {
+sub _insert_bulk {
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
@@ -436,15 +436,19 @@ sub format_datetime {
   return $datetime_parser->format_datetime(shift);
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -75,15 +75,19 @@ sub _init {
 #  $sth;
 #}
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -23,7 +23,8 @@ statements with values bound to columns or conditions that are not strings will
 throw implicit type conversion errors.
 
 As long as a column L<data_type|DBIx::Class::ResultSource/add_columns> is
-defined and resolves to a base RDBMS native type via L</_native_data_type> as
+defined and resolves to a base RDBMS native type via
+L<_native_data_type|DBIx::Class::Storage::DBI/_native_data_type> as
 defined in your Storage driver, the placeholder for this column will be
 converted to:
 
@@ -77,13 +78,16 @@ sub connect_call_set_auto_cast {
   $self->auto_cast(1);
 }
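A sketch of switching the behaviour on at connect time (the DSN and credentials are placeholders):

  my $schema = MyApp::Schema->connect(
    $dsn, $user, $pass,
    { on_connect_call => [ 'set_auto_cast' ] },
  );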
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -3,10 +3,11 @@ package DBIx::Class::Storage::DBI::Cursor;
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Cursor/;
+use base 'DBIx::Class::Cursor';
 
 use Try::Tiny;
-use Scalar::Util qw/refaddr weaken/;
+use Scalar::Util qw(refaddr weaken);
+use List::Util 'shuffle';
 use namespace::clean;
 
 __PACKAGE__->mk_group_accessors('simple' =>
@@ -177,7 +178,14 @@ sub all {
 
   (undef, $sth) = $self->storage->_select( @{$self->{args}} );
 
-  return @{$sth->fetchall_arrayref};
+  return (
+    DBIx::Class::_ENV_::SHUFFLE_UNORDERED_RESULTSETS
+      and
+    ! $self->{attrs}{order_by}
+  )
+    ? shuffle @{$sth->fetchall_arrayref}
+    : @{$sth->fetchall_arrayref}
+  ;
 }
 
 sub sth {
@@ -245,4 +253,17 @@ sub __finish_sth {
   );
 }
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
+=cut
+
 1;
@@ -60,8 +60,6 @@ sub _dbh_last_insert_id {
   return @res ? $res[0] : undef;
 }
 
-1;
-
 =head1 NAME
 
 DBIx::Class::Storage::DBI::DB2 - IBM DB2 support for DBIx::Class
@@ -73,13 +71,19 @@ RowNumberOver over FetchFirst depending on the availability of support for
 RowNumberOver, queries the server name_sep from L<DBI> and sets the L<DateTime>
 parser to L<DateTime::Format::DB2>.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -164,8 +164,6 @@ sub format_date {
   return $date_parser->format_datetime(shift);
 }
 
-1;
-
 =head1 CAVEATS
 
 =over 4
@@ -178,13 +176,19 @@ work with earlier versions.
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -8,9 +8,10 @@ use warnings;
 # in ::Storage::DBI::InterBase as opposed to inheriting
 # directly from ::Storage::DBI::Firebird::Common
 use base qw/DBIx::Class::Storage::DBI::InterBase/;
-
 use mro 'c3';
 
+1;
+
 =head1 NAME
 
 DBIx::Class::Storage::DBI::Firebird - Driver for the Firebird RDBMS via
@@ -21,17 +22,13 @@ L<DBD::Firebird>
 This is an empty subclass of L<DBIx::Class::Storage::DBI::InterBase> for use
 with L<DBD::Firebird>, see that driver for details.
 
-=cut
-
-1;
-
-=head1 AUTHOR
-
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
-# vim:sts=2 sw=2:
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -51,13 +51,16 @@ EOS
 
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -169,14 +169,18 @@ sub format_date {
   return $date_parser->format_datetime(shift);
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
@@ -131,8 +131,6 @@ sub connect_call_datetime_setup {
   $self->_get_dbh->{ib_time_all} = 'ISO';
 }
 
-1;
-
 =head1 CAVEATS
 
 =over 4
@@ -149,13 +147,19 @@ Alternately, use the L<ODBC|DBIx::Class::Storage::DBI::ODBC::Firebird> driver.
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -327,12 +327,13 @@ for this flag - you are urged to do so. If DBIC internals insist that an
 ordered subselect is necessary for an operation, and you believe there is a
 different/better way to get the same result - please file a bugreport.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -119,13 +119,16 @@ sub _prep_interpolated_value {
   return $_[2];
 }
 
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -143,15 +143,19 @@ sub format_datetime {
   return $datetime_parser->format_datetime(shift);
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -20,13 +20,14 @@ over ODBC
 
 This is an empty subclass of L<DBIx::Class::Storage::DBI::DB2>.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-=cut
-# vim:sts=2 sw=2:
@@ -59,13 +59,20 @@ sub _exec_svp_rollback {
   };
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-You may distribute this code under the same terms as Perl itself.
 
 =cut
+
 # vim:sts=2 sw=2:
+
+1;
@@ -308,15 +308,19 @@ sub connect_call_use_server_cursors {
   $self->_get_dbh->{odbc_SQL_ROWSET_SIZE} = $sql_rowset_size;
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sw=2 sts=2 et
@@ -32,12 +32,14 @@ fail with:
 B<WORKAROUND:> use the C<uniqueidentifier> type instead, it is more efficient
 anyway.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-=cut
@@ -50,8 +50,6 @@ sub _disable_odbc_array_ops {
   }
 }
 
-1;
-
 =head1 NAME
 
 DBIx::Class::Storage::DBI::ODBC - Base class for ODBC drivers
@@ -61,13 +59,19 @@ DBIx::Class::Storage::DBI::ODBC - Base class for ODBC drivers
 This class simply provides a mechanism for discovering and loading a sub-class
 for a specific ODBC backend.  It should be transparent to the user.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -103,9 +103,6 @@ sub deployment_statements {
   my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
 
   $sqltargs ||= {};
-  my $quote_char = $self->schema->storage->sql_maker->quote_char;
-  $sqltargs->{quote_table_names} = $quote_char ? 1 : 0;
-  $sqltargs->{quote_field_names} = $quote_char ? 1 : 0;
 
   if (
     ! exists $sqltargs->{producer_args}{oracle_version}
@@ -638,7 +635,7 @@ Unfortunately, Oracle doesn't support identifiers over 30 chars in length, so
 the L<DBIx::Class::Relationship> name is shortened and appended with half of an
 MD5 hash.
 
-See L<DBIx::Class::Storage/"relname_to_table_alias">.
+See L<DBIx::Class::Storage::DBI/relname_to_table_alias>.
 
 =cut
 
@@ -766,13 +763,16 @@ It uses the same syntax as L<DBIx::Class::ResultSet/order_by>
   # ORDER SIBLINGS BY
   #     firstname ASC
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -68,16 +68,13 @@ Probably lots more.
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-Justin Wheeler C<< <jwheeler@datademons.com> >>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 CONTRIBUTORS
+=head1 COPYRIGHT AND LICENSE
 
-David Jack Olrik C<< <djo@cpan.org> >>
-
-=head1 LICENSE
-
-This module is licensed under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -40,12 +40,13 @@ no matter the database version, add
 
 to your Schema class.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -266,12 +266,13 @@ option to connect(), for example:
                     },
                   );
 
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -39,13 +39,16 @@ sub next_storage {
   return  (shift->pool->active_replicants)[0];
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <john.napiorkowski@takkle.com>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -78,13 +78,16 @@ sub _random_number {
   rand($_[1])
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <john.napiorkowski@takkle.com>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -97,7 +97,7 @@ This class defines the following methods.
 
 =head2 _build_current_replicant
 
-Lazy builder for the L</current_replicant_storage> attribute.
+Lazy builder for the L</current_replicant> attribute.
 
 =cut
 
@@ -243,13 +243,16 @@ sub _get_forced_pool {
   }
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <jjnapiork@cpan.org>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -1,24 +1,22 @@
-package DBIx::Class::Storage::DBI::Replicated::Introduction;
-
 =head1 NAME
 
 DBIx::Class::Storage::DBI::Replicated::Introduction - Minimum Need to Know
 
 =head1 SYNOPSIS
 
-This is an introductory document for L<DBIx::Class::Storage::Replication>.
+This is an introductory document for L<DBIx::Class::Storage::DBI::Replicated>.
 
 This document is not an overview of what replication is or why you should be
-using it.  It is not a document explaining how to setup MySQL native replication
-either.  Copious external resources are available for both.  This document
+using it. It is not a document explaining how to setup MySQL native replication
+either. Copious external resources are available for both. This document
 presumes you have the basics down.
 
 =head1 DESCRIPTION
 
-L<DBIx::Class> supports a framework for using database replication.  This system
+L<DBIx::Class> supports a framework for using database replication. This system
 is integrated completely, which means once it's setup you should be able to
 automatically just start using a replication cluster without additional work or
-changes to your code.  Some caveats apply, primarily related to the proper use
+changes to your code. Some caveats apply, primarily related to the proper use
 of transactions (you are wrapping all your database modifying statements inside
 a transaction, right ;) ) however in our experience properly written DBIC will
 work transparently with Replicated storage.
@@ -137,7 +135,7 @@ will result in increased database loads, so choose a number with care.  Our
 experience is that setting the number around 5 seconds results in a good
 performance / integrity balance.
 
-'master_read_weight' is an option associated with the ::Random balancer.  It
+'master_read_weight' is an option associated with the ::Random balancer. It
 allows you to let the master be read from.  I usually leave this off (default
 is off).
 
@@ -171,14 +169,14 @@ will find L<MySQL::Sandbox> an easy way to set up a replication cluster.
 
 And now your $schema object is properly configured!  Enjoy!
 
-=head1 AUTHOR
-
-John Napiorkowski <jjnapiork@cpan.org>
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
-1;
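Pulling the introduction together, a minimal sketch of the setup it describes might look like the following; the schema class, DSNs, credentials and option values are purely illustrative:

  my $schema = My::Schema->clone;
  $schema->storage_type([ '::DBI::Replicated', {
    balancer_type => '::Random',
    balancer_args => {
      auto_validate_every => 5,   # seconds between replicant health checks
      master_read_weight  => 1,   # allow some reads to hit the master
    },
  }]);
  $schema->connection('dbi:mysql:database=app;host=db-master', $user, $pass);

  # attach the read-only replicants to the pool
  $schema->storage->connect_replicants(
    [ 'dbi:mysql:database=app;host=db-replicant1', $user, $pass, \%opts ],
    [ 'dbi:mysql:database=app;host=db-replicant2', $user, $pass, \%opts ],
  );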
@@ -410,13 +410,16 @@ sub validate_replicants {
   $self->_last_validated(time);
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <john.napiorkowski@takkle.com>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -38,7 +38,9 @@ when it gets too far behind the master, if it stops replicating, etc.
 
 This attribute DOES NOT reflect a replicant's internal status, i.e. if it is
 properly replicating from a master and has not fallen too many seconds behind a
-reliability threshold.  For that, use L</is_replicating>  and L</lag_behind_master>.
+reliability threshold. For that, use
+L<DBIx::Class::Storage::DBI::Replicated/is_replicating> and
+L<DBIx::Class::Storage::DBI::Replicated/lag_behind_master>.
 Since the implementation of those functions is database specific (and not all
 DBIC supported DBs support replication) you should refer to your
 database-specific storage driver for more information.
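A rough illustration of consulting those two methods on a connected storage (the five second threshold is arbitrary):

  my $storage = $schema->storage;
  if ( $storage->is_replicating ) {
    my $lag = $storage->lag_behind_master;
    warn "replicant is $lag seconds behind the master\n"
      if defined $lag and $lag > 5;
  }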
@@ -85,13 +87,16 @@ sub debugobj {
 L<http://en.wikipedia.org/wiki/Replicant>,
 L<DBIx::Class::Storage::DBI::Replicated>
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <john.napiorkowski@takkle.com>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -35,12 +35,4 @@ subtype Weight,
   where { $_ >= 0 },
   message { 'weight must be a decimal greater than 0' };
 
-# AUTHOR
-#
-#  John Napiorkowski <john.napiorkowski@takkle.com>
-#
-# LICENSE
-#
-#  You may distribute this code under the same terms as Perl itself.
-
 1;
@@ -57,13 +57,16 @@ around '_query_start' => sub {
 
 L<DBIx::Class::Storage::DBI>
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-John Napiorkowski <john.napiorkowski@takkle.com>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -20,8 +20,6 @@ use Try::Tiny;
 
 use namespace::clean -except => 'meta';
 
-=encoding utf8
-
 =head1 NAME
 
 DBIx::Class::Storage::DBI::Replicated - BETA Replicated database support
@@ -37,7 +35,7 @@ also define your arguments, such as which balancer you want and any arguments
 that the Pool object should get.
 
   my $schema = Schema::Class->clone;
-  $schema->storage_type( ['::DBI::Replicated', {balancer=>'::Random'}] );
+  $schema->storage_type(['::DBI::Replicated', { balancer_type => '::Random' }]);
   $schema->connection(...);
 
 Next, you need to add in the Replicants.  Basically this is an array of
@@ -265,7 +263,6 @@ my $method_dispatch = {
     build_datetime_parser
     last_insert_id
     insert
-    insert_bulk
     update
     delete
     dbh
@@ -315,6 +312,8 @@ my $method_dispatch = {
     _native_data_type
     _get_dbh
     sql_maker_class
+    insert_bulk
+    _insert_bulk
     _execute
     _do_query
     _dbh_execute
@@ -404,7 +403,7 @@ for my $method (@{$method_dispatch->{unimplemented}}) {
 
 =head2 read_handler
 
-Defines an object that implements the read side of L<BIx::Class::Storage::DBI>.
+Defines an object that implements the read side of L<DBIx::Class::Storage::DBI>.
 
 =cut
 
@@ -417,7 +416,7 @@ has 'read_handler' => (
 
 =head2 write_handler
 
-Defines an object that implements the write side of L<BIx::Class::Storage::DBI>,
+Defines an object that implements the write side of L<DBIx::Class::Storage::DBI>,
 as well as methods that don't write or read that can be called on only one
 storage, methods that return a C<$dbh>, and any methods that don't make sense to
 run on a replicant.
@@ -589,7 +588,8 @@ sub _build_read_handler {
 =head2 around: connect_replicants
 
 All calls to connect_replicants need to have an existing $schema tacked onto
-top of the args, since L<DBIx::Storage::DBI> needs it, and any C<connect_info>
+top of the args, since L<DBIx::Class::Storage::DBI> needs it, and any
+L<connect_info|DBIx::Class::Storage::DBI/connect_info>
 options merged with the master, with replicant opts having higher priority.
 
 =cut
@@ -1080,7 +1080,8 @@ sub _get_server_version {
 Due to the fact that replicants can lag behind a master, you must take care to
 make sure you use one of the methods to force read queries to a master should
 you need realtime data integrity.  For example, if you insert a row, and then
-immediately re-read it from the database (say, by doing $result->discard_changes)
+immediately re-read it from the database (say, by doing
+L<< $result->discard_changes|DBIx::Class::Row/discard_changes >>)
 or you insert a row and then immediately build a query that expects that row
 to be an item, you should force the master to handle reads.  Otherwise, due to
 the lag, there is no certainty your data will be in the expected state.
@@ -1094,7 +1095,7 @@ attribute:
 
   my $result = $resultset->search(undef, {force_pool=>'master'})->find($pk);
 
-This attribute will safely be ignore by non replicated storages, so you can use
+This attribute will safely be ignored by non replicated storages, so you can use
 the same code for both types of systems.
 
 Lastly, you can use the L</execute_reliably> method, which works very much like
@@ -1112,18 +1113,16 @@ using the Schema clone method.
   ## $new_schema will use only the Master storage for all reads/writes while
   ## the $schema object will use replicated storage.
 
-=head1 AUTHOR
-
-  John Napiorkowski <john.napiorkowski@takkle.com>
-
-Based on code originated by:
+=head1 FURTHER QUESTIONS?
 
-  Norbert Csongrádi <bert@cpan.org>
-  Peter Siklósi <einon@einon.hu>
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
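Both techniques described above can be sketched briefly; the resultset, primary key and column names are illustrative:

  # per-query: force this read onto the master
  my $result = $rs->search(undef, { force_pool => 'master' })->find($pk);

  # per-block: run a group of statements reliably against the master
  $schema->storage->execute_reliably(sub {
    my $row = $rs->find($pk);
    $row->update({ hit_count => $row->hit_count + 1 });
  });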
 
@@ -85,15 +85,19 @@ sub all {
   return @rows;
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
 # vim:sts=2 sw=2:
@@ -212,12 +212,13 @@ be turned off (or increased) by the DBA by executing:
 
 Highly recommended.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -6,6 +6,7 @@ use warnings;
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
 
+use SQL::Abstract 'is_plain_value';
 use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer);
 use DBIx::Class::Carp;
 use Try::Tiny;
@@ -126,11 +127,23 @@ sub _exec_svp_release {
 sub _exec_svp_rollback {
   my ($self, $name) = @_;
 
-  # For some reason this statement changes the value of $dbh->{AutoCommit}, so
-  # we localize it here to preserve the original value.
-  local $self->_dbh->{AutoCommit} = $self->_dbh->{AutoCommit};
+  $self->_dbh->do("ROLLBACK TO SAVEPOINT $name");
+}
+
+# older SQLite has issues here too - both of these are in fact
+# completely benign warnings (or at least so say the tests)
+sub _exec_txn_rollback {
+  local $SIG{__WARN__} = sigwarn_silencer( qr/rollback ineffective/ )
+    unless $DBD::SQLite::__DBIC_TXN_SYNC_SANE__;
+
+  shift->next::method(@_);
+}
+
+sub _exec_txn_commit {
+  local $SIG{__WARN__} = sigwarn_silencer( qr/commit ineffective/ )
+    unless $DBD::SQLite::__DBIC_TXN_SYNC_SANE__;
 
-  $self->_dbh->do("ROLLBACK TRANSACTION TO SAVEPOINT $name");
+  shift->next::method(@_);
 }
 
 sub _ping {
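The savepoint statements exercised by the code above are normally driven by nested transactions together with the auto_savepoint connection attribute; a minimal sketch, with schema and resultset names assumed:

  my $schema = My::Schema->connect(
    'dbi:SQLite:dbname=app.db', '', '',
    { auto_savepoint => 1 },
  );

  $schema->txn_do(sub {
    $schema->resultset('Artist')->create({ name => 'Outer' });

    eval {
      $schema->txn_do(sub {            # inner txn is wrapped in a SAVEPOINT
        $schema->resultset('Artist')->create({ name => 'Inner' });
        die "abort inner\n";           # triggers ROLLBACK TO SAVEPOINT
      });
    };
  });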
@@ -232,10 +245,6 @@ sub deployment_statements {
     $sqltargs->{producer_args}{sqlite_version} = $dver;
   }
 
-  $sqltargs->{quote_identifiers}
-    = !!$self->sql_maker->_quote_chars
-  if ! exists $sqltargs->{quote_identifiers};
-
   $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
 }
 
@@ -314,7 +323,7 @@ sub _dbi_attrs_for_bind {
 
   for my $i (0.. $#$bindattrs) {
 
-    $stringifiable++ if ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') );
+    $stringifiable++ if ( length ref $bind->[$i][1] and is_plain_value($bind->[$i][1]) );
 
     if (
       defined $bindattrs->[$i]
@@ -394,14 +403,17 @@ sub connect_call_use_foreign_keys {
   );
 }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
@@ -81,8 +81,10 @@ You can also enable this driver explicitly using:
   $schema->storage_type('::DBI::Sybase::ASE::NoBindVars');
   $schema->connect($dsn, $user, $pass, \%opts);
 
-See the discussion in L<< DBD::Sybase/Using ? Placeholders & bind parameters to
-$sth->execute >> for details on the pros and cons of using placeholders.
+See the discussion in
+L<< DBD::Sybase/Using ? Placeholders & bind parameters to $sth->execute >>
+for details on the pros and cons of using placeholders with this particular
+driver.
 
 One advantage of not using placeholders is that C<select @@identity> will work
 for obtaining the last insert id of an C<IDENTITY> column, instead of having to
@@ -94,13 +96,13 @@ course) into the SQL query itself, without using placeholders.
 The caching of prepared statements is also explicitly disabled, as the
 interpolation renders it useless.
 
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-# vim:sts=2 sw=2:
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -179,7 +179,7 @@ sub disconnect {
 
 # Even though we call $sth->finish for uses off the bulk API, there's still an
 # "active statement" warning on disconnect, which we throw away here.
-# This is due to the bug described in insert_bulk.
+# This is due to the bug described in _insert_bulk.
 # Currently a noop because 'prepare' is used instead of 'prepare_cached'.
   local $SIG{__WARN__} = sigwarn_silencer(qr/active statement/i)
     if $self->_is_bulk_storage;
@@ -233,7 +233,7 @@ Also sets the C<log_on_update> value for blob write operations. The default is
 C<1>, but C<0> is better if your database is configured for it.
 
 See
-L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>.
+L<DBD::Sybase/Handling IMAGE/TEXT data with syb_ct_get_data()/syb_ct_send_data()>.
 
 =cut
 
@@ -501,7 +501,7 @@ sub update {
   }
 }
 
-sub insert_bulk {
+sub _insert_bulk {
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
@@ -607,7 +607,7 @@ sub insert_bulk {
 # This ignores any data conversion errors detected by the client side libs, as
 # they are usually harmless.
   my $orig_cslib_cb = DBD::Sybase::set_cslib_cb(
-    Sub::Name::subname insert_bulk => sub {
+    Sub::Name::subname _insert_bulk_cslib_errhandler => sub {
       my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_;
 
       return 1 if $errno == 36;
@@ -685,7 +685,7 @@ sub insert_bulk {
 
     $self->_bulk_storage(undef);
     unshift @_, $self;
-    goto \&insert_bulk;
+    goto \&_insert_bulk;
   }
   elsif ($exception) {
 # rollback makes the bulkLogin connection unusable
@@ -717,7 +717,7 @@ sub _remove_blob_cols {
   return %blob_cols ? \%blob_cols : undef;
 }
 
-# same for insert_bulk
+# same for _insert_bulk
 sub _remove_blob_cols_array {
   my ($self, $source, $cols, $data) = @_;
 
@@ -1082,15 +1082,15 @@ for L<DBIx::Class::InflateColumn::DateTime>.
 
 =head1 LIMITED QUERIES
 
-Because ASE does not have a good way to limit results in SQL that works for all
-types of queries, the limit dialect is set to
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ>.
+Because ASE does not have a good way to limit results in SQL that works for
+all types of queries, the limit dialect is set to
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ>.
 
 Fortunately, ASE and L<DBD::Sybase> support cursors properly, so when
-L<GenericSubQ|SQL::Abstract::Limit/GenericSubQ> is too slow you can use
-the L<software_limit|DBIx::Class::ResultSet/software_limit>
-L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping over
-records.
+L<GenericSubQ|DBIx::Class::SQLMaker::LimitDialects/GenericSubQ> is too slow
+you can use the L<software_limit|DBIx::Class::ResultSet/software_limit>
+L<DBIx::Class::ResultSet> attribute to simulate limited queries by skipping
+over records.
 
 =head1 TEXT/IMAGE COLUMNS
 
@@ -1196,13 +1196,13 @@ bulk_insert using prepare_cached (see comments.)
 
 =back
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-# vim:sts=2 sw=2:
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -103,14 +103,17 @@ sub _exec_txn_rollback {
   $dbh->do('ROLLBACK');
 }
 
-1;
-
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
@@ -38,12 +38,13 @@ This subclass supports MSSQL connected via L<DBD::Sybase>.
   $schema->storage_type('::DBI::Sybase::MSSQL');
   $schema->connect_info('dbi:Sybase:....', ...);
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -43,12 +43,13 @@ disabled, as the interpolation renders it useless.
 In all other respects, it is a subclass of
 L<DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server>.
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -171,14 +171,18 @@ sub format_datetime {
   return $datetime_formatter->format_datetime(shift);
 }
 
-1;
-
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
@@ -131,14 +131,18 @@ sub _using_freetds_version {
   return $inf =~ /v([0-9\.]+)/ ? $1 : 0;
 }
 
-1;
-
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
+
@@ -106,13 +106,16 @@ sub _prefetch_autovalues  {
   return $self->next::method(@_);
 }
 
-=head1 AUTHOR
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -106,15 +106,17 @@ sub _run_connection_actions {
 sub sql_maker {
   my $self = shift;
 
-  unless ($self->_sql_maker) {
-    my $maker = $self->next::method (@_);
+  # it is critical to get the version *before* calling next::method
+  # otherwise the potential connect will obliterate the sql_maker
+  # next::method will populate in the _sql_maker accessor
+  my $mysql_ver = $self->_server_info->{normalized_dbms_version};
 
-    # mysql 3 does not understand a bare JOIN
-    my $mysql_ver = $self->_dbh_get_info('SQL_DBMS_VER');
-    $maker->{_default_jointype} = 'INNER' if $mysql_ver =~ /^3/;
-  }
+  my $sm = $self->next::method(@_);
+
+  # mysql 3 does not understand a bare JOIN
+  $sm->{_default_jointype} = 'INNER' if $mysql_ver < 4;
 
-  return $self->_sql_maker;
+  $sm;
 }
 
 sub sqlt_type {
@@ -204,12 +206,13 @@ Enables session-wide strict options upon connecting. Equivalent to:
     ]
   });
 
-=head1 AUTHORS
-
-See L<DBIx::Class/CONTRIBUTORS>
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -10,11 +10,10 @@ use mro 'c3';
 use DBIx::Class::Carp;
 use Scalar::Util qw/refaddr weaken reftype blessed/;
 use List::Util qw/first/;
-use Sub::Name 'subname';
 use Context::Preserve 'preserve_context';
 use Try::Tiny;
-use overload ();
-use Data::Compare (); # no imports!!! guard against insane architecture
+use SQL::Abstract qw(is_plain_value is_literal_value);
+use DBIx::Class::_Util qw(quote_sub perlstring serialize);
 use namespace::clean;
 
 # default cursor class, overridable in connect_info attributes
@@ -79,30 +78,35 @@ __PACKAGE__->_use_join_optimizer (1);
 sub _determine_supports_join_optimizer { 1 };
 
 # Each of these methods need _determine_driver called before itself
-# in order to function reliably. This is a purely DRY optimization
+# in order to function reliably. We also need to separate accessors
+# from plain old method calls, since an accessor called as a setter
+# does *not* need the driver determination loop fired (and in fact
+# can produce hard to find bugs, like e.g. losing on_connect_*
+# semantics on fresh connections)
 #
-# get_(use)_dbms_capability need to be called on the correct Storage
-# class, as _use_X may be hardcoded class-wide, and _supports_X calls
-# _determine_supports_X which obv. needs a correct driver as well
-my @rdbms_specific_methods = qw/
+# The construct below is simply a parameterized around()
+my $storage_accessor_idx = { map { $_ => 1 } qw(
   sqlt_type
-  deployment_statements
+  datetime_parser_type
 
   sql_maker
   cursor_class
+)};
+for my $meth (keys %$storage_accessor_idx, qw(
+  deployment_statements
 
   build_datetime_parser
-  datetime_parser_type
 
   txn_begin
 
   insert
-  insert_bulk
   update
   delete
   select
   select_single
 
+  _insert_bulk
+
   with_deferred_fk_checks
 
   get_use_dbms_capability
@@ -110,16 +114,16 @@ my @rdbms_specific_methods = qw/
 
   _server_info
   _get_server_version
-/;
-
-for my $meth (@rdbms_specific_methods) {
+)) {
 
   my $orig = __PACKAGE__->can ($meth)
     or die "$meth is not a ::Storage::DBI method!";
 
-  no strict qw/refs/;
-  no warnings qw/redefine/;
-  *{__PACKAGE__ ."::$meth"} = subname $meth => sub {
+  my $is_getter = $storage_accessor_idx->{$meth} ? 0 : 1;
+
+  quote_sub
+    __PACKAGE__ ."::$meth", sprintf( <<'EOC', $is_getter, perlstring $meth ), { '$orig' => \$orig };
+
     if (
       # only fire when invoked on an instance, a valid class-based invocation
       # would e.g. be setting a default for an inherited accessor
@@ -129,6 +133,10 @@ for my $meth (@rdbms_specific_methods) {
         and
       ! $_[0]->{_in_determine_driver}
         and
+      # if this is a known *setter* - just set it, no need to connect
+      # and determine the driver
+      ( %1$s or @_ <= 1 )
+        and
       # Only try to determine stuff if we have *something* that either is or can
       # provide a DSN. Allows for bare $schema's generated with a plain ->connect()
       # to still be marginally useful
@@ -136,16 +144,15 @@ for my $meth (@rdbms_specific_methods) {
     ) {
       $_[0]->_determine_driver;
 
-      # This for some reason crashes and burns on perl 5.8.1
-      # IFF the method ends up throwing an exception
-      #goto $_[0]->can ($meth);
+      # work around http://rt.perl.org/rt3/Public/Bug/Display.html?id=35878
+      goto $_[0]->can(%2$s) unless DBIx::Class::_ENV_::BROKEN_GOTO;
 
-      my $cref = $_[0]->can ($meth);
+      my $cref = $_[0]->can(%2$s);
       goto $cref;
     }
 
     goto $orig;
-  };
+EOC
 }
 
 =head1 NAME
@@ -246,12 +253,10 @@ sub new {
 }
 
 sub DESTROY {
-  my $self = shift;
-
+  $_[0]->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
   # some databases spew warnings on implicit disconnect
-  $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
   local $SIG{__WARN__} = sub {};
-  $self->_dbh(undef);
+  $_[0]->_dbh(undef);
 
   # this op is necessary, since the very last perl runtime statement
   # triggers a global destruction shootout, and the $SIG localization
@@ -262,14 +267,14 @@ sub DESTROY {
 
 # handle pid changes correctly - do not destroy parent's connection
 sub _verify_pid {
-  my $self = shift;
 
-  my $pid = $self->_conn_pid;
-  if( defined $pid and $pid != $$ and my $dbh = $self->_dbh ) {
+  my $pid = $_[0]->_conn_pid;
+
+  if( defined $pid and $pid != $$ and my $dbh = $_[0]->_dbh ) {
     $dbh->{InactiveDestroy} = 1;
-    $self->_dbh(undef);
-    $self->transaction_depth(0);
-    $self->savepoints([]);
+    $_[0]->_dbh(undef);
+    $_[0]->transaction_depth(0);
+    $_[0]->savepoints([]);
   }
 
   return;
@@ -863,22 +868,20 @@ database is not in C<AutoCommit> mode.
 =cut
 
 sub disconnect {
-  my ($self) = @_;
-
-  if( $self->_dbh ) {
-    my @actions;
 
-    push @actions, ( $self->on_disconnect_call || () );
-    push @actions, $self->_parse_connect_do ('on_disconnect_do');
+  if( my $dbh = $_[0]->_dbh ) {
 
-    $self->_do_connection_actions(disconnect_call_ => $_) for @actions;
+    $_[0]->_do_connection_actions(disconnect_call_ => $_) for (
+      ( $_[0]->on_disconnect_call || () ),
+      $_[0]->_parse_connect_do ('on_disconnect_do')
+    );
 
     # stops the "implicit rollback on disconnect" warning
-    $self->_exec_txn_rollback unless $self->_dbh_autocommit;
+    $_[0]->_exec_txn_rollback unless $_[0]->_dbh_autocommit;
 
-    %{ $self->_dbh->{CachedKids} } = ();
-    $self->_dbh->disconnect;
-    $self->_dbh(undef);
+    %{ $dbh->{CachedKids} } = ();
+    $dbh->disconnect;
+    $_[0]->_dbh(undef);
   }
 }
 
@@ -899,8 +902,8 @@ in MySQL's case disabled entirely.
 
 # Storage subclasses should override this
 sub with_deferred_fk_checks {
-  my ($self, $sub) = @_;
-  $sub->();
+  #my ($self, $sub) = @_;
+  $_[1]->();
 }
 
 =head2 connected
@@ -920,40 +923,26 @@ answering, etc.) This method is used internally by L</dbh>.
 =cut
 
 sub connected {
-  my $self = shift;
-  return 0 unless $self->_seems_connected;
+  return 0 unless $_[0]->_seems_connected;
 
   #be on the safe side
-  local $self->_dbh->{RaiseError} = 1;
+  local $_[0]->_dbh->{RaiseError} = 1;
 
-  return $self->_ping;
+  return $_[0]->_ping;
 }
 
 sub _seems_connected {
-  my $self = shift;
-
-  $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
+  $_[0]->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
 
-  my $dbh = $self->_dbh
-    or return 0;
-
-  return $dbh->FETCH('Active');
+  ($_[0]->_dbh || return 0)->FETCH('Active');
 }
 
 sub _ping {
-  my $self = shift;
-
-  my $dbh = $self->_dbh or return 0;
-
-  return $dbh->ping;
+  ($_[0]->_dbh || return 0)->ping;
 }
 
 sub ensure_connected {
-  my ($self) = @_;
-
-  unless ($self->connected) {
-    $self->_populate_dbh;
-  }
+  $_[0]->connected || ( $_[0]->_populate_dbh && 1 );
 }
 
 =head2 dbh
@@ -967,26 +956,26 @@ instead.
 =cut
 
 sub dbh {
-  my ($self) = @_;
-
-  if (not $self->_dbh) {
-    $self->_populate_dbh;
-  } else {
-    $self->ensure_connected;
-  }
-  return $self->_dbh;
+  # maybe save a ping call
+  $_[0]->_dbh
+    ? ( $_[0]->ensure_connected and $_[0]->_dbh )
+    : $_[0]->_populate_dbh
+  ;
 }
 
 # this is the internal "get dbh or connect (don't check)" method
 sub _get_dbh {
-  my $self = shift;
-  $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
-  $self->_populate_dbh unless $self->_dbh;
-  return $self->_dbh;
+  $_[0]->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
+  $_[0]->_dbh || $_[0]->_populate_dbh;
 }
 
+# *DELIBERATELY* not a setter (for the time being)
+# Too intertwined with everything else for any kind of sanity
 sub sql_maker {
-  my ($self) = @_;
+  my $self = shift;
+
+  $self->throw_exception('sql_maker() is not a setter method') if @_;
+
   unless ($self->_sql_maker) {
     my $sql_maker_class = $self->sql_maker_class;
 
@@ -1046,32 +1035,35 @@ sub _rebless {}
 sub _init {}
 
 sub _populate_dbh {
-  my ($self) = @_;
 
-  $self->_dbh(undef); # in case ->connected failed we might get sent here
-  $self->_dbh_details({}); # reset everything we know
+  $_[0]->_dbh(undef); # in case ->connected failed we might get sent here
+
+  $_[0]->_dbh_details({}); # reset everything we know
+
+  # FIXME - this needs reenabling with the proper "no reset on same DSN" check
+  #$_[0]->_sql_maker(undef); # this may also end up being different
 
-  $self->_dbh($self->_connect);
+  $_[0]->_dbh($_[0]->_connect);
 
-  $self->_conn_pid($$) unless DBIx::Class::_ENV_::BROKEN_FORK; # on win32 these are in fact threads
+  $_[0]->_conn_pid($$) unless DBIx::Class::_ENV_::BROKEN_FORK; # on win32 these are in fact threads
 
-  $self->_determine_driver;
+  $_[0]->_determine_driver;
 
   # Always set the transaction depth on connect, since
   #  there is no transaction in progress by definition
-  $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
+  $_[0]->{transaction_depth} = $_[0]->_dbh_autocommit ? 0 : 1;
 
-  $self->_run_connection_actions unless $self->{_in_determine_driver};
+  $_[0]->_run_connection_actions unless $_[0]->{_in_determine_driver};
+
+  $_[0]->_dbh;
 }
 
 sub _run_connection_actions {
-  my $self = shift;
-  my @actions;
 
-  push @actions, ( $self->on_connect_call || () );
-  push @actions, $self->_parse_connect_do ('on_connect_do');
-
-  $self->_do_connection_actions(connect_call_ => $_) for @actions;
+  $_[0]->_do_connection_actions(connect_call_ => $_) for (
+    ( $_[0]->on_connect_call || () ),
+    $_[0]->_parse_connect_do ('on_connect_do'),
+  );
 }
 
 
@@ -1298,7 +1290,7 @@ sub _determine_driver {
         "Your storage subclass @{[ ref $self ]} provides (or inherits) the method "
       . 'source_bind_attributes() for which support has been removed as of Jan 2013. '
       . 'If you are not sure how to proceed please contact the development team via '
-      . 'http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT'
+      . DBIx::Class::_ENV_::HELP_URL
       );
     }
 
@@ -1409,7 +1401,19 @@ sub disconnect_call_do_sql {
   $self->_do_query(@_);
 }
 
-# override in db-specific backend when necessary
+=head2 connect_call_datetime_setup
+
+A no-op stub method, provided so that one can always safely supply the
+L<connection option|/DBIx::Class specific connection attributes>
+
+ on_connect_call => 'datetime_setup'
+
+This way one does not need to know in advance whether the underlying
+storage requires any sort of hand-holding when dealing with calendar
+data.
+
+=cut
+
 sub connect_call_datetime_setup { 1 }
 
 sub _do_query {
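The connection option documented above is passed like any other connect_info attribute; a minimal sketch with a placeholder DSN:

  my $schema = My::Schema->connect(
    'dbi:Oracle:hr', $user, $pass,
    { on_connect_call => 'datetime_setup' },   # harmless no-op where not needed
  );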
@@ -1529,19 +1533,17 @@ sub _connect {
 }
 
 sub txn_begin {
-  my $self = shift;
-
   # this means we have not yet connected and do not know the AC status
   # (e.g. coderef $dbh), need a full-fledged connection check
-  if (! defined $self->_dbh_autocommit) {
-    $self->ensure_connected;
+  if (! defined $_[0]->_dbh_autocommit) {
+    $_[0]->ensure_connected;
   }
   # Otherwise simply connect or re-connect on pid changes
   else {
-    $self->_get_dbh;
+    $_[0]->_get_dbh;
   }
 
-  $self->next::method(@_);
+  shift->next::method(@_);
 }
 
 sub _exec_txn_begin {
@@ -1562,9 +1564,8 @@ sub _exec_txn_begin {
 sub txn_commit {
   my $self = shift;
 
-  $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
   $self->throw_exception("Unable to txn_commit() on a disconnected storage")
-    unless $self->_dbh;
+    unless $self->_seems_connected;
 
   # esoteric case for folks using external $dbh handles
   if (! $self->transaction_depth and ! $self->_dbh->FETCH('AutoCommit') ) {
@@ -1593,9 +1594,8 @@ sub _exec_txn_commit {
 sub txn_rollback {
   my $self = shift;
 
-  $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
   $self->throw_exception("Unable to txn_rollback() on a disconnected storage")
-    unless $self->_dbh;
+    unless $self->_seems_connected;
 
   # esoteric case for folks using external $dbh handles
   if (! $self->transaction_depth and ! $self->_dbh->FETCH('AutoCommit') ) {
@@ -1621,17 +1621,12 @@ sub _exec_txn_rollback {
   shift->_dbh->rollback;
 }
 
-# generate some identical methods
-for my $meth (qw/svp_begin svp_release svp_rollback/) {
-  no strict qw/refs/;
-  *{__PACKAGE__ ."::$meth"} = subname $meth => sub {
-    my $self = shift;
-    $self->_verify_pid unless DBIx::Class::_ENV_::BROKEN_FORK;
-    $self->throw_exception("Unable to $meth() on a disconnected storage")
-      unless $self->_dbh;
-    $self->next::method(@_);
-  };
-}
+# generate the DBI-specific stubs, which then fallback to ::Storage proper
+quote_sub __PACKAGE__ . "::$_" => sprintf (<<'EOS', $_) for qw(svp_begin svp_release svp_rollback);
+  $_[0]->throw_exception('Unable to %s() on a disconnected storage')
+    unless $_[0]->_seems_connected;
+  shift->next::method(@_);
+EOS
 
 # This used to be the top-half of _execute.  It was split out to make it
 #  easier to override in NoBindVars without duping the rest.  It takes up
@@ -1683,13 +1678,10 @@ sub _gen_sql_bind {
 sub _resolve_bindattrs {
   my ($self, $ident, $bind, $colinfos) = @_;
 
-  $colinfos ||= {};
-
   my $resolve_bindinfo = sub {
     #my $infohash = shift;
 
-    %$colinfos = %{ $self->_resolve_column_info($ident) }
-      unless keys %$colinfos;
+    $colinfos ||= { %{ $self->_resolve_column_info($ident) } };
 
     my $ret;
     if (my $col = $_[0]->{dbic_colname}) {
@@ -1709,10 +1701,16 @@ sub _resolve_bindattrs {
     my $resolved =
       ( ref $_ ne 'ARRAY' or @$_ != 2 ) ? [ {}, $_ ]
     : ( ! defined $_->[0] )             ? [ {}, $_->[1] ]
-    : (ref $_->[0] eq 'HASH')           ? [ (exists $_->[0]{dbd_attrs} or $_->[0]{sqlt_datatype})
-                                              ? $_->[0]
-                                              : $resolve_bindinfo->($_->[0])
-                                            , $_->[1] ]
+    : (ref $_->[0] eq 'HASH')           ? [(
+                                            ! keys %{$_->[0]}
+                                              or
+                                            exists $_->[0]{dbd_attrs}
+                                              or
+                                            $_->[0]{sqlt_datatype}
+                                           ) ? $_->[0]
+                                             : $resolve_bindinfo->($_->[0])
+                                           , $_->[1]
+                                          ]
     : (ref $_->[0] eq 'SCALAR')         ? [ { sqlt_datatype => ${$_->[0]} }, $_->[1] ]
     :                                     [ $resolve_bindinfo->(
                                               { dbic_colname => $_->[0] }
@@ -1726,7 +1724,7 @@ sub _resolve_bindattrs {
         and
       length ref $resolved->[1]
         and
-      ! overload::Method($resolved->[1], '""')
+      ! is_plain_value $resolved->[1]
     ) {
       require Data::Dumper;
       local $Data::Dumper::Maxdepth = 1;
@@ -1880,14 +1878,15 @@ sub _bind_sth_params {
       );
     }
     else {
-      # FIXME SUBOPTIMAL - most likely this is not necessary at all
-      # confirm with dbi-dev whether explicit stringification is needed
-      my $v = ( length ref $bind->[$i][1] and overload::Method($bind->[$i][1], '""') )
+      # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD
+      my $v = ( length ref $bind->[$i][1] and is_plain_value $bind->[$i][1] )
         ? "$bind->[$i][1]"
         : $bind->[$i][1]
       ;
+
       $sth->bind_param(
         $i + 1,
+        # The temp-var is CRUCIAL - DO NOT REMOVE IT, breaks older DBD::SQLite RT#79576
         $v,
         $bind_attrs->[$i],
       );
@@ -1908,9 +1907,7 @@ sub _prefetch_autovalues {
       (
         ! exists $to_insert->{$col}
           or
-        ref $to_insert->{$col} eq 'SCALAR'
-          or
-        (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY')
+        is_literal_value($to_insert->{$col})
       )
     ) {
       $values{$col} = $self->_sequence_fetch(
@@ -1947,11 +1944,9 @@ sub insert {
     }
 
     # nothing to retrieve when explicit values are supplied
-    next if (defined $to_insert->{$col} and ! (
-      ref $to_insert->{$col} eq 'SCALAR'
-        or
-      (ref $to_insert->{$col} eq 'REF' and ref ${$to_insert->{$col}} eq 'ARRAY')
-    ));
+    next if (
+      defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col})
+    );
 
     # the 'scalar keys' is a trick to preserve the ->columns declaration order
     $retrieve_cols{$col} = scalar keys %retrieve_cols if (
@@ -2027,26 +2022,28 @@ sub insert {
 }
 
 sub insert_bulk {
-  my ($self, $source, $cols, $data) = @_;
+  carp_unique(
+    'insert_bulk() should have never been exposed as a public method and '
+  . 'calling it is deprecated as of Aug 2014. If you believe you have a genuine '
+  . 'use for this method please contact the development team via '
+  . DBIx::Class::_ENV_::HELP_URL
+  );
 
-  my @col_range = (0..$#$cols);
+  return '0E0' unless @{$_[3]||[]};
 
-  # FIXME SUBOPTIMAL - most likely this is not necessary at all
-  # confirm with dbi-dev whether explicit stringification is needed
-  #
-  # forcibly stringify whatever is stringifiable
-  # ResultSet::populate() hands us a copy - safe to mangle
-  for my $r (0 .. $#$data) {
-    for my $c (0 .. $#{$data->[$r]}) {
-      $data->[$r][$c] = "$data->[$r][$c]"
-        if ( length ref $data->[$r][$c] and overload::Method($data->[$r][$c], '""') );
-    }
-  }
+  shift->_insert_bulk(@_);
+}
+
+sub _insert_bulk {
+  my ($self, $source, $cols, $data) = @_;
+
+  $self->throw_exception('Calling _insert_bulk without a dataset to process makes no sense')
+    unless @{$data||[]};
 
   my $colinfos = $source->columns_info($cols);
 
   local $self->{_autoinc_supplied_for_op} =
-    (first { $_->{is_auto_increment} } values %$colinfos)
+    (grep { $_->{is_auto_increment} } values %$colinfos)
       ? 1
       : 0
   ;
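The supported user-facing entry point for this fast path is L<DBIx::Class::ResultSet/populate> called in void context; a brief sketch with made-up data:

  # void context selects the storage-level multi-row insert path
  $schema->resultset('Artist')->populate([
    [ qw(name rank) ],
    [ 'Caterwauler McCrae', 1 ],
    [ 'Random Boy Band',    2 ],
  ]);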
@@ -2072,17 +2069,17 @@ sub insert_bulk {
   # can't just hand SQLA a set of some known "values" (e.g. hashrefs that
   # can be later matched up by address), because we want to supply a real
   # value on which perhaps e.g. datatype checks will be performed
-  my ($proto_data, $value_type_by_col_idx);
-  for my $i (@col_range) {
-    my $colname = $cols->[$i];
-    if (ref $data->[0][$i] eq 'SCALAR') {
+  my ($proto_data, $serialized_bind_type_by_col_idx);
+  for my $col_idx (0..$#$cols) {
+    my $colname = $cols->[$col_idx];
+    if (ref $data->[0][$col_idx] eq 'SCALAR') {
       # no bind value at all - no type
 
-      $proto_data->{$colname} = $data->[0][$i];
+      $proto_data->{$colname} = $data->[0][$col_idx];
     }
-    elsif (ref $data->[0][$i] eq 'REF' and ref ${$data->[0][$i]} eq 'ARRAY' ) {
+    elsif (ref $data->[0][$col_idx] eq 'REF' and ref ${$data->[0][$col_idx]} eq 'ARRAY' ) {
       # repack, so we don't end up mangling the original \[]
-      my ($sql, @bind) = @${$data->[0][$i]};
+      my ($sql, @bind) = @${$data->[0][$col_idx]};
 
       # normalization of user supplied stuff
       my $resolved_bind = $self->_resolve_bindattrs(
@@ -2091,23 +2088,23 @@ sub insert_bulk {
 
       # store value-less (attrs only) bind info - we will be comparing all
       # supplied binds against this for sanity
-      $value_type_by_col_idx->{$i} = [ map { $_->[0] } @$resolved_bind ];
+      $serialized_bind_type_by_col_idx->{$col_idx} = serialize [ map { $_->[0] } @$resolved_bind ];
 
       $proto_data->{$colname} = \[ $sql, map { [
         # inject slice order to use for $proto_bind construction
-          { %{$resolved_bind->[$_][0]}, _bind_data_slice_idx => $i, _literal_bind_subindex => $_+1 }
+          { %{$resolved_bind->[$_][0]}, _bind_data_slice_idx => $col_idx, _literal_bind_subindex => $_+1 }
             =>
           $resolved_bind->[$_][1]
         ] } (0 .. $#bind)
       ];
     }
     else {
-      $value_type_by_col_idx->{$i} = undef;
+      $serialized_bind_type_by_col_idx->{$col_idx} = undef;
 
       $proto_data->{$colname} = \[ '?', [
-        { dbic_colname => $colname, _bind_data_slice_idx => $i }
+        { dbic_colname => $colname, _bind_data_slice_idx => $col_idx }
           =>
-        $data->[0][$i]
+        $data->[0][$col_idx]
       ] ];
     }
   }
@@ -2118,11 +2115,11 @@ sub insert_bulk {
     [ $proto_data ],
   );
 
-  if (! @$proto_bind and keys %$value_type_by_col_idx) {
+  if (! @$proto_bind and keys %$serialized_bind_type_by_col_idx) {
     # if the bindlist is empty and we had some dynamic binds, this means the
     # storage ate them away (e.g. the NoBindVars component) and interpolated
     # them directly into the SQL. This obviously can't be good for multi-inserts
-    $self->throw_exception('Cannot insert_bulk without support for placeholders');
+    $self->throw_exception('Unable to invoke fast-path insert without storage placeholder support');
   }
 
   # sanity checks
@@ -2140,19 +2137,19 @@ sub insert_bulk {
         Data::Dumper::Concise::Dumper ({
           map { $cols->[$_] =>
             $data->[$r_idx][$_]
-          } @col_range
+          } 0..$#$cols
         }),
       }
     );
   };
 
-  for my $col_idx (@col_range) {
+  for my $col_idx (0..$#$cols) {
     my $reference_val = $data->[0][$col_idx];
 
     for my $row_idx (1..$#$data) {  # we are comparing against what we got from [0] above, hence start from 1
       my $val = $data->[$row_idx][$col_idx];
 
-      if (! exists $value_type_by_col_idx->{$col_idx}) { # literal no binds
+      if (! exists $serialized_bind_type_by_col_idx->{$col_idx}) { # literal no binds
         if (ref $val ne 'SCALAR') {
           $bad_slice_report_cref->(
             "Incorrect value (expecting SCALAR-ref \\'$$reference_val')",
@@ -2168,8 +2165,8 @@ sub insert_bulk {
           );
         }
       }
-      elsif (! defined $value_type_by_col_idx->{$col_idx} ) {  # regular non-literal value
-        if (ref $val eq 'SCALAR' or (ref $val eq 'REF' and ref $$val eq 'ARRAY') ) {
+      elsif (! defined $serialized_bind_type_by_col_idx->{$col_idx} ) {  # regular non-literal value
+        if (is_literal_value($val)) {
           $bad_slice_report_cref->("Literal SQL found where a plain bind value is expected", $row_idx, $col_idx);
         }
       }
@@ -2196,16 +2193,17 @@ sub insert_bulk {
           }
           # need to check the bind attrs - a bind will happen only once for
           # the entire dataset, so any changes further down will be ignored.
-          elsif (! Data::Compare::Compare(
-            $value_type_by_col_idx->{$col_idx},
-            [
+          elsif (
+            $serialized_bind_type_by_col_idx->{$col_idx}
+              ne
+            serialize [
               map
               { $_->[0] }
               @{$self->_resolve_bindattrs(
                 $source, [ @{$$val}[1 .. $#$$val] ], $colinfos,
               )}
-            ],
-          )) {
+            ]
+          ) {
             $bad_slice_report_cref->(
               'Differing bind attributes on literal/bind values not supported',
               $row_idx,
@@ -2250,16 +2248,13 @@ sub insert_bulk {
 sub _dbh_execute_for_fetch {
   my ($self, $source, $sth, $proto_bind, $cols, $data) = @_;
 
-  my @idx_range = ( 0 .. $#$proto_bind );
-
   # If we have any bind attributes to take care of, we will bind the
   # proto-bind data (which will never be used by execute_for_fetch)
   # However since column bindtypes are "sticky", this is sufficient
   # to get the DBD to apply the bindtype to all values later on
-
   my $bind_attrs = $self->_dbi_attrs_for_bind($source, $proto_bind);
 
-  for my $i (@idx_range) {
+  for my $i (0 .. $#$proto_bind) {
     $sth->bind_param (
       $i+1, # DBI bind indexes are 1-based
       $proto_bind->[$i][1],
@@ -2279,12 +2274,28 @@ sub _dbh_execute_for_fetch {
   my $fetch_tuple = sub {
     return undef if ++$fetch_row_idx > $#$data;
 
-    return [ map { defined $_->{_literal_bind_subindex}
-      ? ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}
-         ->[ $_->{_literal_bind_subindex} ]
-          ->[1]
-      : $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]
-    } map { $_->[0] } @$proto_bind];
+    return [ map {
+      my $v = ! defined $_->{_literal_bind_subindex}
+
+        ? $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]
+
+        # There are no attributes to resolve here - we already did everything
+        # when we constructed proto_bind. However we still want to sanity-check
+        # what the user supplied, so pass stuff through to the resolver *anyway*
+        : $self->_resolve_bindattrs (
+            undef,  # a fake rsrc
+            [ ${ $data->[ $fetch_row_idx ]->[ $_->{_bind_data_slice_idx} ]}->[ $_->{_literal_bind_subindex} ] ],
+            {},     # a fake column_info bag
+          )->[0][1]
+      ;
+
+      # FIXME SUBOPTIMAL - DBI needs fixing to always stringify regardless of DBD
+      # For the time being forcibly stringify whatever is stringifiable
+      (length ref $v and is_plain_value $v)
+        ? "$v"
+        : $v
+      ;
+    } map { $_->[0] } @$proto_bind ];
   };
 
   my $tuple_status = [];
@@ -2411,20 +2422,12 @@ sub _select_args {
   #) if $orig_attrs->{!args_as_stored_at_the_end_of_this_method!};
 
   my $sql_maker = $self->sql_maker;
-  my $alias2source = $self->_resolve_ident_sources ($ident);
 
   my $attrs = {
     %$orig_attrs,
     select => $select,
     from => $ident,
     where => $where,
-
-    # limit dialects use this stuff
-    # yes, some CDBICompat crap does not supply an {alias} >.<
-    ( $orig_attrs->{alias} and $alias2source->{$orig_attrs->{alias}} )
-      ? ( _rsroot_rsrc => $alias2source->{$orig_attrs->{alias}} )
-      : ()
-    ,
   };
 
   # Sanity check the attributes (SQLMaker does it too, but
@@ -2461,7 +2464,7 @@ sub _select_args {
     # are happy (this includes MySQL in strict_mode)
     # If any of the other joined tables are referenced in the group_by
     # however - the user is on their own
-    ( $prefetch_needs_subquery or $attrs->{_related_results_construction} )
+    ( $prefetch_needs_subquery or ! $attrs->{_simple_passthrough_construction} )
       and
     $attrs->{group_by}
       and
@@ -2515,6 +2518,8 @@ sub _select_args {
   $orig_attrs->{_last_sqlmaker_alias_map} = $attrs->{_aliastypes};
 
 ###
+  #   my $alias2source = $self->_resolve_ident_sources ($ident);
+  #
   # This would be the point to deflate anything found in $attrs->{where}
   # (and leave $attrs->{bind} intact). Problem is - inflators historically
   # expect a result object. And all we have is a resultsource (it is trivial
@@ -2862,6 +2867,7 @@ sub create_ddl_dir {
     add_drop_table => 1,
     ignore_constraint_names => 1,
     ignore_index_names => 1,
+    quote_identifiers => $self->sql_maker->_quoting_enabled,
     %{$sqltargs || {}}
   };
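The new quote_identifiers default above simply mirrors whatever quoting the connected sql_maker already uses; an explicit value passed via $sqltargs still wins. A hedged sketch of both cases (schema class, paths and version are illustrative):

  # quoting requested at connect time now propagates into the generated DDL
  my $schema = MyApp::Schema->connect($dsn, $user, $pass, { quote_names => 1 });
  $schema->create_ddl_dir([qw(SQLite PostgreSQL)], '0.1', './sql');

  # ...unless overridden per call
  $schema->create_ddl_dir([qw(SQLite)], '0.1', './sql', undef, { quote_identifiers => 0 });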
 
@@ -2956,10 +2962,21 @@ sub create_ddl_dir {
         unless $dest_schema->name;
     }
 
-    my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db,
-                                                  $dest_schema,   $db,
-                                                  $sqltargs
-                                                 );
+    my $diff = do {
+      # FIXME - this is a terrible workaround for
+      # https://github.com/dbsrgits/sql-translator/commit/2d23c1e
+      # Fixing it in this sloppy manner so that we don't have to
+      # lockstep an SQLT release as well. Needs to be removed at
+      # some point, and SQLT dep bumped
+      local $SQL::Translator::Producer::SQLite::NO_QUOTES
+        if $SQL::Translator::Producer::SQLite::NO_QUOTES;
+
+      SQL::Translator::Diff::schema_diff($source_schema, $db,
+                                         $dest_schema,   $db,
+                                         $sqltargs
+                                       );
+    };
+
     if(!open $file, ">$difffile") {
       $self->throw_exception("Can't write to $difffile ($!)");
       next;
@@ -2977,7 +2994,8 @@ sub create_ddl_dir {
 
 =back
 
-Returns the statements used by L</deploy> and L<DBIx::Class::Schema/deploy>.
+Returns the statements used by L<DBIx::Class::Storage/deploy>
+and L<DBIx::Class::Schema/deploy>.
 
 The L<SQL::Translator> (not L<DBI>) database driver name can be explicitly
 provided in C<$type>, otherwise the result of L</sqlt_type> is used as default.
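For example (a minimal sketch, assuming an already connected $schema; the alternate type is illustrative):

  # DDL for the RDBMS we are currently connected to
  print $schema->deployment_statements;

  # or explicitly request statements for another database type
  print $schema->deployment_statements('PostgreSQL');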
@@ -3019,6 +3037,9 @@ sub deployment_statements {
   $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources}
       if exists $sqltargs->{sources};
 
+  $sqltargs->{quote_identifiers} = $self->sql_maker->_quoting_enabled
+    unless exists $sqltargs->{quote_identifiers};
+
   my $tr = SQL::Translator->new(
     producer => "SQL::Translator::Producer::${type}",
     %$sqltargs,
@@ -3251,13 +3272,13 @@ transactions.  You're on your own for handling all sorts of exceptional
 cases if you choose the C<< AutoCommit => 0 >> path, just as you would
 be with raw DBI.
 
+=head1 FURTHER QUESTIONS?
 
-=head1 AUTHOR AND CONTRIBUTORS
-
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
-
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -15,7 +15,8 @@ use mro 'c3';
 
 use List::Util 'first';
 use Scalar::Util 'blessed';
-use Sub::Name 'subname';
+use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize);
+use SQL::Abstract qw(is_plain_value is_literal_value);
 use namespace::clean;
 
 #
@@ -110,8 +111,8 @@ sub _adjust_select_args_for_complex_prefetch {
   my $outer_attrs = { %$attrs };
   delete @{$outer_attrs}{qw(from bind rows offset group_by _grouped_by_distinct having)};
 
-  my $inner_attrs = { %$attrs };
-  delete @{$inner_attrs}{qw(for collapse select as _related_results_construction)};
+  my $inner_attrs = { %$attrs, _simple_passthrough_construction => 1 };
+  delete @{$inner_attrs}{qw(for collapse select as)};
 
   # there is no point of ordering the insides if there is no limit
   delete $inner_attrs->{order_by} if (
@@ -389,7 +390,6 @@ sub _resolve_aliastypes_from_select_args {
   my $sql_maker = $self->sql_maker;
 
   # these are throw away results, do not pollute the bind stack
-  local $sql_maker->{select_bind};
   local $sql_maker->{where_bind};
   local $sql_maker->{group_bind};
   local $sql_maker->{having_bind};
@@ -416,7 +416,7 @@ sub _resolve_aliastypes_from_select_args {
   # generate sql chunks
   my $to_scan = {
     restricting => [
-      $sql_maker->_recurse_where ($attrs->{where}),
+      ($sql_maker->_recurse_where ($attrs->{where}))[0],
       $sql_maker->_parse_rs_attrs ({ having => $attrs->{having} }),
     ],
     grouping => [
@@ -429,7 +429,7 @@ sub _resolve_aliastypes_from_select_args {
       ),
     ],
     selecting => [
-      map { $sql_maker->_recurse_fields($_) } @{$attrs->{select}},
+      map { ($sql_maker->_recurse_fields($_))[0] } @{$attrs->{select}},
     ],
     ordering => [
       map { $_->[0] } $self->_extract_order_criteria ($attrs->{order_by}, $sql_maker),
@@ -654,9 +654,10 @@ sub _group_over_selection {
   }
 
   $self->throw_exception ( sprintf
-    'A required group_by clause could not be constructed automatically due to a complex '
-  . 'order_by criteria (%s). Either order_by columns only (no functions) or construct a suitable '
-  . 'group_by by hand',
+    'Unable to programmatically derive a required group_by from the supplied '
+  . 'order_by criteria. To proceed either add an explicit group_by, or '
+  . 'simplify your order_by to only include plain columns '
+  . '(supplied order_by: %s)',
     join ', ', map { "'$_'" } @$leftovers,
   ) if $leftovers;
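The reworded exception spells out the two ways forward; roughly (a hedged sketch, where the resultset, columns and literal SQL are illustrative):

  # either supply an explicit group_by covering the function-based ordering...
  $rs->search({}, {
    group_by => [qw( me.artistid me.name )],
    order_by => \ 'COUNT(cds.cdid) DESC',
  });

  # ...or keep the order_by to plain columns so a group_by can be derived
  $rs->search({}, { order_by => { -desc => 'me.name' } });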
 
@@ -710,6 +711,9 @@ sub _resolve_ident_sources {
 # for all sources
 sub _resolve_column_info {
   my ($self, $ident, $colnames) = @_;
+
+  return {} if $colnames and ! @$colnames;
+
   my $alias2src = $self->_resolve_ident_sources($ident);
 
   my (%seen_cols, @auto_colnames);
@@ -780,31 +784,9 @@ sub _resolve_column_info {
 sub _inner_join_to_node {
   my ($self, $from, $alias) = @_;
 
-  # subqueries and other oddness are naturally not supported
-  return $from if (
-    ref $from ne 'ARRAY'
-      ||
-    @$from <= 1
-      ||
-    ref $from->[0] ne 'HASH'
-      ||
-    ! $from->[0]{-alias}
-      ||
-    $from->[0]{-alias} eq $alias  # this last bit means $alias is the head of $from - nothing to do
-  );
-
-  # find the current $alias in the $from structure
-  my $switch_branch;
-  JOINSCAN:
-  for my $j (@{$from}[1 .. $#$from]) {
-    if ($j->[0]{-alias} eq $alias) {
-      $switch_branch = $j->[0]{-join_path};
-      last JOINSCAN;
-    }
-  }
+  my $switch_branch = $self->_find_join_path_to_node($from, $alias);
 
-  # something else went quite wrong
-  return $from unless $switch_branch;
+  return $from unless @{$switch_branch||[]};
 
   # So it looks like we will have to switch some stuff around.
   # local() is useless here as we will be leaving the scope
@@ -832,6 +814,29 @@ sub _inner_join_to_node {
   return \@new_from;
 }
 
+sub _find_join_path_to_node {
+  my ($self, $from, $target_alias) = @_;
+
+  # subqueries and other oddness are naturally not supported
+  return undef if (
+    ref $from ne 'ARRAY'
+      ||
+    ref $from->[0] ne 'HASH'
+      ||
+    ! defined $from->[0]{-alias}
+  );
+
+  # no path - the head is the alias
+  return [] if $from->[0]{-alias} eq $target_alias;
+
+  for my $i (1 .. $#$from) {
+    return $from->[$i][0]{-join_path} if ( ($from->[$i][0]{-alias}||'') eq $target_alias );
+  }
+
+  # something else went quite wrong
+  return undef;
+}
+
 sub _extract_order_criteria {
   my ($self, $order_by, $sql_maker) = @_;
 
@@ -881,15 +886,15 @@ sub _order_by_is_stable {
   my ($self, $ident, $order_by, $where) = @_;
 
   my @cols = (
-    (map { $_->[0] } $self->_extract_order_criteria($order_by)),
-    $where ? @{$self->_extract_fixed_condition_columns($where)} :(),
-  ) or return undef;
+    ( map { $_->[0] } $self->_extract_order_criteria($order_by) ),
+    ( $where ? keys %{ $self->_extract_fixed_condition_columns($where) } : () ),
+  ) or return 0;
 
   my $colinfo = $self->_resolve_column_info($ident, \@cols);
 
   return keys %$colinfo
     ? $self->_columns_comprise_identifying_set( $colinfo,  \@cols )
-    : undef
+    : 0
   ;
 }
 
@@ -905,115 +910,456 @@ sub _columns_comprise_identifying_set {
     return 1 if $src->_identifying_column_set($_);
   }
 
-  return undef;
+  return 0;
 }
 
-# this is almost identical to the above, except it accepts only
+# this is similar to _order_by_is_stable, except it takes
 # a single rsrc, and will succeed only if the first portion of the order
 # by is stable.
 # returns that portion as a colinfo hashref on success
-sub _main_source_order_by_portion_is_stable {
-  my ($self, $main_rsrc, $order_by, $where) = @_;
+sub _extract_colinfo_of_stable_main_source_order_by_portion {
+  my ($self, $attrs) = @_;
 
-  die "Huh... I expect a blessed result_source..."
-    if ref($main_rsrc) eq 'ARRAY';
+  my $nodes = $self->_find_join_path_to_node($attrs->{from}, $attrs->{alias});
+
+  return unless defined $nodes;
 
   my @ord_cols = map
     { $_->[0] }
-    ( $self->_extract_order_criteria($order_by) )
+    ( $self->_extract_order_criteria($attrs->{order_by}) )
   ;
   return unless @ord_cols;
 
-  my $colinfos = $self->_resolve_column_info($main_rsrc);
+  my $valid_aliases = { map { $_ => 1 } (
+    $attrs->{from}[0]{-alias},
+    map { values %$_ } @$nodes,
+  ) };
+
+  my $colinfos = $self->_resolve_column_info($attrs->{from});
+
+  my ($colinfos_to_return, $seen_main_src_cols);
+
+  for my $col (@ord_cols) {
+    # if order criteria is unresolvable - there is nothing we can do
+    my $colinfo = $colinfos->{$col} or last;
+
+    # if we reached the end of the allowed aliases - also nothing we can do
+    last unless $valid_aliases->{$colinfo->{-source_alias}};
+
+    $colinfos_to_return->{$col} = $colinfo;
+
+    $seen_main_src_cols->{$colinfo->{-colname}} = 1
+      if $colinfo->{-source_alias} eq $attrs->{alias};
+  }
+
+  # FIXME the condition may be singling out things on its own, so we
+  # conceivably could come back with "stable-ordered by nothing"
+  # not confident enough in the parser yet, so punt for the time being
+  return unless $seen_main_src_cols;
+
+  my $main_src_fixed_cols_from_cond = [ $attrs->{where}
+    ? (
+      map
+      {
+        ( $colinfos->{$_} and $colinfos->{$_}{-source_alias} eq $attrs->{alias} )
+          ? $colinfos->{$_}{-colname}
+          : ()
+      }
+      keys %{ $self->_extract_fixed_condition_columns($attrs->{where}) }
+    )
+    : ()
+  ];
+
+  return $attrs->{result_source}->_identifying_column_set([
+    keys %$seen_main_src_cols,
+    @$main_src_fixed_cols_from_cond,
+  ]) ? $colinfos_to_return : ();
+}
+
+# Attempts to flatten a passed in SQLA condition as much as possible towards
+# a plain hashref, *without* altering its semantics. Required so that
+# create/populate can extract definitive conditions from preexisting
+# resultset {where} stacks
+#
+# FIXME - while relatively robust, this is still imperfect, one of the first
+# things to tackle with DQ
+sub _collapse_cond {
+  my ($self, $where, $where_is_anded_array) = @_;
+
+  my $fin;
+
+  if (! $where) {
+    return;
+  }
+  elsif ($where_is_anded_array or ref $where eq 'HASH') {
+
+    my @pairs;
+
+    my @pieces = $where_is_anded_array ? @$where : $where;
+    while (@pieces) {
+      my $chunk = shift @pieces;
+
+      if (ref $chunk eq 'HASH') {
+        push @pairs, map { $_ => $chunk->{$_} } sort keys %$chunk;
+      }
+      elsif (ref $chunk eq 'ARRAY') {
+        push @pairs, -or => $chunk
+          if @$chunk;
+      }
+      elsif ( ! length ref $chunk) {
+        push @pairs, $chunk, shift @pieces;
+      }
+      else {
+        push @pairs, '', $chunk;
+      }
+    }
+
+    return unless @pairs;
+
+    my @conds = $self->_collapse_cond_unroll_pairs(\@pairs)
+      or return;
+
+    # Consolidate various @conds back into something more compact
+    for my $c (@conds) {
+      if (ref $c ne 'HASH') {
+        push @{$fin->{-and}}, $c;
+      }
+      else {
+        for my $col (sort keys %$c) {
+
+          # consolidate all -and nodes
+          if ($col =~ /^\-and$/i) {
+            push @{$fin->{-and}},
+              ref $c->{$col} eq 'ARRAY' ? @{$c->{$col}}
+            : ref $c->{$col} eq 'HASH' ? %{$c->{$col}}
+            : { $col => $c->{$col} }
+            ;
+          }
+          elsif ($col =~ /^\-/) {
+            push @{$fin->{-and}}, { $col => $c->{$col} };
+          }
+          elsif (exists $fin->{$col}) {
+            $fin->{$col} = [ -and => map {
+              (ref $_ eq 'ARRAY' and ($_->[0]||'') =~ /^\-and$/i )
+                ? @{$_}[1..$#$_]
+                : $_
+              ;
+            } ($fin->{$col}, $c->{$col}) ];
+          }
+          else {
+            $fin->{$col} = $c->{$col};
+          }
+        }
+      }
+    }
+  }
+  elsif (ref $where eq 'ARRAY') {
+    # we are always at top-level here, it is safe to dump empty *standalone* pieces
+    my $fin_idx;
+
+    for (my $i = 0; $i <= $#$where; $i++ ) {
+
+      my $logic_mod = lc ( ($where->[$i] =~ /^(\-(?:and|or))$/i)[0] || '' );
+
+      if ($logic_mod) {
+        $i++;
+        $self->throw_exception("Unsupported top-level op/arg pair: [ $logic_mod => $where->[$i] ]")
+          unless ref $where->[$i] eq 'HASH' or ref $where->[$i] eq 'ARRAY';
+
+        my $sub_elt = $self->_collapse_cond({ $logic_mod => $where->[$i] })
+          or next;
 
-  for (0 .. $#ord_cols) {
+        $fin_idx->{ "SER_" . serialize $sub_elt } = $sub_elt;
+      }
+      elsif (! length ref $where->[$i] ) {
+        my $sub_elt = $self->_collapse_cond({ @{$where}[$i, $i+1] })
+          or next;
+
+        $fin_idx->{ "COL_$where->[$i]_" . serialize $sub_elt } = $sub_elt;
+        $i++;
+      }
+      else {
+        $fin_idx->{ "SER_" . serialize $where->[$i] } = $self->_collapse_cond( $where->[$i] ) || next;
+      }
+    }
+
+    if (! $fin_idx) {
+      return;
+    }
+    elsif ( keys %$fin_idx == 1 ) {
+      $fin = (values %$fin_idx)[0];
+    }
+    else {
+      my @or;
+
+      # at this point everything is at most one level deep - unroll if needed
+      for (sort keys %$fin_idx) {
+        if ( ref $fin_idx->{$_} eq 'HASH' and keys %{$fin_idx->{$_}} == 1 ) {
+          my ($l, $r) = %{$fin_idx->{$_}};
+
+          if (
+            ref $r eq 'ARRAY'
+              and
+            (
+              ( @$r == 1 and $l =~ /^\-and$/i )
+                or
+              $l =~ /^\-or$/i
+            )
+          ) {
+            push @or, @$r
+          }
+
+          elsif (
+            ref $r eq 'HASH'
+              and
+            keys %$r == 1
+              and
+            $l =~ /^\-(?:and|or)$/i
+          ) {
+            push @or, %$r;
+          }
+
+          else {
+            push @or, $l, $r;
+          }
+        }
+        else {
+          push @or, $fin_idx->{$_};
+        }
+      }
+
+      $fin->{-or} = \@or;
+    }
+  }
+  else {
+    # not a hash not an array
+    $fin = { -and => [ $where ] };
+  }
+
+  # unroll single-element -and's
+  while (
+    $fin->{-and}
+      and
+    @{$fin->{-and}} < 2
+  ) {
+    my $and = delete $fin->{-and};
+    last if @$and == 0;
+
+    # at this point we have @$and == 1
     if (
-      ! $colinfos->{$ord_cols[$_]}
-        or
-      $colinfos->{$ord_cols[$_]}{-result_source} != $main_rsrc
+      ref $and->[0] eq 'HASH'
+        and
+      ! grep { exists $fin->{$_} } keys %{$and->[0]}
     ) {
-      $#ord_cols =  $_ - 1;
+      $fin = {
+        %$fin, %{$and->[0]}
+      };
+    }
+    else {
+      $fin->{-and} = $and;
       last;
     }
   }
 
-  # we just truncated it above
-  return unless @ord_cols;
+  # compress same-column conds found in $fin
+  for my $col ( grep { $_ !~ /^\-/ } keys %$fin ) {
+    next unless ref $fin->{$col} eq 'ARRAY' and ($fin->{$col}[0]||'') =~ /^\-and$/i;
+    my $val_bag = { map {
+      (! defined $_ )                          ? ( UNDEF => undef )
+    : ( ! length ref $_ or is_plain_value $_ ) ? ( "VAL_$_" => $_ )
+    : ( ( 'SER_' . serialize $_ ) => $_ )
+    } @{$fin->{$col}}[1 .. $#{$fin->{$col}}] };
+
+    if (keys %$val_bag == 1 ) {
+      ($fin->{$col}) = values %$val_bag;
+    }
+    else {
+      $fin->{$col} = [ -and => map { $val_bag->{$_} } sort keys %$val_bag ];
+    }
+  }
 
-  my $order_portion_ci = { map {
-    $colinfos->{$_}{-colname} => $colinfos->{$_},
-    $colinfos->{$_}{-fq_colname} => $colinfos->{$_},
-  } @ord_cols };
+  return keys %$fin ? $fin : ();
+}
 
-  # since all we check here are the start of the order_by belonging to the
-  # top level $rsrc, a present identifying set will mean that the resultset
-  # is ordered by its leftmost table in a stable manner
-  #
-  # RV of _identifying_column_set contains unqualified names only
-  my $unqualified_idset = $main_rsrc->_identifying_column_set({
-    ( $where ? %{
-      $self->_resolve_column_info(
-        $main_rsrc, $self->_extract_fixed_condition_columns($where)
-      )
-    } : () ),
-    %$order_portion_ci
-  }) or return;
-
-  my $ret_info;
-  my %unqualified_idcols_from_order = map {
-    $order_portion_ci->{$_} ? ( $_ => $order_portion_ci->{$_} ) : ()
-  } @$unqualified_idset;
-
-  # extra optimization - cut the order_by at the end of the identifying set
-  # (just in case the user was stupid and overlooked the obvious)
-  for my $i (0 .. $#ord_cols) {
-    my $col = $ord_cols[$i];
-    my $unqualified_colname = $order_portion_ci->{$col}{-colname};
-    $ret_info->{$col} = { %{$order_portion_ci->{$col}}, -idx_in_order_subset => $i };
-    delete $unqualified_idcols_from_order{$ret_info->{$col}{-colname}};
-
-    # we didn't reach the end of the identifying portion yet
-    return $ret_info unless keys %unqualified_idcols_from_order;
-  }
-
-  die 'How did we get here...';
+sub _collapse_cond_unroll_pairs {
+  my ($self, $pairs) = @_;
+
+  my @conds;
+
+  while (@$pairs) {
+    my ($lhs, $rhs) = splice @$pairs, 0, 2;
+
+    if ($lhs eq '') {
+      push @conds, $self->_collapse_cond($rhs);
+    }
+    elsif ( $lhs =~ /^\-and$/i ) {
+      push @conds, $self->_collapse_cond($rhs, (ref $rhs eq 'ARRAY'));
+    }
+    elsif ( $lhs =~ /^\-or$/i ) {
+      push @conds, $self->_collapse_cond(
+        (ref $rhs eq 'HASH') ? [ map { $_ => $rhs->{$_} } sort keys %$rhs ] : $rhs
+      );
+    }
+    else {
+      if (ref $rhs eq 'HASH' and ! keys %$rhs) {
+        # FIXME - SQLA seems to be doing... nothing...?
+      }
+      elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-ident}) {
+        push @conds, { $lhs => { '=', $rhs } };
+      }
+      elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-value} and is_plain_value $rhs->{-value}) {
+        push @conds, { $lhs => $rhs->{-value} };
+      }
+      elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) {
+        if( is_literal_value $rhs->{'='}) {
+          push @conds, { $lhs => $rhs };
+        }
+        else {
+          for my $p ($self->_collapse_cond_unroll_pairs([ $lhs => $rhs->{'='} ])) {
+
+            # extra sanity check
+            if (keys %$p > 1) {
+              require Data::Dumper::Concise;
+              local $Data::Dumper::Deepcopy = 1;
+              $self->throw_exception(
+                "Internal error: unexpected collapse unroll:"
+              . Data::Dumper::Concise::Dumper { in => { $lhs => $rhs }, out => $p }
+              );
+            }
+
+            my ($l, $r) = %$p;
+
+            push @conds, ( ! length ref $r or is_plain_value($r) )
+              ? { $l => $r }
+              : { $l => { '=' => $r } }
+            ;
+          }
+        }
+      }
+      elsif (ref $rhs eq 'ARRAY') {
+        # some of these conditionals encounter multi-values - roll them out using
+        # an unshift, which will cause extra looping in the while{} above
+        if (! @$rhs ) {
+          push @conds, { $lhs => [] };
+        }
+        elsif ( ($rhs->[0]||'') =~ /^\-(?:and|or)$/i ) {
+          $self->throw_exception("Value modifier not followed by any values: $lhs => [ $rhs->[0] ] ")
+            if  @$rhs == 1;
+
+          if( $rhs->[0] =~ /^\-and$/i ) {
+            unshift @$pairs, map { $lhs => $_ } @{$rhs}[1..$#$rhs];
+          }
+          # if not an AND then it's an OR
+          elsif(@$rhs == 2) {
+            unshift @$pairs, $lhs => $rhs->[1];
+          }
+          else {
+            push @conds, { $lhs => [ @{$rhs}[1..$#$rhs] ] };
+          }
+        }
+        elsif (@$rhs == 1) {
+          unshift @$pairs, $lhs => $rhs->[0];
+        }
+        else {
+          push @conds, { $lhs => $rhs };
+        }
+      }
+      # unroll func + { -value => ... }
+      elsif (
+        ref $rhs eq 'HASH'
+          and
+        ( my ($subop) = keys %$rhs ) == 1
+          and
+        length ref ((values %$rhs)[0])
+          and
+        my $vref = is_plain_value( (values %$rhs)[0] )
+      ) {
+        push @conds, { $lhs => { $subop => $$vref } }
+      }
+      else {
+        push @conds, { $lhs => $rhs };
+      }
+    }
+  }
+
+  return @conds;
 }
 
-# returns an arrayref of column names which *definitely* have some
-# sort of non-nullable equality requested in the given condition
-# specification. This is used to figure out if a resultset is
-# constrained to a column which is part of a unique constraint,
-# which in turn allows us to better predict how ordering will behave
-# etc.
+# Analyzes a given condition and attempts to extract all columns
+# with a definitive fixed-condition criteria. Returns a hashref
+# of k/v pairs suitable to be passed to set_columns(), with a
+# MAJOR CAVEAT - multi-value (contradictory) equalities are still
+# represented as a reference to the UNRESOLVABLE_CONDITION constant.
+# The reason we do this is that some codepaths only care about the
+# condition being stable, as opposed to actually making sense
+#
+# The normal mode is used to figure out if a resultset is constrained
+# to a column which is part of a unique constraint, which in turn
+# allows us to better predict how ordering will behave etc.
+#
+# With the optional "consider_nulls" boolean argument, the function
+# is instead used to infer unambiguous values from conditions
+# (e.g. the inheritance of resultset conditions on new_result)
 #
-# this is a rudimentary, incomplete, and error-prone extractor
-# however this is OK - it is conservative, and if we can not find
-# something that is in fact there - the stack will recover gracefully
-# Also - DQ and the mst it rode in on will save us all RSN!!!
 sub _extract_fixed_condition_columns {
-  my ($self, $where) = @_;
+  my ($self, $where, $consider_nulls) = @_;
+  my $where_hash = $self->_collapse_cond($_[1]);
 
-  return unless ref $where eq 'HASH';
+  my $res = {};
+  my ($c, $v);
+  for $c (keys %$where_hash) {
+    my $vals;
 
-  my @cols;
-  for my $lhs (keys %$where) {
-    if ($lhs =~ /^\-and$/i) {
-      push @cols, ref $where->{$lhs} eq 'ARRAY'
-        ? ( map { @{ $self->_extract_fixed_condition_columns($_) } } @{$where->{$lhs}} )
-        : @{ $self->_extract_fixed_condition_columns($where->{$lhs}) }
-      ;
+    if (!defined ($v = $where_hash->{$c}) ) {
+      $vals->{UNDEF} = $v if $consider_nulls
+    }
+    elsif (
+      ref $v eq 'HASH'
+        and
+      keys %$v == 1
+    ) {
+      if (exists $v->{-value}) {
+        if (defined $v->{-value}) {
+          $vals->{"VAL_$v->{-value}"} = $v->{-value}
+        }
+        elsif( $consider_nulls ) {
+          $vals->{UNDEF} = $v->{-value};
+        }
+      }
+      # do not need to check for plain values - _collapse_cond did it for us
+      elsif(length ref $v->{'='} and is_literal_value($v->{'='}) ) {
+        $vals->{ 'SER_' . serialize $v->{'='} } = $v->{'='};
+      }
+    }
+    elsif (
+      ! length ref $v
+        or
+      is_plain_value ($v)
+    ) {
+      $vals->{"VAL_$v"} = $v;
+    }
+    elsif (ref $v eq 'ARRAY' and ($v->[0]||'') eq '-and') {
+      for ( @{$v}[1..$#$v] ) {
+        my $subval = $self->_extract_fixed_condition_columns({ $c => $_ }, 'consider nulls');  # always fish nulls out on recursion
+        next unless exists $subval->{$c};  # didn't find anything
+        $vals->{
+          ! defined $subval->{$c}                                        ? 'UNDEF'
+        : ( ! length ref $subval->{$c} or is_plain_value $subval->{$c} ) ? "VAL_$subval->{$c}"
+        : ( 'SER_' . serialize $subval->{$c} )
+        } = $subval->{$c};
+      }
     }
-    elsif ($lhs !~ /^\-/) {
-      my $val = $where->{$lhs};
 
-      push @cols, $lhs if (defined $val and (
-        ! ref $val
-          or
-        (ref $val eq 'HASH' and keys %$val == 1 and defined $val->{'='})
-      ));
+    if (keys %$vals == 1) {
+      ($res->{$c}) = (values %$vals)
+        unless !$consider_nulls and exists $vals->{UNDEF};
+    }
+    elsif (keys %$vals > 1) {
+      $res->{$c} = UNRESOLVABLE_CONDITION;
     }
   }
-  return \@cols;
+
+  $res;
 }
 
 1;
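The effect of the fixed-condition extraction above is easiest to see through the public API: definitive equalities in a resultset condition are inherited by rows created from it, while ranges and other non-fixed criteria are not. A hedged illustration using the Artist/CD layout from the synopsis:

  my $recent_by_artist = $schema->resultset('CD')->search({
    artistid => 42,
    year     => { '>' => 2000 },
  });

  # the fixed equality on artistid is picked up by the new row,
  # the open-ended year criteria is not
  my $cd = $recent_by_artist->new_result({ title => 'Yet Another CD' });
  # $cd->artistid is expected to be 42, while $cd->year remains unset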
@@ -1,12 +1,33 @@
 package DBIx::Class::Storage::Statistics;
+
 use strict;
 use warnings;
 
-use base qw/DBIx::Class/;
-use IO::File;
-use namespace::clean;
+use DBIx::Class::_Util qw(sigwarn_silencer qsub);
+use IO::Handle ();
+
+# DO NOT edit away without talking to riba first, he will just put it back
+# BEGIN pre-Moo2 import block
+BEGIN {
+  my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all};
+
+  local $ENV{PERL_STRICTURES_EXTRA} = 0;
+  # load all of these now, so that lazy-loading does not escape
+  # the current PERL_STRICTURES_EXTRA setting
+  require Sub::Quote;
+  require Sub::Defer;
+  require Moo;
+  require Moo::Object;
+  require Method::Generate::Accessor;
+  require Method::Generate::Constructor;
+
+  Moo->import;
+  ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} );
+}
+# END pre-Moo2 import block
 
-__PACKAGE__->mk_group_accessors(simple => qw/callback _debugfh silence/);
+extends 'DBIx::Class';
+use namespace::clean;
 
 =head1 NAME
 
@@ -26,55 +47,65 @@ for collecting the statistics as discussed in L<DBIx::Class::Manual::Cookbook>.
 
 =head1 METHODS
 
-=cut
-
 =head2 new
 
 Returns a new L<DBIx::Class::Storage::Statistics> object.
 
-=cut
-sub new {
-  my $self = {};
-  bless $self, (ref($_[0]) || $_[0]);
-
-  return $self;
-}
-
 =head2 debugfh
 
 Sets or retrieves the filehandle used for trace/debug output.  This should
-be an IO::Handle compatible object (only the C<print> method is used). Initially
-should be set to STDERR - although see information on the
-L<DBIC_TRACE> environment variable.
+be an L<IO::Handle> compatible object (only the
+L<< print|IO::Handle/METHODS >> method is used). By
+default it is initially set to STDERR - although see discussion of the
+L<DBIC_TRACE|DBIx::Class::Storage/DBIC_TRACE> environment variable.
 
-As getter it will lazily open a filehandle for you if one is not already set.
+Invoked as a getter it will lazily open a filehandle and set it to
+L<< autoflush|perlvar/HANDLE->autoflush( EXPR ) >> (if one is not
+already set).
 
 =cut
 
+# FIXME - there ought to be a way to fold this into _debugfh itself
+# having the undef re-trigger the builder (or better yet a default
+# which can be folded in as a qsub)
 sub debugfh {
   my $self = shift;
 
-  if (@_) {
-    $self->_debugfh($_[0]);
-  } elsif (!defined($self->_debugfh())) {
-    my $fh;
-    my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG}
-                  || $ENV{DBIC_TRACE};
-    if (defined($debug_env) && ($debug_env =~ /=(.+)$/)) {
-      $fh = IO::File->new($1, 'a')
-        or die("Cannot open trace file $1");
-    } else {
-      $fh = IO::File->new('>&STDERR')
-        or die('Duplication of STDERR for debug output failed (perhaps your STDERR is closed?)');
-    }
-
-    $fh->autoflush();
-    $self->_debugfh($fh);
+  return $self->_debugfh(@_) if @_;
+  $self->_debugfh || $self->_build_debugfh;
+}
+
+has _debugfh => (
+  is => 'rw',
+  lazy => 1,
+  trigger => qsub '$_[0]->_defaulted_to_stderr(undef)',
+  builder => '_build_debugfh',
+);
+
+sub _build_debugfh {
+  my $fh;
+
+  my $debug_env = $ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} || $ENV{DBIC_TRACE};
+
+  if (defined($debug_env) and ($debug_env =~ /=(.+)$/)) {
+    open ($fh, '>>', $1)
+      or die("Cannot open trace file $1: $!\n");
+  }
+  else {
+    open ($fh, '>&STDERR')
+      or die("Duplication of STDERR for debug output failed (perhaps your STDERR is closed?): $!\n");
+    $_[0]->_defaulted_to_stderr(1);
   }
 
-  $self->_debugfh;
+  $fh->autoflush(1);
+
+  $fh;
 }
 
+has [qw(_defaulted_to_stderr silence callback)] => (
+  is => 'rw',
+);
+
 =head2 print
 
 Prints the specified string to our debugging filehandle.  Provided to save our
@@ -86,7 +117,13 @@ sub print {
 
   return if $self->silence;
 
-  $self->debugfh->print($msg);
+  my $fh = $self->debugfh;
+
+  # not using 'no warnings' here because all of this can change at runtime
+  local $SIG{__WARN__} = sigwarn_silencer(qr/^Wide character in print/)
+    if $self->_defaulted_to_stderr;
+
+  $fh->print($msg);
 }
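The trace output printed through this filehandle is most often directed via the environment variable parsed by _build_debugfh above, though a filehandle can also be supplied directly. A minimal sketch (the log path is illustrative):

  # from the shell: log all generated SQL to a file instead of STDERR
  #   DBIC_TRACE=1=/tmp/dbic_trace.sql ./myapp.pl

  # or at runtime
  $schema->storage->debug(1);
  open my $trace_fh, '>>', '/tmp/dbic_trace.sql' or die $!;
  $schema->storage->debugfh($trace_fh);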
 
 =head2 silence
@@ -196,18 +233,22 @@ sub query_start {
 Called when a query finishes executing.  Has the same arguments as query_start.
 
 =cut
+
 sub query_end {
   my ($self, $string) = @_;
 }
 
-1;
-
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
+
+1;
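Beyond raw SQL logging, the class documented above is also the natural base for custom metric collectors handed to debugobj(). A hedged sketch of a timing subclass (the class name is made up):

  package My::QueryTimer;
  use base 'DBIx::Class::Storage::Statistics';
  use Time::HiRes ();

  my $query_started_at;

  sub query_start {
    my $self = shift;
    $query_started_at = Time::HiRes::time();
    $self->SUPER::query_start(@_);
  }

  sub query_end {
    my $self = shift;
    $self->print(sprintf "# took %.4fs\n", Time::HiRes::time() - $query_started_at);
    $self->SUPER::query_end(@_);
  }

  1;

  # elsewhere in the application
  $schema->storage->debugobj(My::QueryTimer->new);
  $schema->storage->debug(1);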
@@ -154,13 +154,15 @@ the transaction is rolled back, via L<DBIx::Class::Storage/txn_rollback>
 
 L<DBIx::Class::Schema/txn_scope_guard>.
 
-=head1 AUTHOR
+L<Scope::Guard> by chocolateboy (inspiration for this module)
 
-Ash Berlin, 2008.
+=head1 FURTHER QUESTIONS?
 
-Inspired by L<Scope::Guard> by chocolateboy.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-This module is free software. It may be used, redistributed and/or modified
-under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -227,6 +227,7 @@ sub txn_commit {
     $self->debugobj->txn_commit() if $self->debug;
     $self->_exec_txn_commit;
     $self->{transaction_depth}--;
+    $self->savepoints([]);
   }
   elsif($self->transaction_depth > 1) {
     $self->{transaction_depth}--;
@@ -252,6 +253,7 @@ sub txn_rollback {
     $self->debugobj->txn_rollback() if $self->debug;
     $self->_exec_txn_rollback;
     $self->{transaction_depth}--;
+    $self->savepoints([]);
   }
   elsif ($self->transaction_depth > 1) {
     $self->{transaction_depth}--;
@@ -434,10 +436,10 @@ shell environment.
 
 =head2 debugfh
 
-Set or retrieve the filehandle used for trace/debug output.  This should be
-an IO::Handle compatible object (only the C<print> method is used.  Initially
-set to be STDERR - although see information on the
-L<DBIC_TRACE> environment variable.
+An opportunistic proxy to L<< ->debugobj->debugfh(@_)
+|DBIx::Class::Storage::Statistics/debugfh >>.
+If the currently set L</debugobj> does not have a L</debugfh> method, calling
+this is a no-op.
 
 =cut
 
@@ -634,7 +636,6 @@ filename the file is read with L<Config::Any> and the results are
 used as the configuration for tracing.  See L<SQL::Abstract::Tree/new>
 for what that structure should look like.
 
-
 =head2 DBIX_CLASS_STORAGE_DBI_DEBUG
 
 Old name for DBIC_TRACE
@@ -644,13 +645,16 @@ Old name for DBIC_TRACE
 L<DBIx::Class::Storage::DBI> - reference storage implementation using
 SQL::Abstract and DBI.
 
-=head1 AUTHOR AND CONTRIBUTORS
+=head1 FURTHER QUESTIONS?
 
-See L<AUTHOR|DBIx::Class/AUTHOR> and L<CONTRIBUTORS|DBIx::Class/CONTRIBUTORS> in DBIx::Class
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -162,13 +162,16 @@ sub _is_utf8_column {
   return ($_[0]->utf8_columns || {})->{$_[1]};
 }
 
-=head1 AUTHORS
+=head1 FURTHER QUESTIONS?
 
-See L<DBIx::Class/CONTRIBUTORS>.
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-=head1 LICENSE
+=head1 COPYRIGHT AND LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
 
 =cut
 
@@ -17,23 +17,23 @@ BEGIN {
     # but of course
     BROKEN_FORK => ($^O eq 'MSWin32') ? 1 : 0,
 
+    BROKEN_GOTO => ($] < '5.008003') ? 1 : 0,
+
     HAS_ITHREADS => $Config{useithreads} ? 1 : 0,
 
     # ::Runmode would only be loaded by DBICTest, which in turn implies t/
     DBICTEST => eval { DBICTest::RunMode->is_author } ? 1 : 0,
 
     # During 5.13 dev cycle HELEMs started to leak on copy
-    PEEPEENESS =>
-      # request for all tests would force "non-leaky" illusion and vice-versa
-      defined $ENV{DBICTEST_ALL_LEAKS}                                              ? !$ENV{DBICTEST_ALL_LEAKS}
-      # otherwise confess that this perl is busted ONLY on smokers
-    : eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006)  ? 1
-      # otherwise we are good
-                                                                                    : 0
-    ,
+    # add an escape for these perls ON SMOKERS - a user will still get death
+    PEEPEENESS => ( eval { DBICTest::RunMode->is_smoker } && ($] >= 5.013005 and $] <= 5.013006) ),
+
+    SHUFFLE_UNORDERED_RESULTSETS => $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} ? 1 : 0,
 
     ASSERT_NO_INTERNAL_WANTARRAY => $ENV{DBIC_ASSERT_NO_INTERNAL_WANTARRAY} ? 1 : 0,
 
+    ASSERT_NO_INTERNAL_INDIRECT_CALLS => $ENV{DBIC_ASSERT_NO_INTERNAL_INDIRECT_CALLS} ? 1 : 0,
+
     IV_SIZE => $Config{ivsize},
 
     OS_NAME => $^O,
@@ -55,9 +55,35 @@ use DBIx::Class::Carp '^DBIx::Class|^DBICTest';
 
 use Carp 'croak';
 use Scalar::Util qw(weaken blessed reftype);
+use List::Util qw(first);
+
+# DO NOT edit away without talking to riba first, he will just put it back
+# BEGIN pre-Moo2 import block
+BEGIN {
+  my $initial_fatal_bits = (${^WARNING_BITS}||'') & $warnings::DeadBits{all};
+
+  local $ENV{PERL_STRICTURES_EXTRA} = 0;
+  # load all of these now, so that lazy-loading does not escape
+  # the current PERL_STRICTURES_EXTRA setting
+  require Sub::Quote;
+  require Sub::Defer;
+
+  Sub::Quote->import('quote_sub');
+  ${^WARNING_BITS} &= ( $initial_fatal_bits | ~ $warnings::DeadBits{all} );
+}
+sub qsub ($) { goto &quote_sub }  # no point depping on new Moo just for this
+# END pre-Moo2 import block
 
 use base 'Exporter';
-our @EXPORT_OK = qw(sigwarn_silencer modver_gt_or_eq fail_on_internal_wantarray refcount hrefaddr is_exception);
+our @EXPORT_OK = qw(
+  sigwarn_silencer modver_gt_or_eq
+  fail_on_internal_wantarray fail_on_internal_call
+  refdesc refcount hrefaddr is_exception
+  quote_sub qsub perlstring serialize
+  UNRESOLVABLE_CONDITION
+);
+
+use constant UNRESOLVABLE_CONDITION => \ '1 = 0';
 
 sub sigwarn_silencer ($) {
   my $pattern = shift;
@@ -69,7 +95,21 @@ sub sigwarn_silencer ($) {
   return sub { &$orig_sig_warn unless $_[0] =~ $pattern };
 }
 
-sub hrefaddr ($) { sprintf '0x%x', &Scalar::Util::refaddr }
+sub perlstring ($) { q{"}. quotemeta( shift ). q{"} };
+
+sub hrefaddr ($) { sprintf '0x%x', &Scalar::Util::refaddr||0 }
+
+sub refdesc ($) {
+  croak "Expecting a reference" if ! length ref $_[0];
+
+  # be careful not to trigger stringification,
+  # reuse @_ as a scratch-pad
+  sprintf '%s%s(0x%x)',
+    ( defined( $_[1] = blessed $_[0]) ? "$_[1]=" : '' ),
+    reftype $_[0],
+    Scalar::Util::refaddr($_[0]),
+  ;
+}
 
 sub refcount ($) {
   croak "Expecting a reference" if ! length ref $_[0];
@@ -80,6 +120,12 @@ sub refcount ($) {
   B::svref_2object($_[0])->REFCNT;
 }
 
+sub serialize ($) {
+  require Storable;
+  local $Storable::canonical = 1;
+  Storable::nfreeze($_[0]);
+}
+
 sub is_exception ($) {
   my $e = $_[0];
 
@@ -101,8 +147,8 @@ sub is_exception ($) {
   if (defined $suberror) {
     if (length (my $class = blessed($e) )) {
       carp_unique( sprintf(
-        'External exception object %s=%s(%s) implements partial (broken) '
-      . 'overloading preventing it from being used in simple ($x eq $y) '
+        'External exception class %s implements partial (broken) overloading '
+      . 'preventing its instances from being used in simple ($x eq $y) '
       . 'comparisons. Given Perl\'s "globally cooperative" exception '
       . 'handling this type of brokenness is extremely dangerous on '
       . 'exception objects, as it may (and often does) result in silent '
@@ -114,8 +160,6 @@ sub is_exception ($) {
       . 'is saner application-wide. What follows is the actual error text '
       . "as generated by Perl itself:\n\n%s\n ",
         $class,
-        reftype $e,
-        hrefaddr $e,
         $class,
         'http://v.gd/DBIC_overload_tempfix/',
         $suberror,
@@ -146,6 +190,9 @@ sub modver_gt_or_eq ($$) {
   local $SIG{__WARN__} = sigwarn_silencer( qr/\Qisn't numeric in subroutine entry/ )
     if SPURIOUS_VERSION_CHECK_WARNINGS;
 
+  croak "$mod does not seem to provide a version (perhaps it never loaded)"
+    unless $mod->VERSION;
+
   local $@;
   eval { $mod->VERSION($ver) } ? 1 : 0;
 }
@@ -153,7 +200,7 @@ sub modver_gt_or_eq ($$) {
 {
   my $list_ctx_ok_stack_marker;
 
-  sub fail_on_internal_wantarray {
+  sub fail_on_internal_wantarray () {
     return if $list_ctx_ok_stack_marker;
 
     if (! defined wantarray) {
@@ -176,14 +223,23 @@ sub modver_gt_or_eq ($$) {
       $cf++;
     }
 
+    my ($fr, $want, $argdesc);
+    {
+      package DB;
+      $fr = [ caller($cf) ];
+      $want = ( caller($cf-1) )[5];
+      $argdesc = ref $DB::args[0]
+        ? DBIx::Class::_Util::refdesc($DB::args[0])
+        : 'non '
+      ;
+    };
+
     if (
-      (caller($cf))[0] =~ /^(?:DBIx::Class|DBICx::)/
+      $want and $fr->[0] =~ /^(?:DBIx::Class|DBICx::)/
     ) {
-      my $obj = shift;
-
       DBIx::Class::Exception->throw( sprintf (
-        "Improper use of %s(%s) instance in list context at %s line %d\n\n\tStacktrace starts",
-        ref($obj), hrefaddr($obj), (caller($cf))[1,2]
+        "Improper use of %s instance in list context at %s line %d\n\n    Stacktrace starts",
+        $argdesc, @{$fr}[1,2]
       ), 'with_stacktrace');
     }
 
@@ -193,4 +249,33 @@ sub modver_gt_or_eq ($$) {
   }
 }
 
+sub fail_on_internal_call {
+  my ($fr, $argdesc);
+  {
+    package DB;
+    $fr = [ caller(1) ];
+    $argdesc = ref $DB::args[0]
+      ? DBIx::Class::_Util::refdesc($DB::args[0])
+      : undef
+    ;
+  };
+
+  if (
+    $argdesc
+      and
+    $fr->[0] =~ /^(?:DBIx::Class|DBICx::)/
+      and
+    $fr->[1] !~ /\b(?:CDBICompat|ResultSetProxy)\b/  # no point touching there
+  ) {
+    DBIx::Class::Exception->throw( sprintf (
+      "Illegal internal call of indirect proxy-method %s() with argument %s: examine the last lines of the proxy method deparse below to determine what to call directly instead at %s on line %d\n\n%s\n\n    Stacktrace starts",
+      $fr->[3], $argdesc, @{$fr}[1,2], ( $fr->[6] || do {
+        require B::Deparse;
+        no strict 'refs';
+        B::Deparse->new->coderef2text(\&{$fr->[3]})
+      }),
+    ), 'with_stacktrace');
+  }
+}
+
 1;
@@ -11,7 +11,7 @@ our $VERSION;
 # $VERSION declaration must stay up here, ahead of any other package
 # declarations, as to not confuse various modules attempting to determine
 # this ones version, whether that be s.c.o. or Module::Metadata, etc
-$VERSION = '0.08270';
+$VERSION = '0.082800';
 
 $VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases
 
@@ -57,12 +57,16 @@ sub _attr_cache {
   };
 }
 
+# *DO NOT* change this URL nor the identically named =head1 below
+# it is linked throughout the ecosystem
+sub DBIx::Class::_ENV_::HELP_URL () {
+  'http://p3rl.org/DBIx::Class#GETTING_HELP/SUPPORT'
+}
+
 1;
 
 __END__
 
-=encoding UTF-8
-
 =head1 NAME
 
 DBIx::Class - Extensible and flexible object <-> relational mapper.
@@ -74,13 +78,15 @@ To get the most out of DBIx::Class with the least confusion it is strongly
 recommended to read (at the very least) the
 L<Manuals|DBIx::Class::Manual::DocMap/Manuals> in the order presented there.
 
-=head1 HOW TO GET HELP
+=cut
 
-Due to the complexity of its problem domain, DBIx::Class is a relatively
+=head1 GETTING HELP/SUPPORT
+
+Due to the sheer size of its problem domain, DBIx::Class is a relatively
 complex framework. After you start using DBIx::Class questions will inevitably
 arise. If you are stuck with a problem or have doubts about a particular
-approach do not hesitate to contact the community with your questions. The
-list below is sorted by "fastest response time":
+approach do not hesitate to contact us via any of the following options (the
+list is sorted by "fastest response time"):
 
 =over
 
@@ -249,8 +255,10 @@ Contributions are always welcome, in all usable forms (we especially
 welcome documentation improvements). The delivery methods include git-
 or unified-diff formatted patches, GitHub pull requests, or plain bug
 reports either via RT or the Mailing list. Contributors are generally
-granted full access to the official repository after their first patch
-passes successful review.
+granted access to the official repository after their first several
+patches pass successful review. Don't hesitate to
+L<contact|/GETTING HELP/SUPPORT> either of the L</CAT HERDERS> with
+any further questions you may have.
 
 =for comment
 FIXME: Getty, frew and jnap need to get off their asses and finish the contrib section so we can link it here ;)
@@ -275,279 +283,44 @@ accessible at the following locations:
 
 =back
 
-=head1 AUTHOR
-
-mst: Matt S. Trout <mst@shadowcatsystems.co.uk>
-
-(I mostly consider myself "project founder" these days but the AUTHOR heading
-is traditional :)
-
-=head1 CONTRIBUTORS
-
-abraxxa: Alexander Hartmaier <abraxxa@cpan.org>
-
-acca: Alexander Kuznetsov <acca@cpan.org>
-
-aherzog: Adam Herzog <adam@herzogdesigns.com>
-
-Alexander Keusch <cpan@keusch.at>
-
-alexrj: Alessandro Ranellucci <aar@cpan.org>
-
-alnewkirk: Al Newkirk <we@ana.im>
-
-amiri: Amiri Barksdale <amiri@metalabel.com>
-
-amoore: Andrew Moore <amoore@cpan.org>
-
-andrewalker: Andre Walker <andre@andrewalker.net>
-
-andyg: Andy Grundman <andy@hybridized.org>
-
-ank: Andres Kievsky
-
-arc: Aaron Crane <arc@cpan.org>
-
-arcanez: Justin Hunter <justin.d.hunter@gmail.com>
-
-ash: Ash Berlin <ash@cpan.org>
-
-bert: Norbert Csongrádi <bert@cpan.org>
-
-blblack: Brandon L. Black <blblack@gmail.com>
-
-bluefeet: Aran Deltac <bluefeet@cpan.org>
-
-bphillips: Brian Phillips <bphillips@cpan.org>
-
-boghead: Bryan Beeley <cpan@beeley.org>
-
-brd: Brad Davis <brd@FreeBSD.org>
-
-bricas: Brian Cassidy <bricas@cpan.org>
-
-brunov: Bruno Vecchi <vecchi.b@gmail.com>
-
-caelum: Rafael Kitover <rkitover@cpan.org>
-
-caldrin: Maik Hentsche <maik.hentsche@amd.com>
-
-castaway: Jess Robinson
-
-claco: Christopher H. Laco
-
-clkao: CL Kao
-
-da5id: David Jack Olrik <djo@cpan.org>
-
-dariusj: Darius Jokilehto <dariusjokilehto@yahoo.co.uk>
-
-davewood: David Schmidt <davewood@gmx.at>
-
-daxim: Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>
-
-debolaz: Anders Nor Berle <berle@cpan.org>
-
-dew: Dan Thomas <dan@godders.org>
-
-dkubb: Dan Kubb <dan.kubb-cpan@onautopilot.com>
-
-dnm: Justin Wheeler <jwheeler@datademons.com>
-
-dpetrov: Dimitar Petrov <mitakaa@gmail.com>
-
-dwc: Daniel Westermann-Clark <danieltwc@cpan.org>
-
-dyfrgi: Michael Leuchtenburg <michael@slashhome.org>
-
-edenc: Eden Cardim <edencardim@gmail.com>
-
-ether: Karen Etheridge <ether@cpan.org>
-
-felliott: Fitz Elliott <fitz.elliott@gmail.com>
-
-freetime: Bill Moseley <moseley@hank.org>
-
-frew: Arthur Axel "fREW" Schmidt <frioux@gmail.com>
-
-goraxe: Gordon Irving <goraxe@cpan.org>
-
-gphat: Cory G Watson <gphat@cpan.org>
-
-Grant Street Group L<http://www.grantstreet.com/>
-
-groditi: Guillermo Roditi <groditi@cpan.org>
-
-Haarg: Graham Knop <haarg@haarg.org>
-
-hobbs: Andrew Rodland <arodland@cpan.org>
-
-ilmari: Dagfinn Ilmari MannsE<aring>ker <ilmari@ilmari.org>
-
-initself: Mike Baas <mike@initselftech.com>
-
-ironcamel: Naveed Massjouni <naveedm9@gmail.com>
-
-jawnsy: Jonathan Yu <jawnsy@cpan.org>
-
-jasonmay: Jason May <jason.a.may@gmail.com>
-
-jesper: Jesper Krogh
-
-jgoulah: John Goulah <jgoulah@cpan.org>
-
-jguenther: Justin Guenther <jguenther@cpan.org>
-
-jhannah: Jay Hannah <jay@jays.net>
+=head1 AUTHORS
 
-jmac: Jason McIntosh <jmac@appleseed-sc.com>
+Even though a large portion of the source I<appears> to be written by just a
+handful of people, this library continues to remain a collaborative effort -
+perhaps one of the most successful such projects on L<CPAN|http://cpan.org>.
+It is important to remember that ideas do not always result in a direct code
+contribution, but deserve acknowledgement just the same. Time and time again
+the seemingly most insignificant questions and suggestions have been shown
+to catalyze monumental improvements in consistency, accuracy and performance.
 
-jnapiorkowski: John Napiorkowski <jjn1056@yahoo.com>
+=for comment this line is replaced with the author list at dist-building time
 
-jon: Jon Schutz <jjschutz@cpan.org>
+The canonical source of authors and their details is the F<AUTHORS> file at
+the root of this distribution (or repository). The canonical source of
+per-line authorship is the L<git repository|/HOW TO CONTRIBUTE> history
+itself.
 
-jshirley: J. Shirley <jshirley@gmail.com>
+=head1 CAT HERDERS
 
-kaare: Kaare Rasmussen
+The fine folks nudging the project in a particular direction:
 
-konobi: Scott McWhirter
-
-littlesavage: Alexey Illarionov <littlesavage@orionet.ru>
-
-lukes: Luke Saunders <luke.saunders@gmail.com>
-
-marcus: Marcus Ramberg <mramberg@cpan.org>
-
-mattlaw: Matt Lawrence
-
-mattp: Matt Phillips <mattp@cpan.org>
-
-michaelr: Michael Reddick <michael.reddick@gmail.com>
-
-milki: Jonathan Chu <milki@rescomp.berkeley.edu>
-
-mithaldu: Christian Walde <walde.christian@gmail.com>
-
-mjemmeson: Michael Jemmeson <michael.jemmeson@gmail.com>
-
-mstratman: Mark A. Stratman <stratman@gmail.com>
-
-ned: Neil de Carteret
-
-nigel: Nigel Metheringham <nigelm@cpan.org>
-
-ningu: David Kamholz <dkamholz@cpan.org>
-
-Nniuq: Ron "Quinn" Straight" <quinnfazigu@gmail.org>
-
-norbi: Norbert Buchmuller <norbi@nix.hu>
-
-nuba: Nuba Princigalli <nuba@cpan.org>
-
-Numa: Dan Sully <daniel@cpan.org>
-
-ovid: Curtis "Ovid" Poe <ovid@cpan.org>
-
-oyse: E<Oslash>ystein Torget <oystein.torget@dnv.com>
-
-paulm: Paul Makepeace
-
-penguin: K J Cheetham
-
-perigrin: Chris Prather <chris@prather.org>
-
-peter: Peter Collingbourne <peter@pcc.me.uk>
-
-Peter Siklósi <einon@einon.hu>
-
-Peter Valdemar ME<oslash>rch <peter@morch.com>
-
-phaylon: Robert Sedlacek <phaylon@dunkelheit.at>
-
-plu: Johannes Plunien <plu@cpan.org>
-
-Possum: Daniel LeWarne <possum@cpan.org>
-
-quicksilver: Jules Bean
-
-rafl: Florian Ragwitz <rafl@debian.org>
-
-rainboxx: Matthias Dietrich <perl@rb.ly>
-
-rbo: Robert Bohne <rbo@cpan.org>
-
-rbuels: Robert Buels <rmb32@cornell.edu>
-
-rdj: Ryan D Johnson <ryan@innerfence.com>
-
-ribasushi: Peter Rabbitson <ribasushi@cpan.org>
-
-rjbs: Ricardo Signes <rjbs@cpan.org>
-
-robkinyon: Rob Kinyon <rkinyon@cpan.org>
-
-Robert Olson <bob@rdolson.org>
-
-moltar: Roman Filippov <romanf@cpan.org>
-
-Sadrak: Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>
-
-sc_: Just Another Perl Hacker
-
-scotty: Scotty Allen <scotty@scottyallen.com>
-
-semifor: Marc Mims <marc@questright.com>
-
-SineSwiper: Brendan Byrd <bbyrd@cpan.org>
-
-solomon: Jared Johnson <jaredj@nmgi.com>
-
-spb: Stephen Bennett <stephen@freenode.net>
-
-Squeeks <squeek@cpan.org>
-
-sszabo: Stephan Szabo <sszabo@bigpanda.com>
-
-talexb: Alex Beamish <talexb@gmail.com>
-
-tamias: Ronald J Kimball <rjk@tamias.net>
-
-teejay : Aaron Trevena <teejay@cpan.org>
-
-Todd Lipcon
-
-Tom Hukins
-
-tonvoon: Ton Voon <tonvoon@cpan.org>
-
-triode: Pete Gamache <gamache@cpan.org>
-
-typester: Daisuke Murase <typester@cpan.org>
-
-victori: Victor Igumnov <victori@cpan.org>
-
-wdh: Will Hawes
-
-wesm: Wes Malone <wes@mitsi.com>
-
-willert: Sebastian Willert <willert@cpan.org>
-
-wreis: Wallace Reis <wreis@cpan.org>
-
-xenoterracide: Caleb Cushing <xenoterracide@gmail.com>
+=over
 
-yrlnry: Mark Jason Dominus <mjd@plover.com>
+B<ribasushi>: Peter Rabbitson <ribasushi@cpan.org>
+(present day maintenance and controlled evolution)
 
-zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
+B<castaway>: Jess Robinson <castaway@desert-island.me.uk>
+(lions share of the reference documentation and manuals)
 
-Zefram: Andrew Main <zefram@fysh.org>
+B<mst>: Matt S Trout <mst@shadowcat.co.uk> (project founder -
+original idea, architecture and implementation)
 
-=head1 COPYRIGHT
+=back
 
-Copyright (c) 2005 - 2011 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
-as listed above.
+=head1 COPYRIGHT AND LICENSE
 
-=head1 LICENSE
+Copyright (c) 2005 by mst, castaway, ribasushi, and other DBIx::Class
+L</AUTHORS> as listed above and in F<AUTHORS>.
 
 This library is free software and may be distributed under the same terms
-as perl itself.
+as perl5 itself. See F<LICENSE> for the complete licensing terms.
@@ -0,0 +1,656 @@
+=head1 NAME
+
+DBIx::Class - Extensible and flexible object <-> relational mapper.
+
+=head1 WHERE TO START READING
+
+See L<DBIx::Class::Manual::DocMap> for an overview of the exhaustive documentation.
+To get the most out of DBIx::Class with the least confusion it is strongly
+recommended to read (at the very least) the
+L<Manuals|DBIx::Class::Manual::DocMap/Manuals> in the order presented there.
+
+
+=cut
+
+=head1 GETTING HELP/SUPPORT
+
+Due to the sheer size of its problem domain, DBIx::Class is a relatively
+complex framework. After you start using DBIx::Class, questions will inevitably
+arise. If you are stuck with a problem or have doubts about a particular
+approach, do not hesitate to contact us via any of the following options (the
+list is sorted by "fastest response time"):
+
+=over
+
+=item * IRC: irc.perl.org#dbix-class
+
+=for html
+<a href="https://chat.mibbit.com/#dbix-class@irc.perl.org">(click for instant chatroom login)</a>
+
+=item * Mailing list: L<http://lists.scsys.co.uk/mailman/listinfo/dbix-class>
+
+=item * RT Bug Tracker: L<https://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class>
+
+=item * Twitter: L<https://www.twitter.com/dbix_class>
+
+=item * Web Site: L<http://www.dbix-class.org/>
+
+=back
+
+=head1 SYNOPSIS
+
+For the very impatient: L<DBIx::Class::Manual::QuickStart>
+
+The code in the next step can be generated automatically from an existing
+database; see L<dbicdump> from the distribution C<DBIx-Class-Schema-Loader>.
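+
+For example, a hypothetical invocation against an SQLite database file (the
+DSN and paths below are illustrative placeholders, not part of this
+distribution) might look like:
+
+  dbicdump -o dump_directory=./lib MyApp::Schema dbi:SQLite:myapp.db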
+
+=head2 Schema classes preparation
+
+Create a schema class called F<MyApp/Schema.pm>:
+
+  package MyApp::Schema;
+  use base qw/DBIx::Class::Schema/;
+
+  __PACKAGE__->load_namespaces();
+
+  1;
+
+Create a result class to represent artists, who have many CDs, in
+F<MyApp/Schema/Result/Artist.pm>:
+
+See L<DBIx::Class::ResultSource> for docs on defining result classes.
+
+  package MyApp::Schema::Result::Artist;
+  use base qw/DBIx::Class::Core/;
+
+  __PACKAGE__->table('artist');
+  __PACKAGE__->add_columns(qw/ artistid name /);
+  __PACKAGE__->set_primary_key('artistid');
+  __PACKAGE__->has_many(cds => 'MyApp::Schema::Result::CD', 'artistid');
+
+  1;
+
+A result class to represent a CD, which belongs to an artist, in
+F<MyApp/Schema/Result/CD.pm>:
+
+  package MyApp::Schema::Result::CD;
+  use base qw/DBIx::Class::Core/;
+
+  __PACKAGE__->load_components(qw/InflateColumn::DateTime/);
+  __PACKAGE__->table('cd');
+  __PACKAGE__->add_columns(qw/ cdid artistid title year /);
+  __PACKAGE__->set_primary_key('cdid');
+  __PACKAGE__->belongs_to(artist => 'MyApp::Schema::Result::Artist', 'artistid');
+
+  1;
+
+=head2 API usage
+
+Then you can use these classes in your application's code:
+
+  # Connect to your database.
+  use MyApp::Schema;
+  my $schema = MyApp::Schema->connect($dbi_dsn, $user, $pass, \%dbi_params);
+
+  # Query for all artists and put them in an array,
+  # or retrieve them as a result set object.
+  # $schema->resultset returns a DBIx::Class::ResultSet
+  my @all_artists = $schema->resultset('Artist')->all;
+  my $all_artists_rs = $schema->resultset('Artist');
+
+  # Output all artist names
+  # $artist here is a DBIx::Class::Row, which has accessors
+  # for all its columns. Rows are also subclasses of your Result class.
+  foreach my $artist (@all_artists) {
+    print $artist->name, "\n";
+  }
+
+  # Create a result set to search for artists.
+  # This does not query the DB.
+  my $johns_rs = $schema->resultset('Artist')->search(
+    # Build your WHERE using an SQL::Abstract structure:
+    { name => { like => 'John%' } }
+  );
+
+  # Execute a joined query to get the cds.
+  my @all_john_cds = $johns_rs->search_related('cds')->all;
+
+  # Fetch the next available row.
+  my $first_john = $johns_rs->next;
+
+  # Specify ORDER BY on the query.
+  my $first_john_cds_by_title_rs = $first_john->cds(
+    undef,
+    { order_by => 'title' }
+  );
+
+  # Create a result set that will fetch the artist data
+  # at the same time as it fetches CDs, using only one query.
+  my $millennium_cds_rs = $schema->resultset('CD')->search(
+    { year => 2000 },
+    { prefetch => 'artist' }
+  );
+
+  my $cd = $millennium_cds_rs->next; # SELECT ... FROM cds JOIN artists ...
+  my $cd_artist_name = $cd->artist->name; # Already has the data so no 2nd query
+
+  # new() makes a Result object but doesn't insert it into the DB.
+  # create() is the same as new() then insert().
+  my $new_cd = $schema->resultset('CD')->new({ title => 'Spoon' });
+  $new_cd->artist($cd->artist);
+  $new_cd->insert; # Auto-increment primary key filled in after INSERT
+  $new_cd->title('Fork');
+
+  $schema->txn_do(sub { $new_cd->update }); # Runs the update in a transaction
+
+  # change the year of all the millennium CDs at once
+  $millennium_cds_rs->update({ year => 2002 });
+
+=head1 DESCRIPTION
+
+This is an SQL to OO mapper with an object API inspired by L<Class::DBI>
+(with a compatibility layer as a springboard for porting) and a resultset API
+that allows abstract encapsulation of database operations. It aims to make
+representing queries in your code as perl-ish as possible while still
+providing access to as many of the capabilities of the database as possible,
+including retrieving related records from multiple tables in a single query,
+C<JOIN>, C<LEFT JOIN>, C<COUNT>, C<DISTINCT>, C<GROUP BY>, C<ORDER BY> and
+C<HAVING> support.
+
+DBIx::Class can handle multi-column primary and foreign keys, complex
+queries and database-level paging, and does its best to only query the
+database in order to return something you've directly asked for. If a
+resultset is used as an iterator it only fetches rows off the statement
+handle as requested in order to minimise memory usage. It has auto-increment
+support for SQLite, MySQL, PostgreSQL, Oracle, SQL Server and DB2 and is
+known to be used in production on at least the first four, and is fork-
+and thread-safe out of the box (although
+L<your DBD may not be|DBI/Threads and Thread Safety>).
+
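+As an illustration of the database-level paging mentioned above, a page of
+results can be requested directly via resultset attributes (a minimal sketch
+reusing the C<Artist> source from the L</SYNOPSIS>):
+
+  # only the 10 rows making up page 3 are ever fetched from the database
+  my $page3_rs = $schema->resultset('Artist')->search(undef, {
+    order_by => 'name',
+    rows     => 10,
+    page     => 3,
+  });
+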
+This project is still under rapid development, so large new features may be
+marked B<experimental> - such APIs are still usable but may have edge-case bugs.
+Failing test cases are I<always> welcome and point releases are put out rapidly
+as bugs are found and fixed.
+
+We do our best to maintain full backwards compatibility for published
+APIs, since DBIx::Class is used in production in many organisations,
+and even backwards incompatible changes to non-published APIs will be fixed
+if they're reported and doing so doesn't cost the codebase anything.
+
+The test suite is quite substantial, and several developer releases
+are generally made to CPAN before the branch for the next release is
+merged back to trunk for a major release.
+
+=head1 HOW TO CONTRIBUTE
+
+Contributions are always welcome, in all usable forms (we especially
+welcome documentation improvements). The delivery methods include git-
+or unified-diff formatted patches, GitHub pull requests, or plain bug
+reports either via RT or the Mailing list. Contributors are generally
+granted access to the official repository after their first several
+patches pass review. Don't hesitate to
+L<contact|/GETTING HELP/SUPPORT> either of the L</CAT HERDERS> with
+any further questions you may have.
+
+=for comment
+FIXME: Getty, frew and jnap need to get off their asses and finish the contrib section so we can link it here ;)
+
+This project is maintained in a git repository. The code and related tools are
+accessible at the following locations:
+
+=over
+
+=item * Official repo: L<git://git.shadowcat.co.uk/dbsrgits/DBIx-Class.git>
+
+=item * Official gitweb: L<http://git.shadowcat.co.uk/gitweb/gitweb.cgi?p=dbsrgits/DBIx-Class.git>
+
+=item * GitHub mirror: L<https://github.com/dbsrgits/DBIx-Class>
+
+=item * Authorized committers: L<ssh://dbsrgits@git.shadowcat.co.uk/DBIx-Class.git>
+
+=item * Travis-CI log: L<https://travis-ci.org/dbsrgits/dbix-class/builds>
+
+=for html
+&#x21AA; Stable branch CI status: <img src="https://secure.travis-ci.org/dbsrgits/dbix-class.png?branch=master"></img>
+
+=back
+
+=head1 AUTHORS
+
+Even though a large portion of the source I<appears> to be written by just a
+handful of people, this library remains a collaborative effort -
+perhaps one of the most successful such projects on L<CPAN|http://cpan.org>.
+It is important to remember that ideas do not always result in a direct code
+contribution, but deserve acknowledgement just the same. Time and time again
+the seemingly most insignificant questions and suggestions have been shown
+to catalyze monumental improvements in consistency, accuracy and performance.
+
+List of the awesome contributors who made DBIC v0.082800 possible
+
+=encoding utf8
+
+=over
+
+B<abraxxa>:Alexander Hartmaier <abraxxa@cpan.org>
+
+B<acca>:Alexander Kuznetsov <acca@cpan.org>
+
+B<aherzog>:Adam Herzog <adam@herzogdesigns.com>
+
+Alexander Keusch <cpan@keusch.at>
+
+B<alexrj>:Alessandro Ranellucci <aar@cpan.org>
+
+B<alnewkirk>:Al Newkirk <github@alnewkirk.com>
+
+B<amiri>:Amiri Barksdale <amiribarksdale@gmail.com>
+
+B<amoore>:Andrew Moore <amoore@cpan.org>
+
+Andrew Mehta <Andrew@unitedgames.co.uk>
+
+B<andrewalker>:Andre Walker <andre@andrewalker.net>
+
+B<andyg>:Andy Grundman <andy@hybridized.org>
+
+B<ank>:Andres Kievsky <ank@ank.com.ar>
+
+B<arc>:Aaron Crane <arc@cpan.org>
+
+B<arcanez>:Justin Hunter <justin.d.hunter@gmail.com>
+
+B<ash>:Ash Berlin <ash@cpan.org>
+
+B<bert>:Norbert Csongrádi <bert@cpan.org>
+
+B<bfwg>:Colin Newell <colin.newell@gmail.com>
+
+B<blblack>:Brandon L. Black <blblack@gmail.com>
+
+B<bluefeet>:Aran Deltac <bluefeet@cpan.org>
+
+B<boghead>:Bryan Beeley <cpan@beeley.org>
+
+B<bphillips>:Brian Phillips <bphillips@cpan.org>
+
+B<brd>:Brad Davis <brd@FreeBSD.org>
+
+Brian Kirkbride <brian.kirkbride@deeperbydesign.com>
+
+B<bricas>:Brian Cassidy <bricas@cpan.org>
+
+B<brunov>:Bruno Vecchi <vecchi.b@gmail.com>
+
+B<caelum>:Rafael Kitover <rkitover@cpan.org>
+
+B<caldrin>:Maik Hentsche <maik.hentsche@amd.com>
+
+B<castaway>:Jess Robinson <castaway@desert-island.me.uk>
+
+B<chorny>:Alexandr Ciornii <alexchorny@gmail.com>
+
+B<claco>:Christopher H. Laco <claco@cpan.org>
+
+B<clkao>:CL Kao <clkao@clkao.org>
+
+Ctrl-O L<http://ctrlo.com/|http://ctrlo.com/>
+
+B<da5id>:David Jack Olrik <david@olrik.dk>
+
+B<dams>:Damien Krotkine <dams@cpan.org>
+
+B<dandv>:Dan Dascalescu <ddascalescu+github@gmail.com>
+
+B<dariusj>:Darius Jokilehto <dariusjokilehto@yahoo.co.uk>
+
+B<davewood>:David Schmidt <mail@davidschmidt.at>
+
+B<daxim>:Lars Dɪᴇᴄᴋᴏᴡ 迪拉斯 <daxim@cpan.org>
+
+B<dduncan>:Darren Duncan <darren@darrenduncan.net>
+
+B<debolaz>:Anders Nor Berle <berle@cpan.org>
+
+B<dew>:Dan Thomas <dan@godders.org>
+
+B<dim0xff>:Dmitry Latin <dim0xff@gmail.com>
+
+B<dkubb>:Dan Kubb <dan.kubb-cpan@onautopilot.com>
+
+B<dnm>:Justin Wheeler <jwheeler@datademons.com>
+
+B<dpetrov>:Dimitar Petrov <mitakaa@gmail.com>
+
+B<dsteinbrunner>:David Steinbrunner <dsteinbrunner@pobox.com>
+
+B<duncan_dmg>:Duncan Garland <Duncan.Garland@motortrak.com>
+
+B<dwc>:Daniel Westermann-Clark <danieltwc@cpan.org>
+
+B<dyfrgi>:Michael Leuchtenburg <michael@slashhome.org>
+
+B<edenc>:Eden Cardim <edencardim@gmail.com>
+
+Eligo L<http://eligo.co.uk/|http://eligo.co.uk/>
+
+B<ether>:Karen Etheridge <ether@cpan.org>
+
+B<evdb>:Edmund von der Burg <evdb@ecclestoad.co.uk>
+
+B<faxm0dem>:Fabien Wernli <cpan@faxm0dem.org>
+
+B<felliott>:Fitz Elliott <fitz.elliott@gmail.com>
+
+B<freetime>:Bill Moseley <moseley@hank.org>
+
+B<frew>:Arthur Axel "fREW" Schmidt <frioux@gmail.com>
+
+B<gbjk>:Gareth Kirwan <gbjk@thermeon.com>
+
+B<Getty>:Torsten Raudssus <torsten@raudss.us>
+
+B<goraxe>:Gordon Irving <goraxe@cpan.org>
+
+B<gphat>:Cory G Watson <gphat@cpan.org>
+
+Grant Street Group L<http://www.grantstreet.com/|http://www.grantstreet.com/>
+
+B<groditi>:Guillermo Roditi <groditi@cpan.org>
+
+B<gshank>:Gerda Shank <gshank@cpan.org>
+
+B<guacamole>:Fred Steinberg <fred.steinberg@gmail.com>
+
+B<Haarg>:Graham Knop <haarg@haarg.org>
+
+B<hobbs>:Andrew Rodland <andrew@cleverdomain.org>
+
+Ian Wells <ijw@cack.org.uk>
+
+B<idn>:Ian Norton <i.norton@shadowcat.co.uk>
+
+B<ilmari>:Dagfinn Ilmari Mannsåker <ilmari@ilmari.org>
+
+B<initself>:Mike Baas <mike@initselftech.com>
+
+B<ironcamel>:Naveed Massjouni <naveedm9@gmail.com>
+
+B<jasonmay>:Jason May <jason.a.may@gmail.com>
+
+B<jawnsy>:Jonathan Yu <jawnsy@cpan.org>
+
+B<jegade>:Jens Gassmann <jens.gassmann@atomix.de>
+
+B<jeneric>:Eric A. Miller <emiller@cpan.org>
+
+B<jesper>:Jesper Krogh <jesper@krogh.cc>
+
+Jesse Sheidlower <jester@panix.com>
+
+B<jgoulah>:John Goulah <jgoulah@cpan.org>
+
+B<jguenther>:Justin Guenther <jguenther@cpan.org>
+
+B<jhannah>:Jay Hannah <jay@jays.net>
+
+B<jmac>:Jason McIntosh <jmac@appleseed-sc.com>
+
+B<jmmills>:Jason M. Mills <jmmills@cpan.org>
+
+B<jnapiorkowski>:John Napiorkowski <jjn1056@yahoo.com>
+
+Joe Carlson <jwcarlson@lbl.gov>
+
+B<jon>:Jon Schutz <jjschutz@cpan.org>
+
+Jordan Metzmeier <jmetzmeier@magazines.com>
+
+B<jshirley>:J. Shirley <jshirley@gmail.com>
+
+B<kaare>:Kaare Rasmussen
+
+B<kd>:Kieren Diment <diment@gmail.com>
+
+B<konobi>:Scott McWhirter <konobi@cpan.org>
+
+B<lejeunerenard>:Sean Zellmer <sean@lejeunerenard.com>
+
+B<littlesavage>:Alexey Illarionov <littlesavage@orionet.ru>
+
+B<lukes>:Luke Saunders <luke.saunders@gmail.com>
+
+B<marcus>:Marcus Ramberg <mramberg@cpan.org>
+
+B<mateu>:Mateu X. Hunter <hunter@missoula.org>
+
+Matt LeBlanc <antirice@gmail.com>
+
+Matt Sickler <imMute@msk4.com>
+
+B<mattlaw>:Matt Lawrence
+
+B<mattp>:Matt Phillips <mattp@cpan.org>
+
+B<mdk>:Mark Keating <m.keating@shadowcat.co.uk>
+
+B<melo>:Pedro Melo <melo@simplicidade.org>
+
+B<metaperl>:Terrence Brannon <metaperl@gmail.com>
+
+B<michaelr>:Michael Reddick <michael.reddick@gmail.com>
+
+B<milki>:Jonathan Chu <milki@rescomp.berkeley.edu>
+
+B<minty>:Murray Walker <perl@minty.org>
+
+B<mithaldu>:Christian Walde <walde.christian@gmail.com>
+
+B<mjemmeson>:Michael Jemmeson <michael.jemmeson@gmail.com>
+
+B<mna>:Maya
+
+B<mo>:Moritz Onken <onken@netcubed.de>
+
+B<moltar>:Roman Filippov <romanf@cpan.org>
+
+B<moritz>:Moritz Lenz <moritz@faui2k3.org>
+
+B<mrf>:Mike Francis <ungrim97@gmail.com>
+
+B<mst>:Matt S. Trout <mst@shadowcat.co.uk>
+
+B<mstratman>:Mark A. Stratman <stratman@gmail.com>
+
+B<ned>:Neil de Carteret <n3dst4@gmail.com>
+
+B<nigel>:Nigel Metheringham <nigelm@cpan.org>
+
+B<ningu>:David Kamholz <dkamholz@cpan.org>
+
+B<Nniuq>:Ron "Quinn" Straight <quinnfazigu@gmail.org>
+
+B<norbi>:Norbert Buchmuller <norbi@nix.hu>
+
+B<nothingmuch>:Yuval Kogman <nothingmuch@woobling.org>
+
+B<nuba>:Nuba Princigalli <nuba@cpan.org>
+
+B<Numa>:Dan Sully <daniel@cpan.org>
+
+B<oalders>:Olaf Alders <olaf@wundersolutions.com>
+
+Olly Betts <olly@survex.com>
+
+B<osfameron>:Hakim Cassimally <osfameron@cpan.org>
+
+B<ovid>:Curtis "Ovid" Poe <ovid@cpan.org>
+
+B<oyse>:Øystein Torget <oystein.torget@dnv.com>
+
+B<paulm>:Paul Makepeace <paulm+pause@paulm.com>
+
+B<penguin>:K J Cheetham <jamie@shadowcatsystems.co.uk>
+
+B<perigrin>:Chris Prather <chris@prather.org>
+
+Peter Siklósi <einon@einon.hu>
+
+Peter Valdemar Mørch <peter@morch.com>
+
+B<peter>:Peter Collingbourne <peter@pcc.me.uk>
+
+B<phaylon>:Robert Sedlacek <phaylon@dunkelheit.at>
+
+B<plu>:Johannes Plunien <plu@cpan.org>
+
+B<Possum>:Daniel LeWarne <possum@cpan.org>
+
+B<pplu>:Jose Luis Martinez <jlmartinez@capside.com>
+
+B<quicksilver>:Jules Bean <jules@jellybean.co.uk>
+
+B<racke>:Stefan Hornburg <racke@linuxia.de>
+
+B<rafl>:Florian Ragwitz <rafl@debian.org>
+
+B<rainboxx>:Matthias Dietrich <perl@rb.ly>
+
+B<rbo>:Robert Bohne <rbo@cpan.org>
+
+B<rbuels>:Robert Buels <rmb32@cornell.edu>
+
+B<rdj>:Ryan D Johnson <ryan@innerfence.com>
+
+B<Relequestual>:Ben Hutton <relequestual@gmail.com>
+
+B<renormalist>:Steffen Schwigon <schwigon@cpan.org>
+
+B<ribasushi>:Peter Rabbitson <ribasushi@cpan.org>
+
+B<rjbs>:Ricardo Signes <rjbs@cpan.org>
+
+Robert Krimen <rkrimen@cpan.org>
+
+Robert Olson <bob@rdolson.org>
+
+B<robkinyon>:Rob Kinyon <rkinyon@cpan.org>
+
+Roman Ardern-Corris <spam_in@3legs.com>
+
+B<ruoso>:Daniel Ruoso <daniel@ruoso.com>
+
+B<Sadrak>:Felix Antonius Wilhelm Ostmann <sadrak@cpan.org>
+
+B<sc_>:Just Another Perl Hacker
+
+B<schwern>:Michael G Schwern <mschwern@cpan.org>
+
+Scott R. Godin <webdragon.net@gmail.com>
+
+B<scotty>:Scotty Allen <scotty@scottyallen.com>
+
+B<semifor>:Marc Mims <marc@questright.com>
+
+Simon Elliott <cpan@browsing.co.uk>
+
+B<SineSwiper>:Brendan Byrd <perl@resonatorsoft.org>
+
+B<skaufman>:Samuel Kaufman <sam@socialflow.com>
+
+B<solomon>:Jared Johnson <jaredj@nmgi.com>
+
+B<spb>:Stephen Bennett <stephen@freenode.net>
+
+Squeeks <squeek@cpan.org>
+
+B<srezic>:Slaven Rezic <slaven@rezic.de>
+
+B<sszabo>:Stephan Szabo <sszabo@bigpanda.com>
+
+Stephen Peters <steve@stephenpeters.me>
+
+B<stonecolddevin>:Devin Austin <dhoss@cpan.org>
+
+B<talexb>:Alex Beamish <talexb@gmail.com>
+
+B<tamias>:Ronald J Kimball <rjk@tamias.net>
+
+B<TBSliver>:Tom Bloor <t.bloor@shadowcat.co.uk>
+
+B<teejay>:Aaron Trevena <teejay@cpan.org>
+
+B<theorbtwo>:James Mastros <james@mastros.biz>
+
+Thomas Kratz <tomk@cpan.org>
+
+B<timbunce>:Tim Bunce <tim.bunce@pobox.com>
+
+Todd Lipcon
+
+Tom Hukins <tom@eborcom.com>
+
+B<tommy>:Tommy Butler <tbutler.cpan.org@internetalias.net>
+
+B<tonvoon>:Ton Voon <ton.voon@opsview.com>
+
+B<triode>:Pete Gamache <gamache@cpan.org>
+
+B<typester>:Daisuke Murase <typester@cpan.org>
+
+B<uree>:Oriol Soriano <oriol.soriano@capside.com>
+
+B<uwe>:Uwe Voelker <uwe@uwevoelker.de>
+
+B<victori>:Victor Igumnov <victori@cpan.org>
+
+B<wdh>:Will Hawes <wdhawes@gmail.com>
+
+B<wesm>:Wes Malone <wes@mitsi.com>
+
+B<willert>:Sebastian Willert <willert@cpan.org>
+
+B<wintermute>:Toby Corkindale <tjc@cpan.org>
+
+B<wreis>:Wallace Reis <wreis@cpan.org>
+
+B<xenoterracide>:Caleb Cushing <xenoterracide@gmail.com>
+
+B<yrlnry>:Mark Jason Dominus <mjd@plover.com>
+
+B<zamolxes>:Bogdan Lucaciu <bogdan@wiz.ro>
+
+B<Zefram>:Andrew Main <zefram@fysh.org>
+
+=back
+
+The canonical source of authors and their details is the F<AUTHORS> file at
+the root of this distribution (or repository). The canonical source of
+per-line authorship is the L<git repository|/HOW TO CONTRIBUTE> history
+itself.
+
+=head1 CAT HERDERS
+
+The fine folks nudging the project in a particular direction:
+
+=over
+
+B<ribasushi>: Peter Rabbitson <ribasushi@cpan.org>
+(present day maintenance and controlled evolution)
+
+B<castaway>: Jess Robinson <castaway@desert-island.me.uk>
+(lion's share of the reference documentation and manuals)
+
+B<mst>: Matt S Trout <mst@shadowcat.co.uk> (project founder -
+original idea, architecture and implementation)
+
+=back
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright (c) 2005 by mst, castaway, ribasushi, and other DBIx::Class
+L</AUTHORS> as listed above and in F<AUTHORS>.
+
+This library is free software and may be distributed under the same terms
+as perl5 itself. See F<LICENSE> for the complete licensing terms.
@@ -163,8 +163,8 @@ sub parse {
         # global add_fk_index set in parser_args
         my $add_fk_index = (exists $args->{add_fk_index} && ! $args->{add_fk_index}) ? 0 : 1;
 
-        foreach my $rel (sort @rels)
-        {
+        REL:
+        foreach my $rel (sort @rels) {
 
             my $rel_info = $source->relationship_info($rel);
 
@@ -173,7 +173,7 @@ sub parse {
 
             my $relsource = try { $source->related_source($rel) };
             unless ($relsource) {
-              carp "Ignoring relationship '$rel' - related resultsource '$rel_info->{class}' is not registered with this schema\n";
+              carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '$rel_info->{class}' is not registered with this schema\n";
               next;
             };
 
@@ -186,13 +186,18 @@ sub parse {
             # support quoting properly to be signaled about this
             $rel_table = $$rel_table if ref $rel_table eq 'SCALAR';
 
-            my $reverse_rels = $source->reverse_relationship_info($rel);
-            my ($otherrelname, $otherrelationship) = each %{$reverse_rels};
-
             # Force the order of @cond to match the order of ->add_columns
             my $idx;
             my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns;
-            my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}});
+
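+            # sanity-check the declared condition: every 'foreign.<col>' key
+            # must name a real column on the related source, otherwise the
+            # relationship is skipped with a warning below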
+            for ( keys %{$rel_info->{cond}} ) {
+              unless (exists $other_columns_idx{$_}) {
+                carp "Ignoring relationship '$rel' on '$moniker' - related resultsource '@{[ $relsource->source_name ]}' does not contain one of the specified columns: '$_'\n";
+                next REL;
+              }
+            }
+
+            my @cond = sort { $other_columns_idx{$a} <=> $other_columns_idx{$b} } keys(%{$rel_info->{cond}});
 
             # Get the key information, mapping off the foreign/self markers
             my @refkeys = map {/^\w+\.(\w+)$/} @cond;
@@ -217,6 +222,8 @@ sub parse {
                 $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
             }
 
+            my ($otherrelname, $otherrelationship) = %{ $source->reverse_relationship_info($rel) };
+
             my $cascade;
             for my $c (qw/delete update/) {
                 if (exists $rel_info->{attrs}{"on_$c"}) {
@@ -252,9 +259,12 @@ sub parse {
                     $tables{$table_name}{foreign_table_deps}{$rel_table}++;
                   }
 
+                  # trim schema before generating constraint/index names
+                  (my $table_abbrev = $table_name) =~ s/ ^ [^\.]+ \. //x;
+
                   $table->add_constraint(
                     type             => 'foreign_key',
-                    name             => join('_', $table_name, 'fk', @keys),
+                    name             => join('_', $table_abbrev, 'fk', @keys),
                     fields           => \@keys,
                     reference_fields => \@refkeys,
                     reference_table  => $rel_table,
@@ -275,8 +285,9 @@ sub parse {
                   next if join("\x00", @keys) eq join("\x00", @primary);
 
                   if ($add_fk_index_rel) {
+                      (my $idx_name = $table_name) =~ s/ ^ [^\.]+ \. //x;
                       my $index = $table->add_index(
-                          name   => join('_', $table_name, 'idx', @keys),
+                          name   => join('_', $idx_name, 'idx', @keys),
                           fields => \@keys,
                           type   => 'NORMAL',
                       );
@@ -517,12 +528,13 @@ Limit the amount of parsed sources by supplying an explicit list of source names
 
 L<SQL::Translator>, L<DBIx::Class::Schema>
 
-=head1 AUTHORS
-
-See L<DBIx::Class/CONTRIBUTORS>.
+=head1 FURTHER QUESTIONS?
 
-=head1 LICENSE
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
 
-You may distribute this code under the same terms as Perl itself.
+=head1 COPYRIGHT AND LICENSE
 
-=cut
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
@@ -16,6 +16,17 @@ SQL::Translator::Producer::DBIx::Class::File - DBIx::Class file producer
 
 Creates a DBIx::Class::Schema for use with DBIx::Class
 
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+
 =cut
 
 use strict;
@@ -17,8 +17,8 @@ Meta->tests(join (' ', map { $_ || () } @xt_tests, Meta->tests ) );
 # inject an explicit xt test run, mainly to check the contents of
 # lib and the generated POD's *before* anything is copied around
 #
-# at the end rerun the whitespace test in the distdir, to make sure everything
-# is pristine
+# at the end rerun the whitespace and footer tests in the distdir
+# to make sure everything is pristine
 postamble <<"EOP";
 
 dbic_clonedir_copy_generated_pod : test_xt
@@ -43,9 +43,9 @@ test_xt : pm_to_blib
   )
 ]}
 
-create_distdir : dbic_distdir_retest_whitespace
+create_distdir : dbic_distdir_retest_ws_and_footers
 
-dbic_distdir_retest_whitespace :
+dbic_distdir_retest_ws_and_footers :
 \t@{[
   $mm_proto->cd (
     '$(DISTVNAME)',
@@ -55,7 +55,7 @@ dbic_distdir_retest_whitespace :
         '$(ABSPERLRUN)',
         map { $mm_proto->quote_literal($_) } qw(-Ilib -e $ENV{RELEASE_TESTING}=1;$ENV{DBICTEST_NO_MAKEFILE_VERIFICATION}=1;)
       ),
-      'xt/whitespace.t'
+      'xt/whitespace.t xt/footers.t',
     )
   )
 ]}
@@ -91,6 +91,9 @@ END {
 
   if (keys %removed_build_requires) {
     print "Regenerating META with author requires excluded\n";
+    # M::I understands unicode in meta but does not write with the right
+    # layers - fhtagn!!!
+    local $SIG{__WARN__} = sub { warn $_[0] unless $_[0] =~ /Wide character in print/ };
     Meta->write;
   }
 
@@ -1,19 +0,0 @@
-print "Appending to the no_index META list\n";
-
-# Deprecated/internal modules need no exposure when building the meta
-no_index directory => $_ for (qw|
-  lib/DBIx/Class/Admin
-  lib/DBIx/Class/PK/Auto
-  lib/DBIx/Class/CDBICompat
-  maint
-|);
-no_index package => $_ for (qw/
-  DBIx::Class::Storage::DBIHacks
-  DBIx::Class::Storage::BlockRunner
-  DBIx::Class::Carp
-  DBIx::Class::_Util
-  DBIx::Class::ResultSet::Pager
-/);
-
-# keep the Makefile.PL eval happy
-1;
@@ -0,0 +1,56 @@
+# principal author list is kinda mandated by spec, luckily it is rather static
+author 'mst: Matt S Trout <mst@shadowcat.co.uk> (project founder - original idea, architecture and implementation)';
+author 'castaway: Jess Robinson <castaway@desert-island.me.uk> (lion\'s share of the reference documentation and manuals)';
+author 'ribasushi: Peter Rabbitson <ribasushi@cpan.org> (present day maintenance and controlled evolution)';
+
+# pause sanity
+Meta->{values}{x_authority} = 'cpan:RIBASUSHI';
+
+# populate x_contributors
+# a direct dump of the sort is ok - xt/authors.t guarantees source sanity
+Meta->{values}{x_contributors} = [ do {
+  # according to #p5p this is how one safely reads random unicode
+  # this set of boilerplate is insane... wasn't perl unicode-king...?
+  no warnings 'once';
+  require Encode;
+  require PerlIO::encoding;
+  local $PerlIO::encoding::fallback = Encode::FB_CROAK();
+
+  open (my $fh, '<:encoding(UTF-8)', 'AUTHORS') or die "Unable to open AUTHORS - can't happen: $!\n";
+  map { chomp; ( (! $_ or $_ =~ /^\s*\#/) ? () : $_ ) } <$fh>;
+
+}];
+
+# legalese
+license 'perl';
+resources 'license' => 'http://dev.perl.org/licenses/';
+
+# misc resources
+abstract_from 'lib/DBIx/Class.pm';
+resources 'homepage'    => 'http://www.dbix-class.org/';
+resources 'IRC'         => 'irc://irc.perl.org/#dbix-class';
+resources 'repository'  => 'https://github.com/dbsrgits/DBIx-Class';
+resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class';
+resources 'bugtracker'  => 'http://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class';
+
+# nothing determined at runtime, except for possibly SQLT dep
+# (see the check around DBICTEST_SQLT_DEPLOY in Makefile.PL)
+dynamic_config 0;
+
+# Deprecated/internal modules need no exposure when building the meta
+no_index directory => $_ for (qw|
+  lib/DBIx/Class/Admin
+  lib/DBIx/Class/PK/Auto
+  lib/DBIx/Class/CDBICompat
+  maint
+|);
+no_index package => $_ for (qw/
+  DBIx::Class::Storage::DBIHacks
+  DBIx::Class::Storage::BlockRunner
+  DBIx::Class::Carp
+  DBIx::Class::_Util
+  DBIx::Class::ResultSet::Pager
+/);
+
+# keep the Makefile.PL eval happy
+1;
@@ -1,28 +1,24 @@
 
-my $dbic_ver_re = qr/ (\d) \. (\d{2}) (\d{3}) (?: _ (\d{2}) )? /x; # not anchored!!!
+my $dbic_ver_re = qr/ 0 \. (\d{2}) (\d{2}) (\d{2}) (?: _ (\d{2}) )? /x; # not anchored!!!
 
 my $version_string = Meta->version;
 my $version_value = eval $version_string;
 
 my ($v_maj, $v_min, $v_point, $v_dev) = $version_string =~ /^$dbic_ver_re$/
   or die sprintf (
-    "Invalid version %s (as specified in %s)\nCurrently valid version formats are M.VVPPP or M.VVPPP_DD\n",
+    "Invalid version %s (as specified in %s)\nCurrently valid version formats are 0.MMVVPP or 0.MMVVPP_DD\n",
     $version_string,
     Meta->{values}{version_from} || Meta->{values}{all_from} || 'Makefile.PL',
   )
 ;
 
-if ($v_maj != 0 or $v_min > 8) {
+if ($v_maj > 8) {
   die "Illegal version $version_string - we are still in the 0.08 cycle\n"
 }
 
-if ($v_point >= 300) {
-  die "Illegal version $version_string - we are still in the 0.082xx cycle\n"
-}
-
 Meta->makemaker_args->{DISTVNAME} = Meta->name . "-$version_string-TRIAL" if (
-  # all odd releases *after* 0.08200 generate a -TRIAL, no exceptions
-  ( $v_point > 200 and int($v_point / 100) % 2 )
+  # all odd releases *after* 0.089x generate a -TRIAL, no exceptions
+  ( $v_point > 89 )
 );
 
 
@@ -32,13 +28,15 @@ if (keys %$tags) {
   my $shipped_versions;
   my $shipped_dev_versions;
 
+  my $legacy_re = qr/^ v 0 \. (\d{2}) (\d{2}) (\d) (?: _ (\d{2}) )? $/x;
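+  # (also accept tags laid down under the older 0.MMVVP / 0.MMVVP_DD scheme)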
+
   for (keys %$tags) {
-    if ($_ =~ /^v$dbic_ver_re$/) {
+    if ($_ =~ /^v$dbic_ver_re$/ or $_ =~ $legacy_re ) {
       if (defined $4) {
-        $shipped_dev_versions->{"$1.$2$3$4"} = 1;
+        $shipped_dev_versions->{"0.$1$2$3$4"} = 1;
       }
       else {
-        $shipped_versions->{"$1.$2$3"} = 1;
+        $shipped_versions->{"0.$1$2$3"} = 1;
       }
       delete $tags->{$_};
     }
@@ -1,23 +0,0 @@
-# When a long-standing branch is updated a README may still linger around
-unlink 'README' if -f 'README';
-
-# Makefile syntax allows adding extra dep-specs for already-existing targets,
-# and simply appends them on *LAST*-come *FIRST*-serve basis.
-# This allows us to inject extra depenencies for standard EUMM targets
-
-require File::Spec;
-my $dir = File::Spec->catdir(qw(maint .Generated_Pod));
-my $fn = File::Spec->catfile($dir, 'README');
-
-postamble <<"EOP";
-
-clonedir_generate_files : dbic_clonedir_gen_readme
-
-dbic_clonedir_gen_readme :
-\t@{[ $mm_proto->oneliner('mkpath', ['-MExtUtils::Command']) ]} $dir
-\tpod2text lib/DBIx/Class.pm > $fn
-
-EOP
-
-# keep the Makefile.PL eval happy
-1;
@@ -56,6 +56,7 @@ EOP
   my $great_success;
   {
     local @ARGV = ('--documentation-as-pod', $pod_fn);
+    local $0 = 'dbicadmin';
     local *CORE::GLOBAL::exit = sub { $great_success++; die; };
     do 'script/dbicadmin';
   }
@@ -92,6 +93,32 @@ EOP
 }
 
 
+# generate the DBIx/Class.pod only during distdir
+{
+  my $dist_pod_fn = File::Spec->catfile($pod_dir, qw(lib DBIx Class.pod));
+
+  postamble <<"EOP";
+
+clonedir_generate_files : dbic_distdir_gen_dbic_pod
+
+dbic_distdir_gen_dbic_pod :
+
+\tperldoc -u lib/DBIx/Class.pm > $dist_pod_fn
+\t@{[ $mm_proto->oneliner(
+  "s!^.*?this line is replaced with the author list.*! qq{List of the awesome contributors who made DBIC v$ver possible\n\n} . qx(\$^X -Ilib maint/gen_pod_authors)!me",
+  [qw( -0777 -p -i )]
+) ]} $dist_pod_fn
+
+create_distdir : dbic_distdir_defang_authors
+
+# Remove the maintainer-only warning (be nice ;)
+dbic_distdir_defang_authors :
+\t@{[ $mm_proto->oneliner('s/ ^ \s* \# \s* \*\*\* .+ \n ( ^ \s* \# \s*? \n )? //xmg', [qw( -0777 -p -i )] ) ]} \$(DISTVNAME)/AUTHORS
+
+EOP
+}
+
+
 # on some OSes generated files may have an incorrect \n - fix it
 # so that the xt tests pass on a fresh checkout (also shipping a
 # dist with CRLFs is beyond obnoxious)
@@ -0,0 +1,42 @@
+# When a long-standing branch is updated a README may still linger around
+unlink 'README' if -f 'README';
+
+# Makefile syntax allows adding extra dep-specs for already-existing targets,
+# and simply appends them on a *LAST*-come *FIRST*-serve basis.
+# This allows us to inject extra dependencies for standard EUMM targets
+
+require File::Spec;
+my $dir = File::Spec->catdir(qw(maint .Generated_Pod));
+my $r_fn = File::Spec->catfile($dir, 'README');
+
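+# helper: emit Makefile recipe lines that recreate the given file from
+# scratch, seeding it with the standard copyright/attribution banner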
+my $start_file = sub {
+  my $fn = $mm_proto->quote_literal(shift);
+  return join "\n",
+    qq{\t\$(NOECHO) \$(RM_F) $fn},
+    ( map { qq(\t\$(NOECHO) \$(ECHO) "$_" >> $fn) } (
+      "DBIx::Class is Copyright (c) 2005-@{[ (gmtime)[5] + 1900  ]} by mst, castaway, ribasushi, and others.",
+      "See AUTHORS and LICENSE included with this distribution. All rights reserved.",
+      "",
+    )),
+  ;
+};
+
+postamble <<"EOP";
+
+clonedir_generate_files : dbic_clonedir_gen_readme
+
+dbic_clonedir_gen_readme : dbic_distdir_gen_dbic_pod
+@{[ $start_file->($r_fn) ]}
+\tpod2text $dir/lib/DBIx/Class.pod >> $r_fn
+
+create_distdir : dbic_distdir_regen_license
+
+dbic_distdir_regen_license :
+@{[ $start_file->( File::Spec->catfile( Meta->name . '-' . Meta->version, 'LICENSE') ) ]}
+\t@{[ $mm_proto->oneliner('cat', ['-MExtUtils::Command']) ]} LICENSE >> \$(DISTVNAME)/LICENSE
+
+EOP
+
+
+# keep the Makefile.PL eval happy
+1;
@@ -0,0 +1,27 @@
+#!/usr/bin/env perl
+
+use warnings;
+use strict;
+
+# we will be outputting *ENCODED* utf8, hence the raw open below
+# the file is already sanity-checked by xt/authors.t
+my @known_authors = do {
+  open (my $fh, '<:raw', 'AUTHORS') or die "Unable to open AUTHORS - can't happen: $!\n";
+  map { chomp; ( ( ! $_ or $_ =~ /^\s*\#/ ) ? () : $_ ) } <$fh>;
+} or die "Known AUTHORS file seems empty... can't happen...";
+
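+# embolden the leading "nick:" prefix of each author line for POD output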
+$_ =~ s!^ ( [^\:]+ ) : \s !B<$1>:!x
+  for @known_authors;
+
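+# wrap any bare http(s) URL in an explicit POD hyperlink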
+$_ =~ s!( \b https? :// [^\s\>]+ )!L<$1|$1>!x
+  for @known_authors;
+
+print join "\n\n",
+  '=encoding utf8',
+  '=over',
+  @known_authors,
+  '=back',
+  '',
+;
+
+1;
@@ -3,6 +3,8 @@
 use warnings;
 use strict;
 
+use DBIx::Class::_Util; # load early in case any shims are needed
+
 my $lib_dir = 'lib';
 my $pod_dir = 'maint/.Generated_Pod';
 
@@ -0,0 +1,52 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+
+use Config;
+use Term::ANSIColor ':constants';
+my $CRST = RESET;
+my $CCODE = BOLD;
+my $CSTAT = BOLD . GREEN;
+my $CCORE = BOLD . CYAN;
+my $CSIG = CYAN;
+
+if (@ARGV) {
+  my $code = system (@ARGV);
+
+  if ($code < 0) {
+    exit 127;
+  }
+  elsif ($code > 0) {
+
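+    # decode the 16-bit wait status returned by system():
+    # high byte = exit status, low 7 bits = signal number, bit 7 = core dump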
+    my $status = $code >> 8;
+    my $signum = $code & 127;
+    my $core = $code & 128;
+
+    my %sig_idx;
+    @sig_idx{split /\s+/, $Config{sig_num}} = split /\s/, $Config{sig_name};
+
+    printf STDERR (
+<<EOF
+
+Results of execution: `%s`
+----------------------
+System exit code:$CCODE %d $CRST$CSIG %s $CRST
+ ($CSTAT%08b$CRST$CCORE%b$CRST$CSIG%07b$CRST)
+
+Status: %3s ($CSTAT%08b$CRST)
+Signal: %3s ($CSIG%08b$CRST)
+Core:   %3s
+----------------------
+EOF
+    , (join ' ', @ARGV),
+      $code, ($signum ? "(SIG-$sig_idx{$signum})" : ''),
+      $status, $core, $signum,
+      ($status) x 2,
+      ($signum) x 2,
+      ($core ? 'Yes': 'No')
+    );
+
+    exit ($status);
+  }
+}
@@ -1,188 +0,0 @@
-#!/bin/bash
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-# Different boxes we run on may have different amount of hw threads
-# Hence why we need to query
-# Originally we used to read /sys/devices/system/cpu/online
-# but it is not available these days (odd). Thus we fall to
-# the alwas-present /proc/cpuinfo
-# The oneliner is a tad convoluted - basicaly what we do is
-# slurp the entire file and get the index off the last
-# `processor    : XX` line
-export NUMTHREADS="$(( $(perl -0777 -n -e 'print (/ (?: .+ ^ processor \s+ : \s+ (\d+) ) (?! ^ processor ) /smx)' < /proc/cpuinfo) + 1 ))"
-
-export CACHE_DIR="/tmp/poormanscache"
-
-# install some common tools from APT, more below unless CLEANTEST
-apt_install libapp-nopaste-perl tree apt-transport-https
-
-# FIXME - the debian package is oddly broken - uses a bin/env based shebang
-# so nothing works under a brew. Fix here until #debian-perl patches it up
-sudo /usr/bin/perl -p -i -e 's|#!/usr/bin/env perl|#!/usr/bin/perl|' $(which nopaste)
-
-if [[ "$CLEANTEST" != "true" ]]; then
-### apt-get invocation - faster to grab everything at once
-  #
-  # FIXME these debconf lines should automate the firebird config but do not :(((
-  sudo bash -c 'echo -e "firebird2.5-super\tshared/firebird/enabled\tboolean\ttrue" | debconf-set-selections'
-  sudo bash -c 'echo -e "firebird2.5-super\tshared/firebird/sysdba_password/new_password\tpassword\t123" | debconf-set-selections'
-
-  # add extra APT repo for Oracle
-  # (https is critical - apt-get update can't seem to follow the 302)
-  sudo bash -c 'echo -e "\ndeb [arch=i386] https://oss.oracle.com/debian unstable main non-free" >> /etc/apt/sources.list'
-
-  run_or_err "Cloning poor man's cache from github" "git clone --depth=1 --branch=poor_mans_travis_cache https://github.com/ribasushi/travis_futzing.git $CACHE_DIR && $CACHE_DIR/reassemble"
-
-  run_or_err "Priming up the APT cache with $(echo $(ls -d $CACHE_DIR/apt_cache/*.deb))" "sudo cp $CACHE_DIR/apt_cache/*.deb /var/cache/apt/archives"
-
-  apt_install memcached firebird2.5-super firebird2.5-dev unixodbc-dev expect oracle-xe
-
-### config memcached
-  run_or_err "Starting memcached" "sudo /etc/init.d/memcached start"
-  export DBICTEST_MEMCACHED=127.0.0.1:11211
-
-### config mysql
-  run_or_err "Creating MySQL TestDB" "mysql -e 'create database dbic_test;'"
-  export DBICTEST_MYSQL_DSN='dbi:mysql:database=dbic_test;host=127.0.0.1'
-  export DBICTEST_MYSQL_USER=root
-
-### config pg
-  run_or_err "Creating PostgreSQL TestDB" "psql -c 'create database dbic_test;' -U postgres"
-  export DBICTEST_PG_DSN='dbi:Pg:database=dbic_test;host=127.0.0.1'
-  export DBICTEST_PG_USER=postgres
-
-### conig firebird
-  # poor man's deb config
-  EXPECT_FB_SCRIPT='
-    spawn dpkg-reconfigure --frontend=text firebird2.5-super
-    expect "Enable Firebird server?"
-    send "\177\177\177\177yes\r"
-    expect "Password for SYSDBA"
-    send "123\r"
-    sleep 1
-    expect eof
-  '
-  # creating testdb
-  # FIXME - this step still fails from time to time >:(((
-  # has to do with the FB reconfiguration I suppose
-  # for now if it fails twice - simply skip FB testing
-  for i in 1 2 ; do
-
-    run_or_err "Re-configuring Firebird" "
-      sync
-      DEBIAN_FRONTEND=text sudo expect -c '$EXPECT_FB_SCRIPT'
-      sleep 1
-      sync
-      # restart the server for good measure
-      sudo /etc/init.d/firebird2.5-super stop || true
-      sleep 1
-      sync
-      sudo /etc/init.d/firebird2.5-super start
-      sleep 1
-      sync
-    "
-
-    if run_or_err "Creating Firebird TestDB" \
-      "echo \"CREATE DATABASE '/var/lib/firebird/2.5/data/dbic_test.fdb';\" | sudo isql-fb -u sysdba -p 123"
-    then
-
-      run_or_err "Fetching and building Firebird ODBC driver" '
-        cd "$(mktemp -d)"
-        wget -qO- http://sourceforge.net/projects/firebird/files/firebird-ODBC-driver/2.0.2-Release/OdbcFb-Source-2.0.2.153.gz/download | tar -zx
-        cd Builds/Gcc.lin
-        perl -p -i -e "s|/usr/lib64|/usr/lib/x86_64-linux-gnu|g" ../makefile.environ
-        make -f makefile.linux
-        sudo make -f makefile.linux install
-      '
-
-      sudo bash -c 'cat >> /etc/odbcinst.ini' <<< "
-[Firebird]
-Description     = InterBase/Firebird ODBC Driver
-Driver          = /usr/lib/x86_64-linux-gnu/libOdbcFb.so
-Setup           = /usr/lib/x86_64-linux-gnu/libOdbcFb.so
-Threading       = 1
-FileUsage       = 1
-"
-
-      export DBICTEST_FIREBIRD_DSN=dbi:Firebird:dbname=/var/lib/firebird/2.5/data/dbic_test.fdb
-      export DBICTEST_FIREBIRD_USER=SYSDBA
-      export DBICTEST_FIREBIRD_PASS=123
-
-      export DBICTEST_FIREBIRD_INTERBASE_DSN=dbi:InterBase:dbname=/var/lib/firebird/2.5/data/dbic_test.fdb
-      export DBICTEST_FIREBIRD_INTERBASE_USER=SYSDBA
-      export DBICTEST_FIREBIRD_INTERBASE_PASS=123
-
-      export DBICTEST_FIREBIRD_ODBC_DSN="dbi:ODBC:Driver=Firebird;Dbname=/var/lib/firebird/2.5/data/dbic_test.fdb"
-      export DBICTEST_FIREBIRD_ODBC_USER=SYSDBA
-      export DBICTEST_FIREBIRD_ODBC_PASS=123
-
-      break
-    fi
-
-  done
-
-### config oracle
-  SRV_ORA_HOME=/usr/lib/oracle/xe/app/oracle/product/10.2.0/server
-
-  # without this some of the more zealous tests can exhaust the amount
-  # of listeners and oracle is too slow to spin extras up :(
-  sudo bash -c "echo -e '\nprocesses=150' >> $SRV_ORA_HOME/config/scripts/init.ora"
-
-  EXPECT_ORA_SCRIPT='
-    spawn /etc/init.d/oracle-xe configure
-
-    sleep 1
-    set send_slow {1 .005}
-
-    expect "Specify the HTTP port that will be used for Oracle Application Express"
-    sleep 0.5
-    send -s "8021\r"
-
-    expect "Specify a port that will be used for the database listener"
-    sleep 0.5
-    send -s "1521\r"
-
-    expect "Specify a password to be used for database accounts"
-    sleep 0.5
-    send -s "adminpass\r"
-
-    expect "Confirm the password"
-    sleep 0.5
-    send -s "adminpass\r"
-
-    expect "Do you want Oracle Database 10g Express Edition to be started on boot"
-    sleep 0.5
-    send -s "n\r"
-
-    sleep 0.5
-    expect "Configuring Database"
-
-    sleep 1
-    expect eof
-    wait
-  '
-
-  # if we do not redirect to some random file, but instead try to capture
-  # into a var the way run_or_err does - everything hangs
-  # FIXME: I couldn't figure it out after 3 hours of headdesking,
-  # would be nice to know the reason eventually
-  run_or_err "Configuring OracleXE" "sudo $(which expect) -c '$EXPECT_ORA_SCRIPT' &>/tmp/ora_configure_10.2.log"
-
-  export DBICTEST_ORA_DSN=dbi:Oracle://localhost:1521/XE
-  export DBICTEST_ORA_USER=dbic_test
-  export DBICTEST_ORA_PASS=abc123456
-  export DBICTEST_ORA_EXTRAUSER_DSN="$DBICTEST_ORA_DSN"
-  export DBICTEST_ORA_EXTRAUSER_USER=dbic_test_extra
-  export DBICTEST_ORA_EXTRAUSER_PASS=abc123456
-
-  run_or_err "Create Oracle users" "ORACLE_SID=XE ORACLE_HOME=$SRV_ORA_HOME $SRV_ORA_HOME/bin/sqlplus -L -S system/adminpass @/dev/stdin <<< '
-    CREATE USER $DBICTEST_ORA_USER IDENTIFIED BY $DBICTEST_ORA_PASS;
-    GRANT connect,resource TO $DBICTEST_ORA_USER;
-    CREATE USER $DBICTEST_ORA_EXTRAUSER_USER IDENTIFIED BY $DBICTEST_ORA_EXTRAUSER_PASS;
-    GRANT connect,resource TO $DBICTEST_ORA_EXTRAUSER_USER;
-  '"
-
-  export ORACLE_HOME="$CACHE_DIR/ora_instaclient/x86-64/oracle_instaclient_10.2.0.5.0"
-fi
@@ -1,86 +0,0 @@
-#!/bin/bash
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-CPAN_MIRROR=$(echo "$PERL_CPANM_OPT" | grep -oP -- '--mirror\s+\S+' | head -n 1 | cut -d ' ' -f 2)
-if ! [[ "$CPAN_MIRROR" =~ "http://" ]] ; then
-  echo_err "Unable to extract primary cpan mirror from PERL_CPANM_OPT - something is wrong"
-  echo_err "PERL_CPANM_OPT: $PERL_CPANM_OPT"
-  CPAN_MIRROR="http://cpan.metacpan.org/"
-  PERL_CPANM_OPT="$PERL_CPANM_OPT --mirror $CPAN_MIRROR"
-  echo_err "Using $CPAN_MIRROR for the time being"
-fi
-
-export PERL_MM_USE_DEFAULT=1 PERL_MM_NONINTERACTIVE=1 PERL_AUTOINSTALL_PREFER_CPAN=1 PERLBREW_CPAN_MIRROR="$CPAN_MIRROR" HARNESS_TIMER=1 MAKEFLAGS="-j$NUMTHREADS"
-
-# try CPAN's latest offering if requested
-if [[ "$DEVREL_DEPS" == "true" ]] ; then
-
-  PERL_CPANM_OPT="$PERL_CPANM_OPT --dev"
-
-  # FIXME inline-upgrade cpanm, work around https://github.com/travis-ci/travis-ci/issues/1477
-  cpanm_loc="$(which cpanm)"
-  run_or_err "Upgrading cpanm ($cpanm_loc) to latest stable" \
-    "wget -q -O $cpanm_loc cpanmin.us && chmod a+x $cpanm_loc"
-fi
-
-# Fixup CPANM_OPT to behave more like a traditional cpan client
-export PERL_CPANM_OPT="--verbose --no-interactive --no-man-pages $( echo $PERL_CPANM_OPT | sed 's/--skip-satisfied//' )"
-
-if [[ -n "$BREWVER" ]] ; then
-  # since perl 5.14 a perl can safely be built concurrently with -j$large
-  # (according to brute force testing and my power bill)
-  if [[ "$BREWVER" == "blead" ]] || perl -Mversion -e "exit !!(version->new(q($BREWVER)) < 5.014)" ; then
-    perlbrew_jopt="$NUMTHREADS"
-  fi
-
-  run_or_err "Compiling/installing Perl $BREWVER (without testing, using ${perlbrew_jopt:-1} threads, may take up to 5 minutes)" \
-    "perlbrew install --as $BREWVER --notest --noman --verbose $BREWOPTS -j${perlbrew_jopt:-1}  $BREWVER"
-
-  # can not do 'perlbrew uss' in the run_or_err subshell above, or a $()
-  # furthermore `perlbrew use` returns 0 regardless of whether the perl is
-  # found (won't be there unless compilation suceeded, wich *ALSO* returns 0)
-  perlbrew use $BREWVER
-
-  if [[ "$( perlbrew use | grep -oP '(?<=Currently using ).+' )" != "$BREWVER" ]] ; then
-    echo_err "Unable to switch to $BREWVER - compilation failed...?"
-    echo_err "$LASTOUT"
-    exit 1
-  fi
-
-# no brewver - this means a travis perl, which means we want to clean up
-# the presently installed libs
-# Idea stolen from
-# https://github.com/kentfredric/Dist-Zilla-Plugin-Prereqs-MatchInstalled-All/blob/master/maint-travis-ci/sterilize_env.pl
-elif [[ "$CLEANTEST" == "true" ]] && [[ "$POISON_ENV" != "true" ]] ; then
-
-  echo_err "$(tstamp) Cleaning precompiled Travis-Perl"
-  perl -MConfig -MFile::Find -e '
-    my $sitedirs = {
-      map { $Config{$_} => 1 }
-        grep { $_ =~ /site(lib|arch)exp$/ }
-          keys %Config
-    };
-    find({ bydepth => 1, no_chdir => 1, follow_fast => 1, wanted => sub {
-      ! $sitedirs->{$_} and ( -d _ ? rmdir : unlink )
-    } }, keys %$sitedirs )
-  '
-
-  echo_err "Post-cleanup contents of sitelib of the pre-compiled Travis-Perl $TRAVIS_PERL_VERSION:"
-  echo_err "$(tree $(perl -MConfig -e 'print $Config{sitelib_stem}'))"
-  echo_err
-fi
-
-# configure CPAN.pm - older versions go into an endless loop
-# when trying to autoconf themselves
-CPAN_CFG_SCRIPT="
-  require CPAN;
-  require CPAN::FirstTime;
-  *CPAN::FirstTime::conf_sites = sub {};
-  CPAN::Config->load;
-  \$CPAN::Config->{urllist} = [qw{ $CPAN_MIRROR }];
-  \$CPAN::Config->{halt_on_failure} = 1;
-  CPAN::Config->commit;
-"
-run_or_err "Configuring CPAN.pm" "perl -e '$CPAN_CFG_SCRIPT'"
@@ -1,279 +0,0 @@
-#!/bin/bash
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-# poison the environment
-if [[ "$POISON_ENV" = "true" ]] ; then
-
-  # look through lib, find all mentioned ENVvars and set them
-  # to true and see if anything explodes
-  for var in $(grep -P '\$ENV\{' -r lib/ | grep -oP 'DBIC_\w+' | sort -u | grep -v DBIC_TRACE) ; do
-    if [[ -z "${!var}" ]] ; then
-      export $var=1
-    fi
-  done
-
-  # bogus nonexisting DBI_*
-  export DBI_DSN="dbi:ODBC:server=NonexistentServerAddress"
-  export DBI_DRIVER="ADO"
-
-  # make sure tests do not rely on implicid order of returned results
-  export DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER=1
-
-  # emulate a local::lib-like env
-  # trick cpanm into executing true as shell - we just need the find+unpack
-  run_or_err "Downloading latest stable DBIC from CPAN" \
-    "SHELL=/bin/true cpanm --look DBIx::Class"
-
-  export PERL5LIB="$( ls -d ~/.cpanm/latest-build/DBIx-Class-*/lib | tail -n1 ):$PERL5LIB"
-
-  # perldoc -l <mod> searches $(pwd)/lib in addition to PERL5LIB etc, hence the cd /
-  echo_err "Latest stable DBIC (without deps) locatable via \$PERL5LIB at $(cd / && perldoc -l DBIx::Class)"
-
-  # FIXME - this is a kludge in place of proper MDV testing. For the time
-  # being simply use the minimum versions of our DBI/DBDstack, to avoid
-  # fuckups like 0.08260 (went unnoticed for 5 months)
-  #
-  # use url-spec for DBI due to https://github.com/miyagawa/cpanminus/issues/328
-  if perl -M5.013003 -e1 &>/dev/null ; then
-    # earlier DBI will not compile without PERL_POLLUTE which was gone in 5.14
-    parallel_installdeps_notest T/TI/TIMB/DBI-1.614.tar.gz
-  else
-    parallel_installdeps_notest T/TI/TIMB/DBI-1.57.tar.gz
-  fi
-
-  # Test both minimum DBD::SQLite and minimum BigInt SQLite
-  if [[ "$CLEANTEST" = "true" ]]; then
-    parallel_installdeps_notest DBD::SQLite@1.37
-  else
-    parallel_installdeps_notest DBD::SQLite@1.29
-  fi
-
-fi
-
-if [[ "$CLEANTEST" = "true" ]]; then
-  # get the last inc/ off cpan - we will get rid of MI
-  # soon enough, but till then this will do
-  # the point is to have a *really* clean perl (the ones
-  # we build are guaranteed to be clean, without side
-  # effects from travis preinstalls)
-
-  # trick cpanm into executing true as shell - we just need the find+unpack
-  [[ -d ~/.cpanm/latest-build/DBIx-Class-*/inc ]] || run_or_err "Downloading latest stable DBIC inc/ from CPAN" \
-    "SHELL=/bin/true cpanm --look DBIx::Class"
-
-  mv ~/.cpanm/latest-build/DBIx-Class-*/inc .
-
-  # The first CPAN which is somewhat sane is around 1.94_56 (perl 5.12)
-  # The problem is that the first sane version also brings a *lot* of
-  # deps with it, notably things like YAML and HTTP::Tiny
-  # The goal of CLEANTEST is to have as little extra stuff installed as
-  # possible, mainly to catch "but X is perl core" mistakes
-  # So instead we still use our stock (possibly old) CPAN, and add some
-  # handholding
-
-  if [[ "$DEVREL_DEPS" == "true" ]] ; then
-    # Many dists still do not pass tests under tb1.5 properly (and it itself
-    # does not even install on things like 5.10). Install the *stable-dev*
-    # latest T::B here, so that it will not show up as a dependency, and
-    # hence it will not get installed a second time as an unsatisfied dep
-    # under cpanm --dev
-    #
-    # We are also not "quite ready" for SQLA 1.99, do not consider it
-    #
-    installdeps 'Test::Builder~<1.005' 'SQL::Abstract~<1.99'
-
-  elif ! CPAN_is_sane ; then
-    # no configure_requires - we will need the usual suspects anyway
-    # without pre-installing these in one pass things like extract_prereqs won't work
-    installdeps ExtUtils::MakeMaker ExtUtils::CBuilder Module::Build
-
-  fi
-
-else
-  # we will be running all dbic tests - preinstall lots of stuff, run basic tests
-  # using SQLT and set up whatever databases necessary
-  export DBICTEST_SQLT_DEPLOY=1
-
-  # FIXME - need new TB1.5 devrel
-  # if we run under --dev install latest github of TB1.5 first
-  # (unreleased workaround for precedence warnings)
-  if [[ "$DEVREL_DEPS" == "true" ]] ; then
-    parallel_installdeps_notest git://github.com/nthykier/test-more.git@fix-return-precedence-issue
-  fi
-
-  # do the preinstall in several passes to minimize amount of cross-deps installing
-  # multiple times, and to avoid module re-architecture breaking another install
-  # (e.g. once Carp is upgraded there's no more Carp::Heavy,
-  # while a File::Path upgrade may cause a parallel EUMM run to fail)
-  #
-  parallel_installdeps_notest ExtUtils::MakeMaker
-  parallel_installdeps_notest File::Path
-  parallel_installdeps_notest Carp
-  parallel_installdeps_notest Module::Build
-  parallel_installdeps_notest File::Spec Data::Dumper Module::Runtime
-  parallel_installdeps_notest Test::Exception Encode::Locale Test::Fatal
-  parallel_installdeps_notest Test::Warn B::Hooks::EndOfScope Test::Differences HTTP::Status
-  parallel_installdeps_notest Test::Pod::Coverage Test::EOL Devel::GlobalDestruction Sub::Name MRO::Compat Class::XSAccessor URI::Escape HTML::Entities
-  parallel_installdeps_notest YAML LWP Class::Trigger JSON::XS DateTime::Format::Builder Class::Accessor::Grouped Package::Variant
-  parallel_installdeps_notest 'SQL::Abstract~<1.99' Moose Module::Install JSON SQL::Translator File::Which
-
-  if [[ -n "DBICTEST_FIREBIRD_INTERBASE_DSN" ]] ; then
-    # the official version is very much outdated and does not compile on 5.14+
-    # use this rather updated source tree (needs to go to PAUSE):
-    # https://github.com/pilcrow/perl-dbd-interbase
-    parallel_installdeps_notest git://github.com/dbsrgits/perl-dbd-interbase.git
-  fi
-
-fi
-
-# generate the makefile which will have different deps depending on
-# the runmode and envvars set above
-run_or_err "Configure on current branch" "perl Makefile.PL"
-
-# install (remaining) dependencies, sometimes with a gentle push
-if [[ "$CLEANTEST" = "true" ]]; then
-  # we may need to prepend some stuff to that list
-  HARD_DEPS="$(echo $(make listdeps))"
-
-##### TEMPORARY WORKAROUNDS needed in case we will be using CPAN.pm
-  if [[ "$DEVREL_DEPS" != "true" ]] && ! CPAN_is_sane ; then
-    # combat dzillirium on harness-wide level, otherwise breakage happens weekly
-    echo_err "$(tstamp) Ancient CPAN.pm: engaging TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests during dep install"
-    perl -MTAP::Harness\ 3.18 -e1 &>/dev/null || run_or_err "Upgrading TAP::Harness for HARNESS_SUBCLASS support" "cpan TAP::Harness"
-    export PERL5LIB="$(pwd)/maint/travis-ci_scripts/lib:$PERL5LIB"
-    export HARNESS_SUBCLASS="TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests"
-    # sanity check, T::H does not report sensible errors when the subclass fails to load
-    perl -MTAP::Harness::IgnoreNonessentialDzilAutogeneratedTests -e1
-
-    # DBD::SQLite reasonably wants DBI at config time
-    perl -MDBI -e1 &>/dev/null || HARD_DEPS="DBI $HARD_DEPS"
-
-    # this is a fucked CPAN - won't understand configure_requires of
-    # various pieces we may run into
-    # FIXME - need to get these off metacpan or something instead
-    HARD_DEPS="ExtUtils::Depends B::Hooks::OP::Check $HARD_DEPS"
-
-    # FIXME
-    # parent is temporary due to Carp https://rt.cpan.org/Ticket/Display.html?id=88494
-    HARD_DEPS="parent $HARD_DEPS"
-
-    if CPAN_supports_BUILDPL ; then
-      # We will invoke a posibly MBT based BUILD-file, but we do not support
-      # configure requires. So we not only need to install MBT but its prereqs
-      # FIXME This is madness
-      HARD_DEPS="$(extract_prereqs Module::Build::Tiny) Module::Build::Tiny $HARD_DEPS"
-    else
-      # FIXME
-      # work around Params::Validate not having a Makefile.PL so really old
-      # toolchains can not figure out what the prereqs are ;(
-      # Need to do more research before filing a bug requesting Makefile inclusion
-      HARD_DEPS="$(extract_prereqs Params::Validate) $HARD_DEPS"
-    fi
-  fi
-##### END TEMPORARY WORKAROUNDS
-
-  installdeps $HARD_DEPS
-
-### FIXME in case we set it earlier in a workaround
-  if [[ -n "$HARNESS_SUBCLASS" ]] ; then
-
-    INSTALLDEPS_SKIPPED_TESTLIST=$(perl -0777 -e '
-my $curmod_re = qr{
-^
-  (?:
-    \QBuilding and testing\E
-      |
-    [\x20\t]* CPAN\.pm: [^\n]*? (?i:build)\S*
-  )
-
-  [\x20\t]+ (\S+)
-$}mx;
-
-my $curskip_re = qr{^ === \x20 \QSkipping nonessential autogenerated tests: \E([^\n]+) }mx;
-
-my (undef, @chunks) = (split qr/$curmod_re/, <>);
-while (@chunks) {
-  my ($mod, $log) = splice @chunks, 0, 2;
-  print "!!! Skipped nonessential tests while installing $mod:\n\t$1\n"
-    if $log =~ $curskip_re;
-}
-' <<< "$LASTOUT")
-
-    if [[ -n "$INSTALLDEPS_SKIPPED_TESTLIST" ]] ; then
-      POSTMORTEM="$POSTMORTEM$(
-        echo
-        echo "The following non-essential tests were skipped during deps installation"
-        echo "============================================================="
-        echo "$INSTALLDEPS_SKIPPED_TESTLIST"
-        echo "============================================================="
-        echo
-      )"
-    fi
-
-    unset HARNESS_SUBCLASS
-  fi
-
-else
-
-  # listalldeps is deliberate - will upgrade everything it can find
-  # we exclude SQLA specifically, since we do not want to pull
-  # in 1.99_xx on bleadcpan runs
-  deplist="$(make listalldeps | grep -vP '^(SQL::Abstract)$')"
-
-  # assume MDV on POISON_ENV, do not touch DBI/SQLite
-  if [[ "$POISON_ENV" = "true" ]] ; then
-    deplist="$(grep -vP '^(DBI|DBD::SQLite)$' <<< "$deplist")"
-  fi
-
-  parallel_installdeps_notest "$deplist"
-fi
-
-echo_err "$(tstamp) Dependency installation finished"
-# this will display list of available versions
-perl Makefile.PL
-
-# make sure we got everything we need
-if [[ -n "$(make listdeps)" ]] ; then
-  echo_err "$(tstamp) Not all deps installed - something went wrong :("
-  sleep 1 # without this the echo below confuses the console listener >.<
-  CPAN_is_sane || echo_err -e "Outdated CPAN.pm used - full installdep log follows\n$INSTALLDEPS_OUT\n\nSearch for 'NOT OK' in the text above\n\nDeps still missing:"
-  sleep 3 # without this the above echo confuses the console listener >.<
-  make listdeps
-  exit 1
-fi
-
-# check that our MDV somewhat works
-if [[ "$POISON_ENV" = "true" ]] && ( perl -MDBD::SQLite\ 1.38 -e1 || perl -MDBI\ 1.615 -e1 ) &>/dev/null ; then
-  echo_err "Something went wrong - higher versions of DBI and/or DBD::SQLite than we expected"
-  exit 1
-fi
-
-
-# announce what are we running
-echo_err "
-===================== DEPENDENCY CONFIGURATION COMPLETE =====================
-$(tstamp) Configuration phase seems to have taken $(date -ud "@$SECONDS" '+%H:%M:%S') (@$SECONDS)
-
-= CPUinfo
-$(perl -0777 -p -e 's/.+\n\n(?!\z)//s' < /proc/cpuinfo)
-
-= Meminfo
-$(free -m -t)
-
-= Kernel info
-$(uname -a)
-
-= Network Configuration
-$(ip addr)
-
-= Network Sockets Status
-$(sudo netstat -an46p | grep -Pv '\s(CLOSING|(FIN|TIME|CLOSE)_WAIT.?|LAST_ACK)\s')
-
-= Environment
-$(env | grep -P 'TEST|HARNESS|MAKE|TRAVIS|PERL|DBIC' | LC_ALL=C sort | cat -v)
-
-= Perl in use
-$(perl -V)
-============================================================================="
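The HARNESS_SUBCLASS branch of the installdeps step above leans on split() with a capturing group: splitting the cpanm/CPAN build log on the per-module header lines returns the captured module names interleaved with the log chunk that follows each of them, and every chunk is then checked for the skip marker emitted by the custom TAP harness further down in this changeset. A minimal standalone sketch of the same idea (the log excerpt and the simplified header pattern are made up for illustration):

use strict;
use warnings;

# made-up log excerpt, for illustration only
my $log = <<'EOL';
Building and testing Foo-Bar-1.23
=== Skipping nonessential autogenerated tests: t/00-compile.t
Building and testing Baz-Quux-0.01
All tests successful.
EOL

# the capturing group makes split() return the module names interleaved
# with the chunk of log output following each of them; the leading
# pre-match field is discarded via the initial undef
my (undef, @chunks) = split /^Building and testing[\x20\t]+(\S+)$/m, $log;

while (@chunks) {
  my ($mod, $chunk) = splice @chunks, 0, 2;
  print "!!! Skipped nonessential tests while installing $mod:\n\t$1\n"
    if $chunk =~ /^=== Skipping nonessential autogenerated tests: ([^\n]+)/m;
}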
@@ -1,52 +0,0 @@
-#!/bin/bash
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-run_harness_tests() {
-  local -x HARNESS_OPTIONS=c:j$NUMTHREADS
-  make test 2> >(tee "$TEST_STDERR_LOG")
-}
-
-TEST_T0=$SECONDS
-if [[ "$CLEANTEST" = "true" ]] ; then
-  echo_err "$(tstamp) Running tests with plain \`make test\`"
-  run_or_err "Prepare blib" "make pure_all"
-  run_harness_tests
-else
-  PROVECMD="prove -lrswj$NUMTHREADS xt t"
-
-  # FIXME - temporary, until Package::Stash is fixed
-  if perl -M5.010 -e 1 &>/dev/null ; then
-    PROVECMD="$PROVECMD -T"
-  fi
-
-  echo_err "$(tstamp) running tests with \`$PROVECMD\`"
-  $PROVECMD 2> >(tee "$TEST_STDERR_LOG")
-fi
-TEST_T1=$SECONDS
-
-if [[ -z "$DBICTRACE" ]] && [[ -z "$POISON_ENV" ]] && [[ -s "$TEST_STDERR_LOG" ]] ; then
-  STDERR_LOG_SIZE=$(wc -l < "$TEST_STDERR_LOG")
-
-  # prepend STDERR log
-  POSTMORTEM="$(
-    echo
-    echo "Test run produced $STDERR_LOG_SIZE lines of output on STDERR:"
-    echo "============================================================="
-    cat "$TEST_STDERR_LOG"
-    echo "============================================================="
-    echo "End of test run STDERR output ($STDERR_LOG_SIZE lines)"
-    echo
-    echo
-  )$POSTMORTEM"
-fi
-
-echo
-echo "${POSTMORTEM:- \o/ No notable smoke run issues \o/ }"
-echo
-echo "$(tstamp) Testing took a total of $(( $TEST_T1 - $TEST_T0 ))s"
-if [[ -n "$INSTALLDEPS_OUT" ]] ; then
-  echo "$(tstamp) Full dep install log at $(/usr/bin/nopaste -q -s Shadowcat -d DepInstall <<< "$INSTALLDEPS_OUT")"
-fi
-echo
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-# !!! Nothing here will be executed !!!
-# The source-line calling this script is commented out in .travis.yml
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-echo_err "Nothing to do"
-
-return 0
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-if [[ "$CLEANTEST" != "true" ]] ; then
-  parallel_installdeps_notest $(perl -Ilib -MDBIx::Class -e 'print join " ", keys %{DBIx::Class::Optional::Dependencies->req_list_for("dist_dir")}')
-  run_or_err "Attempt to build a dist with all prereqs present" "make dist"
-  echo "Contents of the resulting dist tarball:"
-  echo "==========================================="
-  tar -vzxf DBIx-Class-*.tar.gz
-  echo "==========================================="
-  run_or_err 'Attempt to configure from re-extracted distdir' \
-    'bash -c "cd \$(find DBIx-Class-* -maxdepth 0 -type d | head -n 1) && perl Makefile.PL"'
-fi
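The one-liner above uses the DBIx::Class::Optional::Dependencies API: req_list_for() returns a hashref of module => minimum-version pairs for a named optional-dependency group ('dist_dir' here; the available group names vary between DBIC versions). A minimal sketch of the same lookup, assuming it is run from a checkout with lib/ on @INC just as the script does:

use strict;
use warnings;
use lib 'lib';
use DBIx::Class::Optional::Dependencies;

# module => minimum version required for the 'dist_dir' group
my $reqs = DBIx::Class::Optional::Dependencies->req_list_for('dist_dir');
printf "%s => %s\n", $_, $reqs->{$_} for sort keys %$reqs;

# related predicates used elsewhere in this changeset:
#   req_ok_for($group)      - true if the whole group is satisfied
#   req_missing_for($group) - human-readable description of what is missing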
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-# !!! Nothing here will be executed !!!
-# The source-line calling this script is commented out in .travis.yml
-
-source maint/travis-ci_scripts/common.bash
-if [[ -n "$SHORT_CIRCUIT_SMOKE" ]] ; then return ; fi
-
-echo_err "Nothing to do"
-
-return 0
@@ -1,194 +0,0 @@
-#!/bin/bash
-
-set -e
-
-TEST_STDERR_LOG=/tmp/dbictest.stderr
-TIMEOUT_CMD="/usr/bin/timeout --kill-after=9.5m --signal=TERM 9m"
-
-echo_err() { echo "$@" 1>&2 ; }
-
-if [[ "$TRAVIS" != "true" ]] ; then
-  echo_err "Running this script makes no sense outside of travis-ci"
-  exit 1
-fi
-
-tstamp() { echo -n "[$(date '+%H:%M:%S')]" ; }
-
-run_or_err() {
-  echo_err -n "$(tstamp) $1 ... "
-
-  LASTEXIT=0
-  START_TIME=$SECONDS
-  # the tee is a handy debugging tool when stumpage is exceedingly strong
-  #LASTOUT=$( bash -c "$2" 2>&1 | tee /dev/stderr) || LASTEXIT=$?
-  LASTOUT=$( bash -c "$2" 2>&1 ) || LASTEXIT=$?
-  DELTA_TIME=$(( $SECONDS - $START_TIME ))
-
-  if [[ "$LASTEXIT" != "0" ]] ; then
-    echo_err "FAILED !!! (after ${DELTA_TIME}s)"
-    echo_err "Command executed:"
-    echo_err "$2"
-    echo_err "STDOUT+STDERR:"
-    echo_err "$LASTOUT"
-
-    return $LASTEXIT
-  else
-    echo_err "done (took ${DELTA_TIME}s)"
-  fi
-}
-
-apt_install() {
-  # flatten
-  pkgs="$@"
-
-  # Need to do this at every step, the sources list may very well have changed
-  run_or_err "Updating APT available package list" "sudo apt-get update"
-
-  run_or_err "Installing Debian APT packages: $pkgs" "sudo apt-get install --allow-unauthenticated  --no-install-recommends -y $pkgs"
-}
-
-extract_prereqs() {
-  # once --verbose is set, --no-verbose can't disable it
-  # do this by hand
-  local PERL_CPANM_OPT="$( echo $PERL_CPANM_OPT | sed 's/--verbose\s*//' )"
-
-  # hack-hack-hack
-  LASTEXIT=0
-  COMBINED_OUT="$( { stdout="$(cpanm --quiet --scandeps --format tree "$@")" ; } 2>&1; echo "!!!STDERRSTDOUTSEPARATOR!!!$stdout")" \
-    || LASTEXIT=$?
-
-  OUT=${COMBINED_OUT#*!!!STDERRSTDOUTSEPARATOR!!!}
-  ERR=$(grep -v " is up to date." <<< "${COMBINED_OUT%!!!STDERRSTDOUTSEPARATOR!!!*}")
-
-  if [[ "$LASTEXIT" != "0" ]] ; then
-    echo_err "Error occurred (exit code $LASTEXIT) retrieving dependencies of $@:"
-    echo_err "$ERR"
-    echo_err "$OUT"
-    exit 1
-  fi
-
-  # throw away warnings, ascii art, convert to modnames
-  PQ=$(perl -p -e 's/^\!.*//; s/^[^a-z]+//i; s/\-[^\-]+$/ /; s/\-/::/g' <<< "$OUT")
-
-  # throw away what was in $@
-  for m in "$@" ; do
-    PQ=$( perl -p -e 's/(?:\s|^)\Q'"$m"'\E(?:\s|$)/ /mg' <<< "$PQ")
-  done
-
-  # RV
-  echo "$PQ"
-}
-
-parallel_installdeps_notest() {
-  if [[ -z "$@" ]] ; then return; fi
-
-  # one module spec per line
-  MODLIST="$(printf '%s\n' "$@")"
-
-  # We want to trap the output of each process and serially append them to
-  # each other as opposed to just dumping a jumbled up mass-log that would
-  # need careful unpicking by a human
-  #
-  # While cpanm does maintain individual buildlogs in more recent versions,
-  # we are not terribly interested in trying to figure out which log is which
-  # dist. The verbose-output + trap STDIO technique is vastly superior in this
-  # particular case
-  #
-  # Explanation of inline args:
-  #
-  # [09:38] <T> you need a $0
-  # [09:38] <G> hence the _
-  # [09:38] <G> bash -c '...' _
-  # [09:39] <T> I like -- because it's the magic that gnu getopts uses for somethign else
-  # [09:39] <G> or --, yes
-  # [09:39] <T> ribasushi: you could put "giant space monkey penises" instead of "--" and it would work just as well
-  #
-  run_or_err "Installing (without testing) $(echo $MODLIST)" \
-    "echo \\
-\"$MODLIST\" \\
-      | xargs -d '\\n' -n 1 -P $NUMTHREADS bash -c \\
-        'OUT=\$($TIMEOUT_CMD cpanm --notest \"\$@\" 2>&1 ) || (LASTEXIT=\$?; echo \"\$OUT\"; exit \$LASTEXIT)' \\
-        'giant space monkey penises'
-    "
-}
-
-installdeps() {
-  if [[ -z "$@" ]] ; then return; fi
-
-  echo_err "$(tstamp) Processing dependencies: $@"
-
-  local -x HARNESS_OPTIONS
-
-  HARNESS_OPTIONS="j$NUMTHREADS"
-
-  echo_err -n "Attempting install of $# modules under parallel ($HARNESS_OPTIONS) testing ... "
-
-  LASTEXIT=0
-  START_TIME=$SECONDS
-  LASTOUT=$( _dep_inst_with_test "$@" ) || LASTEXIT=$?
-  DELTA_TIME=$(( $SECONDS - $START_TIME ))
-
-  if [[ "$LASTEXIT" = "0" ]] ; then
-    echo_err "done (took ${DELTA_TIME}s)"
-  else
-    local errlog="after ${DELTA_TIME}s Exit:$LASTEXIT Log:$(/usr/bin/nopaste -q -s Shadowcat -d "Parallel testfail" <<< "$LASTOUT")"
-    echo_err -n "failed ($errlog) retrying with sequential testing ... "
-    POSTMORTEM="$POSTMORTEM$(
-      echo
-      echo "Depinstall under $HARNESS_OPTIONS parallel testing failed $errlog"
-      echo "============================================================="
-      echo "Attempted installation of: $@"
-      echo "============================================================="
-    )"
-
-    HARNESS_OPTIONS=""
-    LASTEXIT=0
-    START_TIME=$SECONDS
-    LASTOUT=$( _dep_inst_with_test "$@" ) || LASTEXIT=$?
-    DELTA_TIME=$(( $SECONDS - $START_TIME ))
-
-    if [[ "$LASTEXIT" = "0" ]] ; then
-      echo_err "done (took ${DELTA_TIME}s)"
-    else
-      echo_err "FAILED !!! (after ${DELTA_TIME}s)"
-      echo_err "STDOUT+STDERR:"
-      echo_err "$LASTOUT"
-      exit 1
-    fi
-  fi
-
-  INSTALLDEPS_OUT="${INSTALLDEPS_OUT}${LASTOUT}"
-}
-
-_dep_inst_with_test() {
-  if [[ "$DEVREL_DEPS" == "true" ]] ; then
-    # --dev is already part of CPANM_OPT
-    $TIMEOUT_CMD cpanm "$@" 2>&1
-  else
-    $TIMEOUT_CMD cpan "$@" 2>&1
-
-    # older perls do not have a CPAN which can exit with error on failed install
-    for m in "$@"; do
-      if ! perl -e '
-
-my $mod = (
-  $ARGV[0] =~ m{ \/ .*? ([^\/]+) $ }x
-    ? do { my @p = split (/\-/, $1); pop @p; join "::", @p }
-    : $ARGV[0]
-);
-
-$mod = q{List::Util} if $mod eq q{Scalar::List::Utils};
-
-eval qq{require($mod)} or ( print $@ and exit 1)
-
-      ' "$m" 2> /dev/null ; then
-        echo -e "$m installation seems to have failed"
-        return 1
-      fi
-    done
-  fi
-}
-
-CPAN_is_sane() { perl -MCPAN\ 1.94_56 -e 1 &>/dev/null ; }
-
-CPAN_supports_BUILDPL() { perl -MCPAN\ 1.9205 -e1 &>/dev/null; }
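Because old CPAN.pm versions exit 0 even when an install fails, _dep_inst_with_test() above re-derives a module name from whatever spec was passed in (a distfile path or a plain module name) and require()s it as a sanity check. A small sketch of just that name-mangling step, with hypothetical inputs:

use strict;
use warnings;

# same transformation as the inline check above: take the last path
# component, drop the trailing version chunk, turn dashes into '::'
sub modname_from_spec {
  my ($spec) = @_;

  my $mod = (
    $spec =~ m{ / .*? ([^/]+) $ }x
      ? do { my @p = split /-/, $1; pop @p; join '::', @p }
      : $spec
  );

  # special case carried over from the original check
  $mod = 'List::Util' if $mod eq 'Scalar::List::Utils';

  return $mod;
}

print modname_from_spec('A/AU/AUTHOR/Foo-Bar-1.23.tar.gz'), "\n";  # Foo::Bar
print modname_from_spec('Try::Tiny'), "\n";                        # Try::Tiny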
@@ -1,93 +0,0 @@
-package TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests;
-
-use warnings;
-use strict;
-
-use base 'TAP::Harness';
-use File::Spec ();
-use IPC::Open3 'open3';
-use File::Temp ();
-use List::Util 'first';
-
-my $frivolous_test_map = {
-# Test based on the extremely dep-heavy, failure-prone Test::CheckDeps
-#
-  qr|^t/00-check-deps.t$| => [
-    qr|^\Q# this test was generated with Dist::Zilla::Plugin::Test::CheckDeps|m,
-
-    # older non-annotated versions
-    qr|use \s+ Test::CheckDeps .*? ^\Qcheck_dependencies('suggests')\E .*? \QBAIL_OUT("Missing dependencies") if !Test::More->builder->is_passing|smx,
-  ],
-
-# "does everything compile" tests are useless by definition - this is what the
-# rest of the test suite is for
-#
-  qr|^t/00-compile.t$| => [
-    qr|^\Q# this test was generated with Dist::Zilla::Plugin::Test::Compile|m,
-  ],
-
-# The report prereq test managed to become fatal as well
-#
-  qr|^t/00-report-prereqs.t$| => [
-    qr|^\Q# This test was generated by Dist::Zilla::Plugin::Test::ReportPrereqs|m,
-  ],
-
-# Just future-proof the thing, catch anything autogened by dzil for a bit
-  qr|^t/00-| => [
-    qr|^\Q# This test was generated by Dist::Zilla::|m,
-  ]
-};
-
-sub aggregate_tests {
-  my ($self, $aggregate, @all_tests) = @_;
-
-  my ($run_tests, $skip_tests);
-
-  TESTFILE:
-  for (@all_tests) {
-    my $fn = File::Spec::Unix->catpath( File::Spec->splitpath( $_ ) );
-
-    if (my $REs = $frivolous_test_map->{
-      (first { $fn =~ $_ } keys %$frivolous_test_map ) || ''
-    }) {
-      my $slurptest = do { local (@ARGV, $/) = $fn; <> };
-      $slurptest =~ $_ and push @$skip_tests, $fn and next TESTFILE for @$REs;
-    }
-
-    push @$run_tests, $fn;
-  }
-
-  if ($skip_tests) {
-
-    for my $tfn (@$skip_tests) {
-
-      (my $tfn_flattened = $tfn) =~ s|/|_|g;
-
-      my $log_file = File::Temp->new(
-        DIR => '/tmp',
-        TEMPLATE => "AutoGenTest_${tfn_flattened}_XXXXX",
-        SUFFIX => '.txt',
-      );
-
-      # FIXME I have no idea why the fileno dance is necessary - will investigate later
-      # All I know is that if I pass in just $log_file - open3 ignores it >:(
-      my $pid = open3(undef, '>&'.fileno($log_file), undef, $^X, qw(-I blib -I arch/lib), $tfn );
-      waitpid ($pid, 0);
-      my $ex = $?;
-
-      if ($ex) {
-        # use qx as opposed to another open3 until I figure out the above
-        close $log_file or die "Unable to close $log_file: $!";
-        chomp( my $url = `/usr/bin/nopaste -q -s Shadowcat -d $log_file < $log_file` );
-
-        $tfn .= "[would NOT have passed: $ex / $url]";
-      }
-    }
-
-    print STDERR "=== Skipping nonessential autogenerated tests: @$skip_tests\n";
-  }
-
-  return $self->SUPER::aggregate_tests($aggregate, @$run_tests);
-}
-
-1;
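This subclass is what prints the '=== Skipping nonessential autogenerated tests: ...' marker consumed by the dependency-install script earlier; in the CI setup it is presumably activated through the standard HARNESS_SUBCLASS environment variable checked there. It can also be driven directly through the normal TAP::Harness API - a hedged sketch, with the lib path to the subclass assumed:

use strict;
use warnings;

use lib 'maint/travis-ci_scripts/lib';   # assumed location of the subclass
use TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests;

# aggregate_tests() is invoked internally by runtests(), so frivolous
# dzil-generated tests are filtered (and logged to STDERR) transparently
my $harness = TAP::Harness::IgnoreNonessentialDzilAutogeneratedTests->new({
  verbosity => 0,
  lib       => [ 'blib/lib', 'blib/arch' ],
});

$harness->runtests( glob 't/*.t' );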
@@ -343,7 +343,7 @@ Same as perl's -I, prepended to current @INC
 
 =head1 AUTHORS
 
-See L<DBIx::Class/CONTRIBUTORS>
+See L<DBIx::Class/AUTHORS>
 
 =head1 LICENSE
 
@@ -88,8 +88,17 @@ is($link4->id, 4, 'Link 4 id');
 is($link4->url, undef, 'Link 4 url');
 is($link4->title, 'dtitle', 'Link 4 title');
 
+## variable size dataset
+@links = $schema->populate('Link', [
+[ qw/id title url/ ],
+[ 41 ],
+[ 42, undef, 'url42' ],
+]);
+is(scalar @links, 2);
+is($links[0]->url, undef);
+is($links[1]->url, 'url42');
 
-## make sure populate -> insert_bulk honors fields/orders in void context
+## make sure populate -> _insert_bulk honors fields/orders in void context
 ## schema order
 $schema->populate('Link', [
 [ qw/id url title/ ],
@@ -120,12 +129,69 @@ is($link7->id, 7, 'Link 7 id');
 is($link7->url, undef, 'Link 7 url');
 is($link7->title, 'gtitle', 'Link 7 title');
 
+## variable size dataset in void ctx
+$schema->populate('Link', [
+[ qw/id title url/ ],
+[ 71 ],
+[ 72, undef, 'url72' ],
+]);
+@links = $schema->resultset('Link')->search({ id => [71, 72]}, { order_by => 'id' })->all;
+is(scalar @links, 2);
+is($links[0]->url, undef);
+is($links[1]->url, 'url72');
+
+## variable size dataset in void ctx, hash version
+$schema->populate('Link', [
+  { id => 73 },
+  { id => 74, title => 't74' },
+  { id => 75, url => 'u75' },
+]);
+@links = $schema->resultset('Link')->search({ id => [73..75]}, { order_by => 'id' })->all;
+is(scalar @links, 3);
+is($links[0]->url, undef);
+is($links[0]->title, undef);
+is($links[1]->url, undef);
+is($links[1]->title, 't74');
+is($links[2]->url, 'u75');
+is($links[2]->title, undef);
+
+## Make sure the void ctx trace is sane
+{
+  for (
+    [
+      [ qw/id title url/ ],
+      [ 81 ],
+      [ 82, 't82' ],
+      [ 83, undef, 'url83' ],
+    ],
+    [
+      { id => 91 },
+      { id => 92, title => 't92' },
+      { id => 93, url => 'url93' },
+    ]
+  ) {
+    $schema->is_executed_sql_bind(
+      sub {
+        $schema->populate('Link', $_);
+      },
+      [
+        [ 'BEGIN' ],
+        [
+          'INSERT INTO link( id, title, url ) VALUES( ?, ?, ? )',
+          "__BULK_INSERT__"
+        ],
+        [ 'COMMIT' ],
+      ]
+    );
+  }
+}
+
 # populate with literals
 {
   my $rs = $schema->resultset('Link');
   $rs->delete;
 
-  # test insert_bulk with all literal sql (no binds)
+  # test populate with all literal sql (no binds)
 
   $rs->populate([
     (+{
@@ -163,7 +229,7 @@ is($link7->title, 'gtitle', 'Link 7 title');
   my $rs = $schema->resultset('Link');
   $rs->delete;
 
-  # test insert_bulk with all literal/bind sql
+  # test populate with all literal/bind sql
   $rs->populate([
     (+{
         url => \['?', [ {} => 'cpan.org' ] ],
@@ -178,7 +244,7 @@ is($link7->title, 'gtitle', 'Link 7 title');
 
   $rs->delete;
 
-  # test insert_bulk with mix literal and literal/bind
+  # test populate with mix literal and literal/bind
   $rs->populate([
     (+{
         url => \"'cpan.org'",
@@ -196,7 +262,7 @@ is($link7->title, 'gtitle', 'Link 7 title');
   # test mixed binds with literal sql/bind
 
   $rs->populate([ map { +{
-    url => \[ '? || ?', [ {} => 'cpan.org_' ], [ undef, $_ ] ],
+    url => \[ '? || ?', [ {} => 'cpan.org_' ], $_ ],
     title => "The 'best of' cpan",
   } } (1 .. 5) ]);
 
@@ -317,6 +383,7 @@ lives_ok {
 } 'literal+bind with semantically identical attrs works after normalization';
 
 # test all kinds of population with stringified objects
+# or with empty sets
 warnings_like {
   local $ENV{DBIC_RT79576_NOWARN};
 
@@ -329,79 +396,113 @@ warnings_like {
   my $rank = Math::BigInt->new(42);
 
   my $args = {
-    'stringifying objects after regular values' => [ map
-      { { name => $_, rank => $rank } }
-      (
+    'stringifying objects after regular values' => { AoA => [
+      [qw( name rank )],
+      ( map { [ $_, $rank ] } (
         'supplied before stringifying objects',
         'supplied before stringifying objects 2',
         $fn,
         $fn2,
-      )
-    ],
-    'stringifying objects before regular values' => [ map
-      { { name => $_, rank => $rank } }
-      (
+      )),
+    ]},
+
+    'stringifying objects before regular values' => { AoA => [
+      [qw( rank name )],
+      ( map { [ $rank, $_ ] } (
         $fn,
         $fn2,
         'supplied after stringifying objects',
         'supplied after stringifying objects 2',
-      )
-    ],
-    'stringifying objects between regular values' => [ map
-      { { name => $_, rank => $rank } }
-      (
+      )),
+    ]},
+
+    'stringifying objects between regular values' => { AoA => [
+      [qw( name rank )],
+      ( map { [ $_, $rank ] } (
         'supplied before stringifying objects',
         $fn,
         $fn2,
         'supplied after stringifying objects',
-      )
-    ],
-    'stringifying objects around regular values' => [ map
-      { { name => $_, rank => $rank } }
-      (
+      ))
+    ]},
+
+    'stringifying objects around regular values' => { AoA => [
+      [qw( rank name )],
+      ( map { [ $rank, $_ ] } (
         $fn,
         'supplied between stringifying objects',
         $fn2,
-      )
-    ],
+      ))
+    ]},
+
+    'single stringifying object' => { AoA => [
+      [qw( rank name )],
+      [ $rank, $fn ],
+    ]},
+
+    'empty set' => { AoA => [
+      [qw( name rank )],
+    ]},
   };
 
-  local $Storable::canonical = 1;
-  my $preimage = nfreeze([$fn, $fn2, $rank, $args]);
+  # generate the AoH equivalent based on the AoAs above
+  for my $bag (values %$args) {
+    $bag->{AoH} = [];
+    my @hdr = @{$bag->{AoA}[0]};
+    for my $v ( @{$bag->{AoA}}[1..$#{$bag->{AoA}}] ) {
+      push @{$bag->{AoH}}, my $h = {};
+      @{$h}{@hdr} = @$v;
+    }
+  }
 
-  for my $tst (keys %$args) {
+  local $Storable::canonical = 1;
+  my $preimage = nfreeze($args);
 
-    # test void ctx
-    $rs->delete;
-    $rs->populate($args->{$tst});
-    is_deeply(
-      $rs->all_hri,
-      $args->{$tst},
-      "Populate() $tst in void context"
-    );
 
-    # test non-void ctx
-    $rs->delete;
-    my $dummy = $rs->populate($args->{$tst});
-    is_deeply(
-      $rs->all_hri,
-      $args->{$tst},
-      "Populate() $tst in non-void context"
-    );
+  for my $tst (keys %$args) {
+    for my $type (qw(AoA AoH)) {
+
+      # test void ctx
+      $rs->delete;
+      $rs->populate($args->{$tst}{$type});
+      is_deeply(
+        $rs->all_hri,
+        $args->{$tst}{AoH},
+        "Populate() $tst in void context"
+      );
+
+      # test scalar ctx
+      $rs->delete;
+      my $dummy = $rs->populate($args->{$tst}{$type});
+      is_deeply(
+        $rs->all_hri,
+        $args->{$tst}{AoH},
+        "Populate() $tst in scalar context"
+      );
+
+      # test list ctx
+      $rs->delete;
+      my @dummy = $rs->populate($args->{$tst}{$type});
+      is_deeply(
+        $rs->all_hri,
+        $args->{$tst}{AoH},
+        "Populate() $tst in list context"
+      );
+    }
 
     # test create() as we have everything set up already
     $rs->delete;
-    $rs->create($_) for @{$args->{$tst}};
+    $rs->create($_) for @{$args->{$tst}{AoH}};
 
     is_deeply(
       $rs->all_hri,
-      $args->{$tst},
+      $args->{$tst}{AoH},
       "Create() $tst"
     );
   }
 
   ok (
-    ($preimage eq nfreeze( [$fn, $fn2, $rank, $args] )),
+    ($preimage eq nfreeze($args)),
     'Arguments fed to populate()/create() unchanged'
   );
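The reworked test feeds every dataset in both of the representations populate() accepts: an arrayref of arrayrefs whose first row names the columns, and the equivalent arrayref of hashrefs (which the loop above derives from the AoA form by pairing the header row with each data row). A minimal illustration of the two call styles, using the Artist source from the test schema:

use strict;
use warnings;
use lib qw(t/lib);
use DBICTest;

my $schema = DBICTest->init_schema();

# arrayref-of-arrayrefs: the first row names the columns
$schema->resultset('Artist')->populate([
  [ qw(name rank) ],
  [ 'artist one', 1 ],
  [ 'artist two', 2 ],
]);

# arrayref-of-hashrefs: the equivalent row-by-row form
$schema->resultset('Artist')->populate([
  { name => 'artist three', rank => 3 },
  { name => 'artist four',  rank => 4 },
]);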
 
@@ -416,10 +517,11 @@ warnings_like {
   )
     ? ()
     # one unique for populate() and create() each
-    : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 2
+    : (qr/\QPOSSIBLE *PAST* DATA CORRUPTION detected \E.+\QTrigger condition encountered at @{[ __FILE__ ]} line\E \d/) x 4
 ], 'Data integrity warnings as planned';
 
-lives_ok {
+$schema->is_executed_sql_bind(
+  sub {
    $schema->resultset('TwoKeys')->populate([{
       artist => 1,
       cd     => 5,
@@ -437,7 +539,26 @@ lives_ok {
             autopilot => 'b',
       }]
    }])
-} 'multicol-PK has_many populate works';
+  },
+  [
+    [ 'BEGIN' ],
+    [ 'INSERT INTO twokeys ( artist, cd)
+        VALUES ( ?, ? )',
+      '__BULK_INSERT__'
+    ],
+    [ 'INSERT INTO fourkeys_to_twokeys ( autopilot, f_bar, f_foo, f_goodbye, f_hello, t_artist, t_cd)
+        VALUES (
+          ?, ?, ?, ?, ?,
+          ( SELECT me.artist FROM twokeys me WHERE artist = ? AND cd = ? ),
+          ( SELECT me.cd FROM twokeys me WHERE artist = ? AND cd = ? )
+        )
+      ',
+      '__BULK_INSERT__'
+    ],
+    [ 'COMMIT' ],
+  ],
+  'multicol-PK has_many populate expected trace'
+);
 
 lives_ok ( sub {
   $schema->populate('CD', [
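Several hunks in this changeset replace the old DBIC::DebugObj/debugcb plumbing with is_executed_sql_bind(), a helper available on the DBICTest schema (not public DBIx::Class API): it runs a coderef and compares the complete statement-plus-bind trace it produced, with '__BULK_INSERT__' standing in for the repeated per-row bind sets of a bulk insert. A minimal usage sketch modeled on the traces above (the exact SQL may differ by storage):

use strict;
use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest;

my $schema = DBICTest->init_schema();

$schema->is_executed_sql_bind(
  sub {
    # void-context populate takes the bulk-insert fast path
    $schema->resultset('Artist')->populate([
      [ qw(name rank) ],
      [ 'traced artist one', 1 ],
      [ 'traced artist two', 2 ],
    ]);
  },
  [
    [ 'BEGIN' ],
    [ 'INSERT INTO artist( name, rank ) VALUES( ?, ? )', '__BULK_INSERT__' ],
    [ 'COMMIT' ],
  ],
  'void-context populate runs a single bulk INSERT',
);

done_testing;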
@@ -12,6 +12,8 @@ use strict;
 use warnings;
 
 use Test::More;
+use Test::Warn;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
 
@@ -24,7 +26,7 @@ my $schema  = DBICTest->init_schema();
 my $art_rs  = $schema->resultset('Artist');
 my $cd_rs  = $schema->resultset('CD');
 
-my $restricted_art_rs  = $art_rs->search({rank => 42});
+my $restricted_art_rs  = $art_rs->search({ -and => [ rank => 42, charfield => { '=', \['(SELECT MAX(artistid) FROM artist) + ?', 6] } ] });
 
 ok( $schema, 'Got a Schema object');
 ok( $art_rs, 'Got Good Artist Resultset');
@@ -37,10 +39,10 @@ ok( $cd_rs, 'Got Good CD Resultset');
 
 SCHEMA_POPULATE1: {
 
-  ## Test to make sure that the old $schema->populate is using the new method
-  ## for $resultset->populate when in void context and with sub objects.
+  # throw a monkey wrench
+  my $post_jnap_monkeywrench = $schema->resultset('Artist')->find(1)->update({ name => undef });
 
-  $schema->populate('Artist', [
+  warnings_exist { $schema->populate('Artist', [
 
     [qw/name cds/],
     ["001First Artist", [
@@ -55,13 +57,13 @@ SCHEMA_POPULATE1: {
     [undef, [
       {title=>"004Title1", year=>2010}
     ]],
-  ]);
+  ]) } qr/\QFast-path populate() of non-uniquely identifiable rows with related data is not possible/;
 
   isa_ok $schema, 'DBIx::Class::Schema';
 
-  my ($undef, $artist1, $artist2, $artist3 ) = $schema->resultset('Artist')->search({
+  my ( $preexisting_undef, $artist1, $artist2, $artist3, $undef ) = $schema->resultset('Artist')->search({
     name=>["001First Artist","002Second Artist","003Third Artist", undef]},
-    {order_by=>'name ASC'})->all;
+    {order_by => { -asc => 'artistid' }})->all;
 
   isa_ok  $artist1, 'DBICTest::Artist';
   isa_ok  $artist2, 'DBICTest::Artist';
@@ -78,6 +80,8 @@ SCHEMA_POPULATE1: {
   ok $artist3->cds->count eq 1, "Got Right number of CDs for Artist3";
   ok $undef->cds->count eq 1, "Got Right number of CDs for Artist4";
 
+  $post_jnap_monkeywrench->delete;
+
   ARTIST1CDS: {
 
     my ($cd1, $cd2, $cd3) = $artist1->cds->search(undef, {order_by=>'year ASC'});
@@ -343,7 +347,9 @@ ARRAY_CONTEXT: {
     ]);
 
     ## Did it use the condition in the resultset?
+    $more_crap->discard_changes;
     cmp_ok( $more_crap->rank, '==', 42, "Got Correct rank for result object");
+    cmp_ok( $more_crap->charfield, '==', $more_crap->id + 5, "Got Correct charfield for result object");
   }
 }
 
@@ -473,7 +479,9 @@ VOID_CONTEXT: {
       },
     ];
 
-    $cd_rs->populate($cds);
+    warnings_exist {
+      $cd_rs->populate($cds)
+    } qr/\QFast-path populate() of belongs_to relationship data is not possible/;
 
     my ($cdA, $cdB) = $cd_rs->search(
       {title=>[sort map {$_->{title}} @$cds]},
@@ -513,7 +521,9 @@ VOID_CONTEXT: {
       },
     ];
 
-    $cd_rs->populate($cds);
+    warnings_exist {
+      $cd_rs->populate($cds);
+    } qr/\QFast-path populate() of belongs_to relationship data is not possible/;
 
     my ($cdA, $cdB, $cdC) = $cd_rs->search(
       {title=>[sort map {$_->{title}} @$cds]},
@@ -626,7 +636,9 @@ VOID_CONTEXT: {
     })->first;
 
     ## Did it use the condition in the resultset?
+    $more_crap->discard_changes;
     cmp_ok( $more_crap->rank, '==', 42, "Got Correct rank for result object");
+    cmp_ok( $more_crap->charfield, '==', $more_crap->id + 5, "Got Correct charfield for result object");
   }
 }
 
@@ -655,7 +667,11 @@ ARRAYREF_OF_ARRAYREF_STYLE: {
   is $cooler->name, 'Cooler', 'Correct Name';
   is $lamer->name, 'Lamer', 'Correct Name';
 
-  cmp_ok $cooler->rank, '==', 42, 'Correct Rank';
+  for ($cooler, $lamer) {
+    $_->discard_changes;
+    cmp_ok( $_->rank, '==', 42, "Got Correct rank for result object");
+    cmp_ok( $_->charfield, '==', $_->id + 5, "Got Correct charfield for result object");
+  }
 
   ARRAY_CONTEXT_WITH_COND_FROM_RS: {
 
@@ -666,7 +682,9 @@ ARRAYREF_OF_ARRAYREF_STYLE: {
     ]);
 
     ## Did it use the condition in the resultset?
+    $mega_lamer->discard_changes;
     cmp_ok( $mega_lamer->rank, '==', 42, "Got Correct rank for result object");
+    cmp_ok( $mega_lamer->charfield, '==', $mega_lamer->id + 5, "Got Correct charfield for result object");
   }
 
   VOID_CONTEXT_WITH_COND_FROM_RS: {
@@ -683,9 +701,30 @@ ARRAYREF_OF_ARRAYREF_STYLE: {
 
     ## Did it use the condition in the resultset?
     cmp_ok( $mega_lamer->rank, '==', 42, "Got Correct rank for result object");
+    cmp_ok( $mega_lamer->charfield, '==', $mega_lamer->id + 5, "Got Correct charfield for result object");
   }
 }
 
-ok(eval { $art_rs->populate([]); 1 }, "Empty populate runs but does nothing");
+EMPTY_POPULATE: {
+  foreach(
+    [ empty         => [] ],
+    [ columns_only  => [ [qw(name rank charfield)] ] ],
+  ) {
+    my ($desc, $arg) = @{$_};
+
+    $schema->is_executed_sql_bind( sub {
+
+      my $rs = $art_rs;
+      lives_ok { $rs->populate($arg); 1 } "$desc populate in void context lives";
+
+      my @r = $art_rs->populate($arg);
+      is_deeply( \@r, [], "$desc populate in list context returns empty list" );
+
+      my $r = $art_rs->populate($arg);
+      is( $r, undef, "$desc populate in scalar context returns undef" );
+
+    }, [], "$desc populate executed no statements" );
+  }
+}
 
 done_testing;
@@ -29,6 +29,7 @@ is_deeply (
       {
         result_class => 'DBIx::Class::ResultClass::HashRefInflator',
         prefetch => ['artist', { tracks => [qw/cd year1999cd year2000cd/] } ],
+        order_by => 'tracks.trackid',
       },
     )->all
   ],
@@ -39,6 +40,7 @@ is_deeply (
         result_class => 'DBIx::Class::ResultClass::HashRefInflator',
         prefetch => ['artist', { tracks => [qw/cd year1999cd year2000cd/] } ],
         columns => [qw/cdid single_track title/],   # to match the columns retrieved by the virtview
+        order_by => 'tracks.trackid',
       },
     )->all
   ],
@@ -2,11 +2,8 @@ use strict;
 use warnings;
 
 use Test::More;
-use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
 $schema->storage->sql_maker->quote_char('"');
@@ -15,27 +12,12 @@ my $rs = $schema->resultset ('Artist');
 my $last_obj = $rs->search ({}, { order_by => { -desc => 'artistid' }, rows => 1})->single;
 my $last_id = $last_obj ? $last_obj->artistid : 0;
 
-
-my ($sql, @bind);
-my $orig_debugobj = $schema->storage->debugobj;
-my $orig_debug = $schema->storage->debug;
-
-$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) );
-$schema->storage->debug (1);
-
 my $obj;
-lives_ok { $obj = $rs->create ({}) } 'Default insert successful';
-
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
-  'INSERT INTO "artist" DEFAULT VALUES',
-  [],
-  'Default-value insert correct SQL',
-);
+$schema->is_executed_sql_bind( sub {
+  $obj = $rs->create ({})
+}, [[
+  'INSERT INTO "artist" DEFAULT VALUES'
+]], 'Default-value insert correct SQL' );
 
 ok ($obj, 'Insert defaults ( $rs->create ({}) )' );
 
@@ -16,15 +16,7 @@ warnings_exist { DBICTest->init_schema( compose_connection => 1, sqlite_use_file
 
 cmp_ok(DBICTest->resultset('Artist')->count, '>', 0, 'count is valid');
 
-# cleanup globals so we do not trigger the leaktest
-for ( map { DBICTest->schema->class($_) } DBICTest->schema->sources ) {
-  $_->class_resolver(undef);
-  $_->resultset_instance(undef);
-  $_->result_source_instance(undef);
-}
-{
-  no warnings qw/redefine once/;
-  *DBICTest::schema = sub {};
-}
+# clean up the globally cached handle so we do not trigger the leaktest
+DBICTest->schema->storage->disconnect;
 
 done_testing;
@@ -21,6 +21,19 @@ use strict;
 use warnings;
 use Test::More;
 
+use lib qw(t/lib);
+use DBICTest::RunMode;
+
+plan skip_all => "Temporarily no smoke testing of Test::More 1.3xx alphas" if (
+  DBICTest::RunMode->is_smoker
+    and
+  eval { Test::More->VERSION("1.300") }
+    and
+  require ExtUtils::MakeMaker
+    and
+  MM->parse_version($INC{"Test/Builder.pm"}) =~ / ^ 1 \. 3.. ... \_ /x
+);
+
 my $TB = Test::More->builder;
 if ($ENV{DBICTEST_IN_PERSISTENT_ENV}) {
   # without this explicit close older TBs warn in END after a ->reset
@@ -45,8 +58,6 @@ if ($ENV{DBICTEST_IN_PERSISTENT_ENV}) {
   $TB->reset;
 }
 
-use lib qw(t/lib);
-use DBICTest::RunMode;
 use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry visit_refs);
 use Scalar::Util qw(weaken blessed reftype);
 use DBIx::Class;
@@ -6,6 +6,9 @@ BEGIN {
   # these envvars *will* bring in more stuff than the baseline
   delete @ENV{qw(DBICTEST_SQLT_DEPLOY DBIC_TRACE)};
 
+  # make sure extras do not load even when this is set
+  $ENV{PERL_STRICTURES_EXTRA} = 1;
+
   unshift @INC, 't/lib';
   require DBICTest::Util::OverrideRequire;
 
@@ -50,7 +53,7 @@ BEGIN {
       CORE::require('Test/More.pm');
       Test::More::fail ("Unexpected require of '$req' by $caller[0] ($caller[1] line $caller[2])");
 
-      if ($ENV{TEST_VERBOSE}) {
+      if ( $ENV{TEST_VERBOSE} or ! DBICTest::RunMode->is_plain ) {
         CORE::require('DBICTest/Util.pm');
         Test::More::diag( 'Require invoked' .  DBICTest::Util::stacktrace() );
       }
@@ -99,13 +102,16 @@ BEGIN {
     namespace::clean
     Try::Tiny
     Sub::Name
+    strictures
+    Sub::Defer
+    Sub::Quote
 
     Scalar::Util
     List::Util
-    Data::Compare
 
     Class::Accessor::Grouped
     Class::C3::Componentised
+    SQL::Abstract
   ));
 
   require DBICTest::Schema;
@@ -116,7 +122,9 @@ BEGIN {
 {
   register_lazy_loadable_requires(qw(
     Moo
-    Sub::Quote
+    Moo::Object
+    Method::Generate::Accessor
+    Method::Generate::Constructor
     Context::Preserve
   ));
 
@@ -129,7 +137,6 @@ BEGIN {
 {
   register_lazy_loadable_requires(qw(
     DBI
-    SQL::Abstract
     Hash::Merge
   ));
 
@@ -163,6 +170,12 @@ BEGIN {
   assert_no_missing_expected_requires();
 }
 
+# make sure we never loaded any of the strictures XS bullshit
+{
+  ok( ! exists $INC{ Module::Runtime::module_notional_filename($_) }, "$_ load never attempted" )
+    for qw(indirect multidimensional bareword::filehandles);
+}
+
 done_testing;
 
 sub register_lazy_loadable_requires {
@@ -1,21 +1,54 @@
-#!/usr/bin/env perl -T
-
-# the above line forces Test::Harness into taint-mode
-# DO NOT REMOVE
-
 use strict;
 use warnings;
+use Config;
+
+# there is talk of possible perl compilations where -T is fatal or just
+# doesn't work. We don't want to have the user deal with that.
+BEGIN { unless ($INC{'t/lib/DBICTest/WithTaint.pm'}) {
+
+  # it is possible the test itself is initially invoked in taint mode
+  # and with relative paths *and* with a relative $^X and some other
+  # craziness... in short: just be proactive
+  require File::Spec;
+
+  if (length $ENV{PATH}) {
+    ( $ENV{PATH} ) = join ( $Config{path_sep},
+      map { length($_) ? File::Spec->rel2abs($_) : () }
+        split /\Q$Config{path_sep}/, $ENV{PATH}
+    ) =~ /\A(.+)\z/;
+  }
+
+  my ($perl) = $^X =~ /\A(.+)\z/;
+
+  {
+    local $ENV{PATH} = "/nosuchrootbindir";
+    system( $perl => -T => -e => '
+      use warnings;
+      use strict;
+      eval { my $x = $ENV{PATH} . (kill (0)); 1 } or exit 42;
+      exit 0;
+    ');
+  }
+
+  if ( ($? >> 8) != 42 ) {
+    print "1..0 # SKIP Your perl does not seem to like/support -T...\n";
+    exit 0;
+  }
+
+  exec( $perl, qw( -I. -Mt::lib::DBICTest::WithTaint -T ), __FILE__ );
+}}
 
 # When in taint mode, PERL5LIB is ignored (but *not* unset)
 # Put it back in INC so that local-lib users can actually
-# run this test
-use Config;
-BEGIN {
-  for (map { defined $ENV{$_} ? $ENV{$_} : () } (qw/PERLLIB PERL5LIB/) ) {  # we unshift, so reverse precedence
-    my ($envvar) = ($_ =~ /^(.*)$/s);  # untaint
-    unshift @INC, map { length($_) ? $_ : () } (split /\Q$Config{path_sep}\E/, $envvar);
-  }
-}
+# run this test. Use lib.pm instead of an @INC unshift as
+# it will correctly add any arch subdirs encountered
+
+use lib (
+  grep { length }
+    map { split /\Q$Config{path_sep}\E/, (/^(.*)$/)[0] }  # untainting regex
+      grep { defined }
+        @ENV{qw(PERL5LIB PERLLIB)}  # precedence preserved by lib
+);
 
 # We need to specify 'lib' here as well because even if it was already in
 # @INC, the above will have put our local::lib in front of it, so now an
@@ -33,7 +66,7 @@ throws_ok (
   sub { $ENV{PATH} . (kill (0)) },
   qr/Insecure dependency in kill/,
   'taint mode active'
-);
+) if length $ENV{PATH};
 
 {
   package DBICTest::Taint::Classes;
@@ -70,4 +103,13 @@ throws_ok (
   }, 'Loading classes with Module::Find/load_namespaces worked in taint mode' );
 }
 
+# check that we can create a database and all
+{
+  my $s = DBICTest->init_schema( sqlite_use_file => 1 );
+  my $art = $s->resultset('Artist')->search({}, {
+    prefetch => 'cds', order_by => 'artistid',
+  })->next;
+  is ($art->artistid, 1, 'got artist');
+}
+
 done_testing;
@@ -36,8 +36,14 @@ use warnings;
 use Test::More;
 
 use lib 't/lib';
-use DBICTest;
 
+BEGIN {
+  require DBICTest::RunMode;
+  plan( skip_all => "Skipping test on plain module install" )
+    if DBICTest::RunMode->is_plain;
+}
+
+use DBICTest;
 use File::Find;
 use File::Spec;
 use B qw/svref_2object/;
@@ -87,6 +93,8 @@ my $skip_idx = { map { $_ => 1 } (
 
 my $has_moose = eval { require Moose::Util };
 
+Sub::Defer::undefer_all();
+
 # can't use Class::Inspector for the mundane parts as it does not
 # distinguish imports from anything else, what a crock of...
 # Moose is not always available either - hence just do it ourselves
@@ -143,9 +151,18 @@ for my $mod (@modules) {
             last;
           }
         }
-        fail ("${mod}::${name} appears to have entered inheritance chain by import into "
-            . ($via || 'UNKNOWN')
-        );
+
+        # exception time
+        if (
+          ( $name eq 'import' and $via = 'Exporter' )
+        ) {
+          pass("${mod}::${name} is a valid uncleaned import from ${name}");
+        }
+        else {
+          fail ("${mod}::${name} appears to have entered inheritance chain by import into "
+              . ($via || 'UNKNOWN')
+          );
+        }
       }
     }
 
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 use Test::Warn;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -131,6 +130,13 @@ throws_ok {
 
 is($schema->resultset("Artist")->count, 4, 'count ok');
 
+# test find on an unresolvable condition
+is(
+  $schema->resultset('Artist')->find({ artistid => [ -and => 1, 2 ]}),
+  undef
+);
+
+
 # test find_or_new
 {
   my $existing_obj = $schema->resultset('Artist')->find_or_new({
@@ -47,8 +47,10 @@ ok(@cd && !defined($cd[0]), 'Array contains an undef as only element');
 
 $cd = $schema->resultset("CD")->first;
 my $artist_rs = $schema->resultset("Artist")->search({ artistid => $cd->artist->artistid });
-$art = $artist_rs->find({ name => 'some other name' }, { key => 'primary' });
-ok($art, 'Artist found by key in the resultset');
+for my $key ('', 'primary') {
+  my $art = $artist_rs->find({ name => 'some other name' }, { $key ? (key => $key) : () });
+  is($art->artistid, $cd->get_column('artist'), "Artist found through @{[ $key ? 'explicit' : 'implicit' ]} key locked in the resultset");
+}
 
 # collapsing and non-collapsing are separate codepaths, thus the separate tests
 
@@ -11,7 +11,6 @@ use DBIx::Class::Optional::Dependencies ();
 
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
   unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
@@ -199,20 +198,6 @@ lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die';
     my $cd = $rs->next;
     is ($cd->artist->name, $artist->name, 'Prefetched artist');
   }, 'join does not throw (mysql 3 test)';
-
-  # induce a jointype override, make sure it works even if we don't have mysql3
-  local $schema->storage->sql_maker->{_default_jointype} = 'inner';
-  is_same_sql_bind (
-    $rs->as_query,
-    '(
-      SELECT `me`.`cdid`, `me`.`artist`, `me`.`title`, `me`.`year`, `me`.`genreid`, `me`.`single_track`,
-             `artist`.`artistid`, `artist`.`name`, `artist`.`rank`, `artist`.`charfield`
-        FROM cd `me`
-        INNER JOIN `artist` `artist` ON `artist`.`artistid` = `me`.`artist`
-    )',
-    [],
-    'overridden default join type works',
-  );
 }
 
 ## Can we properly deal with the null search problem?
@@ -299,15 +284,9 @@ NULLINSEARCH: {
 
   is ($rs->count, 10, '10 artists present');
 
-  my $orig_debug = $schema->storage->debug;
-  $schema->storage->debug(1);
-  my $query_count;
-  $schema->storage->debugcb(sub { $query_count++ });
-
-  $query_count = 0;
-  $complex_rs->delete;
-
-  is ($query_count, 1, 'One delete query fired');
+  $schema->is_executed_querycount( sub {
+    $complex_rs->delete;
+  }, 1, 'One delete query fired' );
   is ($rs->count, 0, '10 Artists correctly deleted');
 
   $rs->create({
@@ -316,15 +295,13 @@ NULLINSEARCH: {
   });
   is ($rs->count, 1, 'Artist with cd created');
 
-  $query_count = 0;
-  $schema->resultset('CD')->search_related('artist',
-    { 'artist.name' => { -like => 'baby_with_%' } }
-  )->delete;
-  is ($query_count, 1, 'And one more delete query fired');
-  is ($rs->count, 0, 'Artist with cd deleted');
 
-  $schema->storage->debugcb(undef);
-  $schema->storage->debug($orig_debug);
+  $schema->is_executed_querycount( sub {
+    $schema->resultset('CD')->search_related('artist',
+      { 'artist.name' => { -like => 'baby_with_%' } }
+    )->delete;
+  }, 1, 'And one more delete query fired');
+  is ($rs->count, 0, 'Artist with cd deleted');
 }
 
 ZEROINSEARCH: {
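Similarly, the manual debug()/debugcb() counter juggling is replaced by is_executed_querycount(), another DBICTest-schema helper (not public API) that runs a coderef and asserts how many statements reached the database. A minimal hedged sketch:

use strict;
use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest;

my $schema = DBICTest->init_schema();

# run the block and assert on the number of queries it issued,
# with no need to juggle debug()/debugcb() by hand
$schema->is_executed_querycount( sub {
  $schema->resultset('Artist')
         ->search({ name => { -like => 'nonexistent artist %' } })
         ->delete;
}, 1, 'restricted delete issued a single DELETE' );

done_testing;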
@@ -375,8 +352,8 @@ ZEROINSEARCH: {
   ]});
 
   warnings_exist { is_deeply (
-    [ $restrict_rs->get_column('y')->all ],
-    [ $y_rs->all ],
+    [ sort $restrict_rs->get_column('y')->all ],
+    [ sort $y_rs->all ],
     'Zero year was correctly excluded from resultset',
   ) } qr/
     \QUse of distinct => 1 while selecting anything other than a column \E
@@ -4,9 +4,12 @@ use warnings;
 use Test::More;
 use Test::Exception;
 use Sub::Name;
+use Config;
 use DBIx::Class::Optional::Dependencies ();
 use lib qw(t/lib);
 use DBICTest;
+use SQL::Abstract 'is_literal_value';
+use DBIx::Class::_Util 'is_exception';
 
 plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg')
   unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg');
@@ -248,7 +251,7 @@ for my $use_insert_returning ($test_server_supports_insert_returning
     lives_ok {
       is_deeply (
         $arr_rs->search({ arrayfield => { '=' => { -value => [3,4] }} })->first->arrayfield,
-        [3,4],,
+        [3,4],
         'Array value matches explicit equal'
       );
     } 'searching by arrayref (explicit equal sign)';
@@ -291,7 +294,10 @@ for my $use_insert_returning ($test_server_supports_insert_returning
       { -value => [3,4] },
       \[ '= ?' => [arrayfield => [3, 4]] ],
     ) {
-      local $TODO = 'No introspection of complex conditions :(';
+      local $TODO = 'No introspection of complex literal conditions :('
+        if is_literal_value $cond;
+
+
       my $arr_rs_cond = $arr_rs->search({ arrayfield => $cond });
 
       my $row = $arr_rs_cond->create({});
@@ -335,14 +341,9 @@ my $cds = $artist->cds_unordered->search({
 lives_ok { $cds->update({ year => '2010' }) } 'Update on prefetched rs';
 
 ## Test SELECT ... FOR UPDATE
-
   SKIP: {
-      if(eval { require Sys::SigAction }) {
-          Sys::SigAction->import( 'set_sig_handler' );
-      }
-      else {
-        skip "Sys::SigAction is not available", 6;
-      }
+      skip "Your system does not support unsafe signals (d_sigaction) - unable to run deadlock test", 1
+        unless eval { $Config{d_sigaction} and require POSIX };
 
       my ($timed_out, $artist2);
 
@@ -381,15 +382,28 @@ lives_ok { $cds->update({ year => '2010' }) } 'Update on prefetched rs';
           is($artist->artistid, 1, "select returns artistid = 1");
 
           $timed_out = 0;
+
           eval {
-              my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
+              # can not use %SIG assignment directly - we need sigaction below
+              # localization to a block still works however
+              local $SIG{ALRM};
+
+              POSIX::sigaction( POSIX::SIGALRM() => POSIX::SigAction->new(
+                sub { die "DBICTestTimeout" },
+              ));
+
               alarm(2);
               $artist2 = $schema2->resultset('Artist')->find(1);
               $artist2->name('fooey');
               $artist2->update;
-              alarm(0);
           };
-          $timed_out = $@ =~ /DBICTestTimeout/;
+
+          alarm(0);
+
+          if (is_exception($@)) {
+            $timed_out = $@ =~ /DBICTestTimeout/
+              or die $@;
+          }
         });
 
         $t->{test_sub}->();
@@ -9,7 +9,6 @@ use DBIx::Class::Optional::Dependencies ();
 
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my ($dsn,  $user,  $pass)  = @ENV{map { "DBICTEST_ORA_${_}" }  qw/DSN USER PASS/};
 
@@ -9,7 +9,6 @@ use DBIx::Class::Optional::Dependencies ();
 
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my ($dsn,  $user,  $pass)  = @ENV{map { "DBICTEST_ORA_${_}" }  qw/DSN USER PASS/};
 
@@ -78,17 +77,12 @@ SKIP: {
               . ': https://rt.cpan.org/Ticket/Display.html?id=64206'
     if $q;
 
-  # so we can disable BLOB mega-output
-  my $orig_debug = $schema->storage->debug;
-
   my $id;
   foreach my $size (qw( small large )) {
     $id++;
 
-    local $schema->storage->{debug} = $size eq 'large'
-      ? 0
-      : $orig_debug
-    ;
+    local $schema->storage->{debug} = 0
+      if $size eq 'large';
 
     my $str = $binstr{$size};
     lives_ok {
@@ -154,8 +148,6 @@ SKIP: {
     @objs = $rs->search({ blob => "re-updated blob", clob => 're-updated clob' })->all;
     is @objs, 0, 'row deleted successfully';
   }
-
-  $schema->storage->debug ($orig_debug);
 }
 
   do_clean ($dbh);
@@ -3,9 +3,14 @@ use warnings;
 
 use Test::Exception;
 use Test::More;
+
+# I *strongly* suspect Oracle has an implicit stable output order when
+# dealing with HQs. So just punt on the entire shuffle thing.
+BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 }
+
+
 use DBIx::Class::Optional::Dependencies ();
 use lib qw(t/lib);
-use DBICTest::RunMode;
 
 $ENV{NLS_SORT} = "BINARY";
 $ENV{NLS_COMP} = "BINARY";
@@ -280,8 +280,8 @@ SQL
           my $sealed_owners = $owners->as_subselect_rs;
 
           is_deeply (
-            [ map { $_->name } ($sealed_owners->all) ],
-            [ map { $_->name } ($owners->all) ],
+            [ sort map { $_->name } ($sealed_owners->all) ],
+            [ sort map { $_->name } ($owners->all) ],
             "$test_type: Sort preserved from within a subquery",
           );
         }
@@ -331,20 +331,13 @@ SQL
           is ($limited_rs->count, 6, "$test_type: Correct count of limited right-sorted joined resultset");
           is ($limited_rs->count_rs->next, 6, "$test_type: Correct count_rs of limited right-sorted joined resultset");
 
-          my $queries;
-          my $orig_debug = $schema->storage->debug;
-          $schema->storage->debugcb(sub { $queries++; });
-          $schema->storage->debug(1);
-
-          is_deeply (
-            [map { $_->owner->name } ($limited_rs->all) ],
-            [@owner_names[2 .. 7]],
-            "$test_type: Prefetch-limited rows were properly ordered"
-          );
-          is ($queries, 1, "$test_type: Only one query with prefetch");
-
-          $schema->storage->debugcb(undef);
-          $schema->storage->debug($orig_debug);
+          $schema->is_executed_querycount( sub {
+            is_deeply (
+              [map { $_->owner->name } ($limited_rs->all) ],
+              [@owner_names[2 .. 7]],
+              "$test_type: Prefetch-limited rows were properly ordered"
+            );
+          }, 1, "$test_type: Only one query with prefetch" );
 
           is_deeply (
             [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
@@ -207,9 +207,9 @@ SQL
     name => { -like => 'bulk artist %' }
   });
 
-# test insert_bulk using populate.
+# test _insert_bulk using populate.
   SKIP: {
-    skip 'insert_bulk not supported', 4
+    skip '_insert_bulk not supported', 4
       unless $storage_type !~ /NoBindVars/i;
 
     lives_ok {
@@ -227,25 +227,25 @@ SQL
           charfield => 'foo',
         },
       ]);
-    } 'insert_bulk via populate';
+    } '_insert_bulk via populate';
 
-    is $bulk_rs->count, 3, 'correct number inserted via insert_bulk';
+    is $bulk_rs->count, 3, 'correct number inserted via _insert_bulk';
 
     is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
-      'column set correctly via insert_bulk');
+      'column set correctly via _insert_bulk');
 
     my %bulk_ids;
     @bulk_ids{map $_->artistid, $bulk_rs->all} = ();
 
     is ((scalar keys %bulk_ids), 3,
-      'identities generated correctly in insert_bulk');
+      'identities generated correctly in _insert_bulk');
 
     $bulk_rs->delete;
   }
 
-# make sure insert_bulk works a second time on the same connection
+# make sure _insert_bulk works a second time on the same connection
   SKIP: {
-    skip 'insert_bulk not supported', 3
+    skip '_insert_bulk not supported', 3
       unless $storage_type !~ /NoBindVars/i;
 
     lives_ok {
@@ -263,20 +263,20 @@ SQL
           charfield => 'bar',
         },
       ]);
-    } 'insert_bulk via populate called a second time';
+    } '_insert_bulk via populate called a second time';
 
     is $bulk_rs->count, 3,
-      'correct number inserted via insert_bulk';
+      'correct number inserted via _insert_bulk';
 
     is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3,
-      'column set correctly via insert_bulk');
+      'column set correctly via _insert_bulk');
 
     $bulk_rs->delete;
   }
 
-# test invalid insert_bulk (missing required column)
+# test invalid _insert_bulk (missing required column)
 #
-# There should be a rollback, reconnect and the next valid insert_bulk should
+# There should be a rollback, reconnect and the next valid _insert_bulk should
 # succeed.
   throws_ok {
     $schema->resultset('Artist')->populate([
@@ -288,11 +288,11 @@ SQL
 # The second pattern is the error from fallback to regular array insert on
 # incompatible charset.
 # The third is for ::NoBindVars with no syb_has_blk.
-  'insert_bulk with missing required column throws error';
+  '_insert_bulk with missing required column throws error';
 
-# now test insert_bulk with IDENTITY_INSERT
+# now test _insert_bulk with IDENTITY_INSERT
   SKIP: {
-    skip 'insert_bulk not supported', 3
+    skip '_insert_bulk not supported', 3
       unless $storage_type !~ /NoBindVars/i;
 
     lives_ok {
@@ -313,13 +313,13 @@ SQL
           charfield => 'foo',
         },
       ]);
-    } 'insert_bulk with IDENTITY_INSERT via populate';
+    } '_insert_bulk with IDENTITY_INSERT via populate';
 
     is $bulk_rs->count, 3,
-      'correct number inserted via insert_bulk with IDENTITY_INSERT';
+      'correct number inserted via _insert_bulk with IDENTITY_INSERT';
 
     is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3,
-      'column set correctly via insert_bulk with IDENTITY_INSERT');
+      'column set correctly via _insert_bulk with IDENTITY_INSERT');
 
     $bulk_rs->delete;
   }
@@ -434,7 +434,7 @@ SQL
 
     $rs->delete;
 
-    # now try insert_bulk with blobs and only blobs
+    # now try _insert_bulk with blobs and only blobs
     $new_str = $binstr{large} . 'bar';
     lives_ok {
       $rs->populate([
@@ -447,18 +447,18 @@ SQL
           clob => $new_str,
         },
       ]);
-    } 'insert_bulk with blobs does not die';
+    } '_insert_bulk with blobs does not die';
 
     is((grep $_->blob eq $binstr{large}, $rs->all), 2,
-      'IMAGE column set correctly via insert_bulk');
+      'IMAGE column set correctly via _insert_bulk');
 
     is((grep $_->clob eq $new_str, $rs->all), 2,
-      'TEXT column set correctly via insert_bulk');
+      'TEXT column set correctly via _insert_bulk');
 
-    # now try insert_bulk with blobs and a non-blob which also happens to be an
+    # now try _insert_bulk with blobs and a non-blob which also happens to be an
     # identity column
     SKIP: {
-      skip 'no insert_bulk without placeholders', 4
+      skip 'no _insert_bulk without placeholders', 4
         if $storage_type =~ /NoBindVars/i;
 
       $rs->delete;
@@ -480,16 +480,16 @@ SQL
             a_memo => 2,
           },
         ]);
-      } 'insert_bulk with blobs and explicit identity does NOT die';
+      } '_insert_bulk with blobs and explicit identity does NOT die';
 
       is((grep $_->blob eq $binstr{large}, $rs->all), 2,
-        'IMAGE column set correctly via insert_bulk with identity');
+        'IMAGE column set correctly via _insert_bulk with identity');
 
       is((grep $_->clob eq $new_str, $rs->all), 2,
-        'TEXT column set correctly via insert_bulk with identity');
+        'TEXT column set correctly via _insert_bulk with identity');
 
       is_deeply [ map $_->id, $rs->all ], [ 1,2 ],
-        'explicit identities set correctly via insert_bulk with blobs';
+        'explicit identities set correctly via _insert_bulk with blobs';
     }
 
     lives_and {
@@ -223,9 +223,7 @@ is $row->artistid, $current_artistid+1,
 my $rs = $schema->resultset('VaryingMAX');
 
 foreach my $size (qw/small large/) {
-  my $orig_debug = $schema->storage->debug;
-
-  $schema->storage->debug(0) if $size eq 'large';
+  local $schema->storage->{debug} = 0 if $size eq 'large';
 
   my $str = $binstr{$size};
   my $row;
@@ -242,8 +240,6 @@ foreach my $size (qw/small large/) {
   cmp_ok try { $row->varchar_max },   'eq', $str, 'VARCHAR(MAX) matches';
   cmp_ok try { $row->nvarchar_max },  'eq', $str, 'NVARCHAR(MAX) matches';
   cmp_ok try { $row->varbinary_max }, 'eq', $str, 'VARBINARY(MAX) matches';
-
-  $schema->storage->debug($orig_debug);
 }
 
 # test regular blobs
@@ -1,13 +1,6 @@
 use strict;
 use warnings;
 
-# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
-BEGIN {
-  if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
-    unshift @INC, $_ for split /:/, $lib_dirs;
-  }
-}
-
 use Test::More;
 use Test::Exception;
 use Scalar::Util 'weaken';
@@ -8,8 +8,6 @@ use Try::Tiny;
 use DBIx::Class::Optional::Dependencies ();
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj ();
-use DBIC::SqlMakerTest;
 
 my ($dsn,  $user,  $pass)  = @ENV{map { "DBICTEST_MSACCESS_ODBC_${_}" } qw/DSN USER PASS/};
 my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSACCESS_ADO_${_}" }  qw/DSN USER PASS/};
@@ -144,12 +142,7 @@ EOF
     title => 'my track',
   });
 
-  my ($sql, @bind);
-
   my $joined_track = try {
-    local $schema->storage->{debug} = 1;
-    local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind);
-
     $schema->resultset('Artist')->search({
       artistid => $first_artistid,
     }, {
@@ -162,27 +155,10 @@ EOF
     diag "Could not execute two-step left join: $_";
   };
 
-  s/^'//, s/'\z// for @bind;
-
-  # test is duplicated in t/sqlmaker/msaccess.t, keep a duplicate here anyway, just to be safe
-  # -- ribasushi
-  is_same_sql_bind(
-    $sql,
-    \@bind,
-    'SELECT [me].[artistid], [me].[name], [me].[rank], [me].[charfield], [tracks].[title] FROM ( ( [artist] [me] LEFT JOIN cd [cds] ON [cds].[artist] = [me].[artistid] ) LEFT JOIN [track] [tracks] ON [tracks].[cd] = [cds].[cdid] ) WHERE ( [artistid] = ? )',
-    [1],
-    'correct SQL for two-step left join',
-  );
-
   is try { $joined_track->get_column('track_title') }, 'my track',
     'two-step left join works';
 
-  ($sql, @bind) = ();
-
   $joined_artist = try {
-    local $schema->storage->{debug} = 1;
-    local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind);
-
     $schema->resultset('Track')->search({
       trackid => $track->trackid,
     }, {
@@ -195,18 +171,6 @@ EOF
     diag "Could not execute two-step inner join: $_";
   };
 
-  s/^'//, s/'\z// for @bind;
-
-  # test is duplicated in t/sqlmaker/msaccess.t, keep a duplicate here anyway, just to be safe
-  # -- ribasushi
-  is_same_sql_bind(
-    $sql,
-    \@bind,
-    'SELECT [me].[trackid], [me].[cd], [me].[position], [me].[title], [me].[last_updated_on], [me].[last_updated_at], [artist].[name] FROM ( ( [track] [me] INNER JOIN cd [cd] ON [cd].[cdid] = [me].[cd] ) INNER JOIN [artist] [artist] ON [artist].[artistid] = [cd].[artist] ) WHERE ( [trackid] = ? )',
-    [$track->trackid],
-    'correct SQL for two-step inner join',
-  );
-
   is try { $joined_artist->get_column('artist_name') }, 'foo',
     'two-step inner join works';
 
@@ -11,40 +11,6 @@ use lib qw(t/lib);
 use DBICTest;
 use DBIx::Class::_Util qw(sigwarn_silencer modver_gt_or_eq);
 
-# savepoints test
-{
-  my $schema = DBICTest->init_schema(auto_savepoint => 1);
-
-  my $ars = $schema->resultset('Artist');
-
-  # test two-phase commit and inner transaction rollback from nested transactions
-  $schema->txn_do(sub {
-    $ars->create({ name => 'in_outer_transaction' });
-    $schema->txn_do(sub {
-      $ars->create({ name => 'in_inner_transaction' });
-    });
-    ok($ars->search({ name => 'in_inner_transaction' })->first,
-      'commit from inner transaction visible in outer transaction');
-    throws_ok {
-      $schema->txn_do(sub {
-        $ars->create({ name => 'in_inner_transaction_rolling_back' });
-        die 'rolling back inner transaction';
-      });
-    } qr/rolling back inner transaction/, 'inner transaction rollback executed';
-    $ars->create({ name => 'in_outer_transaction2' });
-  });
-
-  ok($ars->search({ name => 'in_outer_transaction' })->first,
-    'commit from outer transaction');
-  ok($ars->search({ name => 'in_outer_transaction2' })->first,
-    'second commit from outer transaction');
-  ok($ars->search({ name => 'in_inner_transaction' })->first,
-    'commit from inner transaction');
-  is $ars->search({ name => 'in_inner_transaction_rolling_back' })->first,
-    undef,
-    'rollback from inner transaction';
-}
-
 # check that we work somewhat OK with braindead SQLite transaction handling
 #
 # As per https://metacpan.org/source/ADAMK/DBD-SQLite-1.37/lib/DBD/SQLite.pm#L921
@@ -53,6 +19,7 @@ use DBIx::Class::_Util qw(sigwarn_silencer modver_gt_or_eq);
 # However DBD::SQLite 1.38_02 seems to fix this, with an accompanying test:
 # https://metacpan.org/source/ADAMK/DBD-SQLite-1.38_02/t/54_literal_txn.t
 
+require DBD::SQLite;
 my $lit_txn_todo = modver_gt_or_eq('DBD::SQLite', '1.38_02')
   ? undef
   : "DBD::SQLite before 1.38_02 is retarded wrt detecting literal BEGIN/COMMIT statements"
@@ -124,6 +91,46 @@ DDL
   }
 }
 
+# test blank begin/svp/commit/begin cycle
+warnings_are {
+  my $schema = DBICTest->init_schema( no_populate => 1 );
+  my $rs = $schema->resultset('Artist');
+  is ($rs->count, 0, 'Start with empty table');
+
+  for my $do_commit (1, 0) {
+    $schema->txn_begin;
+    $schema->svp_begin;
+    $schema->svp_rollback;
+
+    $schema->svp_begin;
+    $schema->svp_rollback;
+
+    $schema->svp_release;
+
+    $schema->svp_begin;
+
+    $schema->txn_rollback;
+
+    $schema->txn_begin;
+    $schema->svp_begin;
+    $schema->svp_rollback;
+
+    $schema->svp_begin;
+    $schema->svp_rollback;
+
+    $schema->svp_release;
+
+    $schema->svp_begin;
+
+    $do_commit ? $schema->txn_commit : $schema->txn_rollback;
+
+    is_deeply $schema->storage->savepoints, [], 'Savepoint names cleared away'
+  }
+
+  $schema->txn_do(sub {
+    ok (1, 'all seems fine');
+  });
+} [], 'No warnings emitted';
 
 my $schema = DBICTest->init_schema();
 
@@ -215,7 +222,20 @@ for my $bi ( qw(
   my $v_desc = sprintf '%s (%d bit signed int)', $bi, $v_bits;
 
   my @w;
-  local $SIG{__WARN__} = sub { $_[0] =~ /datatype mismatch/ ? push @w, @_ : warn @_ };
+  local $SIG{__WARN__} = sub {
+    if ($_[0] =~ /datatype mismatch/) {
+      push @w, @_;
+    }
+    elsif ($_[0] =~ /An integer value occupying more than 32 bits was supplied .+ can not bind properly so DBIC will treat it as a string instead/ ) {
+      # do nothing, this warning will pop up here and there depending on
+      # DBD/bitness combination
+      # we don't want to test for it explicitly, we are just interested
+      # in the results matching at the end
+    }
+    else {
+      warn @_;
+    }
+  };
 
   # some combinations of SQLite 1.35 and the older 5.8 family are wonky
   # instead of a warning we get a full exception. Sod it
@@ -3,110 +3,19 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
-my $orig_debug = $schema->storage->debug;
-
-# test the abstract join => SQL generator
-my $sa = new DBIx::Class::SQLMaker;
-
-my @j = (
-    { child => 'person' },
-    [ { father => 'person' }, { 'father.person_id' => 'child.father_id' }, ],
-    [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
-);
-my $match = 'person child JOIN person father ON ( father.person_id = '
-          . 'child.father_id ) JOIN person mother ON ( mother.person_id '
-          . '= child.mother_id )'
-          ;
-is_same_sql(
-  $sa->_recurse_from(@j),
-  $match,
-  'join 1 ok'
-);
-
-my @j2 = (
-    { mother => 'person' },
-    [   [   { child => 'person' },
-            [   { father             => 'person' },
-                { 'father.person_id' => 'child.father_id' }
-            ]
-        ],
-        { 'mother.person_id' => 'child.mother_id' }
-    ],
-);
-$match = 'person mother JOIN (person child JOIN person father ON ('
-       . ' father.person_id = child.father_id )) ON ( mother.person_id = '
-       . 'child.mother_id )'
-       ;
-is_same_sql(
-  $sa->_recurse_from(@j2),
-  $match,
-  'join 2 ok'
-);
-
-
-my @j3 = (
-    { child => 'person' },
-    [ { father => 'person', -join_type => 'inner' }, { 'father.person_id' => 'child.father_id' }, ],
-    [ { mother => 'person', -join_type => 'inner'  }, { 'mother.person_id' => 'child.mother_id' } ],
-);
-$match = 'person child INNER JOIN person father ON ( father.person_id = '
-          . 'child.father_id ) INNER JOIN person mother ON ( mother.person_id '
-          . '= child.mother_id )'
-          ;
-
-is_same_sql(
-  $sa->_recurse_from(@j3),
-  $match,
-  'join 3 (inner join) ok'
-);
-
-my @j4 = (
-    { mother => 'person' },
-    [   [   { child => 'person', -join_type => 'left' },
-            [   { father             => 'person', -join_type => 'right' },
-                { 'father.person_id' => 'child.father_id' }
-            ]
-        ],
-        { 'mother.person_id' => 'child.mother_id' }
-    ],
-);
-$match = 'person mother LEFT JOIN (person child RIGHT JOIN person father ON ('
-       . ' father.person_id = child.father_id )) ON ( mother.person_id = '
-       . 'child.mother_id )'
-       ;
-is_same_sql(
-  $sa->_recurse_from(@j4),
-  $match,
-  'join 4 (nested joins + join types) ok'
-);
-
-my @j5 = (
-    { child => 'person' },
-    [ { father => 'person' }, { 'father.person_id' => \'!= child.father_id' }, ],
-    [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
-);
-$match = 'person child JOIN person father ON ( father.person_id != '
-          . 'child.father_id ) JOIN person mother ON ( mother.person_id '
-          . '= child.mother_id )'
-          ;
-is_same_sql(
-  $sa->_recurse_from(@j5),
-  $match,
-  'join 5 (SCALAR reference for ON statement) ok'
-);
-
 my $rs = $schema->resultset("CD")->search(
            { 'year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
-           { from => [ { 'me' => 'cd' },
-                         [
-                           { artist => 'artist' },
-                           { 'me.artist' => 'artist.artistid' }
-                         ] ] }
+           { from => [
+              { 'me' => 'cd' },
+              [
+                { artist => 'artist' },
+                { 'me.artist' => { -ident => 'artist.artistid' } },
+              ],
+           ] }
          );
 
 is( $rs + 0, 1, "Single record in resultset");
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 use Test::Exception;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -6,8 +6,6 @@ use Test::Exception;
 use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
 
 my $schema = DBICTest->init_schema();
 
@@ -228,23 +226,12 @@ is($row->baz, 3, 'baz is correct');
 {
   my $artist = $schema->resultset('Artist')->find(1);
 
-  my ($sql, @bind);
-  my $old_debugobj = $schema->storage->debugobj;
-  my $old_debug = $schema->storage->debug;
-  $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)),
-  $schema->storage->debug(1);
-
-  $artist->discard_changes;
-
-  is_same_sql_bind (
-    $sql,
-    \@bind,
-    'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?',
-    [qw/'1'/],
-  );
-
-  $schema->storage->debug($old_debug);
-  $schema->storage->debugobj($old_debugobj);
+  $schema->is_executed_sql_bind( sub { $artist->discard_changes }, [
+    [
+      'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?',
+      [ { dbic_colname => "me.artistid", sqlt_datatype => "integer" } => 1 ],
+    ]
+  ], 'Expected query on discard_changes');
 }
 
 {
@@ -17,10 +17,24 @@ cmp_ok($cover_band->id, '!=', $artist->id, 'ok got new column id...');
 is($cover_cds->count, $artist_cds->count, 'duplicated rows count ok');
 
 #check multi-keyed
-cmp_ok($cover_band->search_related('twokeys')->count, '>', 0, 'duplicated multiPK ok');
+is(
+  $cover_band->search_related('twokeys')->count,
+  $artist->search_related('twokeys')->count,
+  'duplicated multiPK ok'
+);
 
 #and check copying a few relations away
 cmp_ok($cover_cds->search_related('tags')->count, '==',
    $artist_cds->search_related('tags')->count , 'duplicated count ok');
 
+
+# check from the other side
+my $cd = $schema->resultset('CD')->find(1);
+my $dup_cd = $cd->copy ({ title => 'ha!' });
+is(
+  $dup_cd->search_related('twokeys')->count,
+  $cd->search_related('twokeys')->count,
+  'duplicated multiPK ok'
+);
+
 done_testing;
@@ -7,12 +7,6 @@ use DBICTest;
 
 my $schema = DBICTest->init_schema();
 
-my $queries;
-my $debugcb = sub{ $queries++ };
-my $sdebug = $schema->storage->debug;
-
-plan tests => 23;
-
 my $rs = $schema->resultset("Artist")->search(
   { artistid => 1 }
 );
@@ -43,18 +37,12 @@ my $cd = $schema->resultset('CD')->find(1);
 
 $rs->clear_cache;
 
-$queries = 0;
-$schema->storage->debug(1);
-$schema->storage->debugcb ($debugcb);
-
-$rs = $schema->resultset('Artist')->search( undef, { cache => 1 } );
-while( $artist = $rs->next ) {}
-$artist = $rs->first();
-
-is( $queries, 1, 'revisiting a row does not issue a query when cache => 1' );
+$schema->is_executed_querycount( sub {
 
-$schema->storage->debug($sdebug);
-$schema->storage->debugcb (undef);
+  $rs = $schema->resultset('Artist')->search( undef, { cache => 1 } );
+  while( $artist = $rs->next ) {}
+  $artist = $rs->first();
+}, 1, 'revisiting a row does not issue a query when cache => 1' );
 
 my @a = $schema->resultset("Artist")->search(
   { },
@@ -74,33 +62,28 @@ $rs = $schema->resultset("Artist")->search(
   }
 );
 
-# start test for prefetch SELECT count
-$queries = 0;
-$schema->storage->debug(1);
-$schema->storage->debugcb ($debugcb);
-
-$artist = $rs->first;
-$rs->reset();
+# prefetch SELECT count
+$schema->is_executed_querycount( sub {
+  $artist = $rs->first;
+  $rs->reset();
 
-# make sure artist contains a related resultset for cds
-isa_ok( $artist->{related_resultsets}{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' );
+  # make sure artist contains a related resultset for cds
+  isa_ok( $artist->{related_resultsets}{cds}, 'DBIx::Class::ResultSet', 'artist has a related_resultset for cds' );
 
-# check if $artist->cds->get_cache is populated
-is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records');
+  # check if $artist->cds->get_cache is populated
+  is( scalar @{$artist->cds->get_cache}, 3, 'cache for artist->cds contains correct number of records');
 
-# ensure that $artist->cds returns correct number of objects
-is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' );
+  # ensure that $artist->cds returns correct number of objects
+  is( scalar ($artist->cds), 3, 'artist->cds returns correct number of objects' );
 
-# ensure that $artist->cds->count returns correct value
-is( $artist->cds->count, 3, 'artist->cds->count returns correct value' );
+  # ensure that $artist->cds->count returns correct value
+  is( $artist->cds->count, 3, 'artist->cds->count returns correct value' );
 
-# ensure that $artist->count_related('cds') returns correct value
-is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' );
+  # ensure that $artist->count_related('cds') returns correct value
+  is( $artist->count_related('cds'), 3, 'artist->count_related returns correct value' );
 
-is($queries, 1, 'only one SQL statement executed');
+}, 1, 'only one SQL statement executed');
 
-$schema->storage->debug($sdebug);
-$schema->storage->debugcb (undef);
 
 # make sure related_resultset is deleted after object is updated
 $artist->set_column('name', 'New Name');
@@ -131,57 +114,44 @@ is($artist->cds, 0, 'No cds for this artist');
 }
 
 # SELECT count for nested has_many prefetch
-$queries = 0;
-$schema->storage->debug(1);
-$schema->storage->debugcb ($debugcb);
-
-$artist = ($rs->all)[0];
-
-is($queries, 1, 'only one SQL statement executed');
-
-$queries = 0;
-
-my @objs;
-my $cds = $artist->cds;
-my $tags = $cds->next->tags;
-while( my $tag = $tags->next ) {
-  push @objs, $tag->tagid; #warn "tag:", $tag->ID, " => ", $tag->tag;
-}
-
-is_deeply( \@objs, [ 3 ], 'first cd has correct tags' );
-
-$tags = $cds->next->tags;
-@objs = ();
-while( my $tag = $tags->next ) {
-  push @objs, $tag->id; #warn "tag: ", $tag->ID;
-}
-
-is_deeply( [ sort @objs] , [ 2, 5, 8 ], 'third cd has correct tags' );
-
-$tags = $cds->next->tags;
-@objs = ();
-while( my $tag = $tags->next ) {
-  push @objs, $tag->id; #warn "tag: ", $tag->ID;
-}
-
-is_deeply( \@objs, [ 1 ], 'second cd has correct tags' );
+$schema->is_executed_querycount( sub {
+  $artist = ($rs->all)[0];
+}, 1, 'only one SQL statement executed');
+
+$schema->is_executed_querycount( sub {
+  my @objs;
+  my $cds = $artist->cds;
+  my $tags = $cds->next->tags;
+  while( my $tag = $tags->next ) {
+    push @objs, $tag->tagid; #warn "tag:", $tag->ID, " => ", $tag->tag;
+  }
 
-is( $queries, 0, 'no additional SQL statements while checking nested data' );
+  is_deeply( \@objs, [ 3 ], 'first cd has correct tags' );
 
-# start test for prefetch SELECT count
-$queries = 0;
+  $tags = $cds->next->tags;
+  @objs = ();
+  while( my $tag = $tags->next ) {
+    push @objs, $tag->id; #warn "tag: ", $tag->ID;
+  }
 
-$artist = $schema->resultset('Artist')->find(1, { prefetch => [qw/cds/] });
+  is_deeply( [ sort @objs] , [ 2, 5, 8 ], 'third cd has correct tags' );
 
-is( $queries, 1, 'only one select statement on find with inline has_many prefetch' );
+  $tags = $cds->next->tags;
+  @objs = ();
+  while( my $tag = $tags->next ) {
+    push @objs, $tag->id; #warn "tag: ", $tag->ID;
+  }
 
-# start test for prefetch SELECT count
-$queries = 0;
+  is_deeply( \@objs, [ 1 ], 'second cd has correct tags' );
+}, 0, 'no additional SQL statements while checking nested data' );
 
-$rs = $schema->resultset('Artist')->search(undef, { prefetch => [qw/cds/] });
-$artist = $rs->find(1);
+$schema->is_executed_querycount( sub {
+  $artist = $schema->resultset('Artist')->find(1, { prefetch => [qw/cds/] });
+}, 1, 'only one select statement on find with inline has_many prefetch' );
 
-is( $queries, 1, 'only one select statement on find with has_many prefetch on resultset' );
+$schema->is_executed_querycount( sub {
+  $rs = $schema->resultset('Artist')->search(undef, { prefetch => [qw/cds/] });
+  $artist = $rs->find(1);
+}, 1, 'only one select statement on find with has_many prefetch on resultset' );
 
-$schema->storage->debug($sdebug);
-$schema->storage->debugcb (undef);
+done_testing;
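
For reference, a minimal usage sketch of the $schema->is_executed_querycount helper relied on above (its implementation is added to DBICTest::Schema further down in this diff). The snippet is illustrative only and not part of the patch; the resultset and the expected count of 1 are assumptions based on the default DBICTest fixture data.

  use strict;
  use warnings;
  use Test::More;
  use lib qw(t/lib);
  use DBICTest;

  my $schema = DBICTest->init_schema();

  # scalar form: compare the total number of statements executed inside the coderef
  $schema->is_executed_querycount( sub {
    $schema->resultset('Artist')->find(1);
  }, 1, 'a primary-key find() issues exactly one SELECT' );

  done_testing;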
@@ -138,17 +138,12 @@ for my $name (keys %stores) {
 
 
     # Test resultsource with cached rows
-    my $query_count;
-    $cd_rs = $cd_rs->search ({}, { cache => 1 });
+    $schema->is_executed_querycount( sub {
+      $cd_rs = $cd_rs->search ({}, { cache => 1 });
 
-    my $orig_debug = $schema->storage->debug;
-    $schema->storage->debug(1);
-    $schema->storage->debugcb(sub { $query_count++ } );
+      # this will hit the database once and prime the cache
+      my @cds = $cd_rs->all;
 
-    # this will hit the database once and prime the cache
-    my @cds = $cd_rs->all;
-
-    lives_ok {
       $copy = $store->($cd_rs);
       ref_ne($copy, $cd_rs, 'Cached resultset cloned');
       is_deeply (
@@ -158,12 +153,7 @@ for my $name (keys %stores) {
       );
 
       is ($copy->count, $cd_rs->count, 'Cached count identical');
-    } "serialize cached resultset lives: $name";
-
-    is ($query_count, 1, 'Only one db query fired');
-
-    $schema->storage->debug($orig_debug);
-    $schema->storage->debugcb(undef);
+    }, 1, 'Only one db query fired');
 }
 
 # test schema-less detached thaw
@@ -5,7 +5,6 @@ use Test::More;
 use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj;
 
 {
   package A::Comp;
@@ -88,7 +87,7 @@ warnings_like (
 my $schema = DBICTest->init_schema();
 DBICTest::Schema::CD->load_components('UTF8Columns');
 DBICTest::Schema::CD->utf8_columns('title');
-Class::C3->reinitialize();
+Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
 
 # as per http://search.cpan.org/dist/Test-Simple/lib/Test/More.pm#utf8
 binmode (Test::More->builder->$_, ':utf8') for qw/output failure_output todo_output/;
@@ -97,24 +96,22 @@ my $bytestream_title = my $utf8_title = "weird \x{466} stuff";
 utf8::encode($bytestream_title);
 cmp_ok ($bytestream_title, 'ne', $utf8_title, 'unicode/raw differ (sanity check)');
 
-my $storage = $schema->storage;
-my ($sql, @bind);
-my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
-my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj);
-$storage->debugobj ($debugobj);
-$storage->debug (1);
-
-my $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } );
-
-$storage->debugobj ($orig_debugobj);
-$storage->debug ($orig_debug);
-
-# bind values are always alphabetically ordered by column, thus [1]
-# the single quotes are an artefact of the debug-system
+my $cd;
 {
   local $TODO = "This has been broken since rev 1191, Mar 2006";
-  is ($bind[1], "'$bytestream_title'", 'INSERT: raw bytes sent to the database');
-}
+
+  $schema->is_executed_sql_bind( sub {
+    $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } )
+  }, [[
+    'INSERT INTO cd ( artist, title, year) VALUES ( ?, ?, ? )',
+     [ { dbic_colname => "artist", sqlt_datatype => "integer" }
+        => 1 ],
+     [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 }
+        => $bytestream_title ],
+     [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 }
+        => 2048 ],
+  ]], 'INSERT: raw bytes sent to the database' );
+};
 
 # this should be using the cursor directly, no inflation/processing of any sort
 my ($raw_db_title) = $schema->resultset('CD')
@@ -149,16 +146,20 @@ ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'reloaded utf8-less title' );
 $bytestream_title = $utf8_title = "something \x{219} else";
 utf8::encode($bytestream_title);
 
+$schema->is_executed_sql_bind( sub {
+  $cd->update ({ title => $utf8_title });
+}, [
+  [ 'BEGIN' ],
+  [
+    'UPDATE cd SET title = ? WHERE cdid = ?',
+    [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 }
+      => $bytestream_title ],
+    [ { dbic_colname => "cdid", sqlt_datatype => "integer" }
+      => 6 ],
+  ],
+  [ 'COMMIT' ],
+], 'UPDATE: raw bytes sent to the database');
 
-$storage->debugobj ($debugobj);
-$storage->debug (1);
-
-$cd->update ({ title => $utf8_title });
-
-$storage->debugobj ($orig_debugobj);
-$storage->debug ($orig_debug);
-
-is ($bind[0], "'$bytestream_title'", 'UPDATE: raw bytes sent to the database');
 ($raw_db_title) = $schema->resultset('CD')
                              ->search ($cd->ident_condition)
                                ->get_column('title')
@@ -8,36 +8,27 @@ use DBICTest;
 
 my $schema = DBICTest->init_schema();
 
-my $queries;
-$schema->storage->debugcb( sub{ $queries++ } );
-my $sdebug = $schema->storage->debug;
-
 my $cd = $schema->resultset("CD")->find(1);
 $cd->title('test');
 
-# SELECT count
-$queries = 0;
-$schema->storage->debug(1);
-
-$cd->update;
-
-is($queries, 1, 'liner_notes (might_have) not prefetched - do not load
-liner_notes on update');
-
-$schema->storage->debug($sdebug);
-
+$schema->is_executed_querycount( sub {
+  $cd->update;
+}, {
+  BEGIN => 1,
+  UPDATE => 1,
+  COMMIT => 1,
+}, 'liner_notes (might_have) not prefetched - do not load liner_notes on update' );
 
 my $cd2 = $schema->resultset("CD")->find(2, {prefetch => 'liner_notes'});
 $cd2->title('test2');
 
-# SELECT count
-$queries = 0;
-$schema->storage->debug(1);
-
-$cd2->update;
-
-is($queries, 1, 'liner_notes (might_have) prefetched - do not load
-liner_notes on update');
+$schema->is_executed_querycount( sub {
+  $cd2->update;
+}, {
+  BEGIN => 1,
+  UPDATE => 1,
+  COMMIT => 1,
+}, 'liner_notes (might_have) prefetched - do not load liner_notes on update');
 
 warning_like {
   local $ENV{DBIC_DONT_VALIDATE_RELS};
@@ -62,5 +53,4 @@ warning_like {
   'Setting DBIC_DONT_VALIDATE_RELS suppresses nullable relation warnings';
 }
 
-$schema->storage->debug($sdebug);
 done_testing();
@@ -26,7 +26,7 @@ sub DBICTest::Schema::deployment_statements {
 
 # Check deployment statements ctx sensitivity
 {
-  my $schema = DBICTest->init_schema (no_deploy => 1);
+  my $schema = DBICTest->init_schema (no_deploy => 1, quote_names => 1);
   my $not_first_table_creation_re = qr/CREATE TABLE "fourkeys_to_twokeys"/;
 
   my $statements = $schema->deployment_statements;
@@ -4,9 +4,14 @@ use warnings;
 use Test::More;
 use Test::Warn;
 use Test::Exception;
+
+# MASSIVE FIXME - there is a hole in ::RSC / as_subselect_rs
+# losing the order. Needs a rework/extract of the realiaser,
+# and that's a whole other can of worms
+BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 }
+
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -248,17 +253,23 @@ is_same_sql_bind (
 
   $schema->resultset('CD')->create({ artist => 1, title => 'dealbroker no tracks', year => 2001 });
 
+  my $yp1 = \[ 'year + ?', 1 ];
+
   my $rs = $schema->resultset ('CD')->search (
     { 'artist.name' => { '!=', 'evancarrol' }, 'tracks.trackid' => { '!=', undef } },
     {
       order_by => 'me.year',
       join => [qw(artist tracks)],
-      columns => [ 'year', { cnt => { count => 'me.cdid' }} ],
+      columns => [
+        'year',
+        { cnt => { count => 'me.cdid' } },
+        {  year_plus_one => $yp1 },
+      ],
     },
   );
 
   my $rstypes = {
-    'explicitly grouped' => $rs->search_rs({}, { group_by => 'year' }),
+    'explicitly grouped' => $rs->search_rs({}, { group_by => [ 'year', $yp1 ] } ),
     'implicitly grouped' => $rs->search_rs({}, { distinct => 1 }),
   };
 
@@ -277,27 +288,37 @@ is_same_sql_bind (
   # would silently drop the group_by entirely, likely ending up with nonsensical results
   # With the current behavior the user will at least get a nice fat exception from the
   # RDBMS (or maybe the RDBMS will even decide to handle the situation sensibly...)
-  warnings_exist { is_same_sql_bind(
-    $rstypes->{'implicitly grouped'}->get_column('cnt')->as_query,
-    '(
-      SELECT COUNT( me.cdid )
-        FROM cd me
-        JOIN artist artist
-          ON artist.artistid = me.artist
-        LEFT JOIN track tracks
-          ON tracks.cd = me.cdid
-      WHERE artist.name != ? AND tracks.trackid IS NOT NULL
-      GROUP BY COUNT( me.cdid )
-      ORDER BY MIN(me.year)
-    )',
-    [ [ { dbic_colname => 'artist.name', sqlt_datatype => 'varchar', sqlt_size => 100 }
-        => 'evancarrol'
-    ] ],
-    'Expected (though nonsensical) SQL generated on rscol-with-distinct-over-function',
-  ) } qr/
-    \QUse of distinct => 1 while selecting anything other than a column \E
-    \Qdeclared on the primary ResultSource is deprecated\E
-  /x, 'deprecation warning';
+  for (
+    [ cnt => 'COUNT( me.cdid )' ],
+    [ year_plus_one => 'year + ?' => [ {} => 1 ] ],
+  ) {
+    my ($col, $sel_grp_sql, @sel_grp_bind) = @$_;
+
+    warnings_exist { is_same_sql_bind(
+      $rstypes->{'implicitly grouped'}->get_column($col)->as_query,
+      "(
+        SELECT $sel_grp_sql
+          FROM cd me
+          JOIN artist artist
+            ON artist.artistid = me.artist
+          LEFT JOIN track tracks
+            ON tracks.cd = me.cdid
+        WHERE artist.name != ? AND tracks.trackid IS NOT NULL
+        GROUP BY $sel_grp_sql
+        ORDER BY MIN(me.year)
+      )",
+      [
+        @sel_grp_bind,
+        [ { dbic_colname => 'artist.name', sqlt_datatype => 'varchar', sqlt_size => 100 }
+          => 'evancarrol' ],
+        @sel_grp_bind,
+      ],
+      'Expected (though nonsensical) SQL generated on rscol-with-distinct-over-function',
+    ) } qr/
+      \QUse of distinct => 1 while selecting anything other than a column \E
+      \Qdeclared on the primary ResultSource is deprecated (you selected '$col')\E
+    /x, 'deprecation warning';
+  }
 
   {
     local $TODO = 'multiplying join leaks through to the count aggregate... this may never actually work';
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 my $schema = DBICTest->init_schema();
 
 lives_ok (sub {
@@ -4,8 +4,6 @@ use warnings;
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
 
 { # Fake storage driver for sqlite with autocast
     package DBICTest::SQLite::AutoCast;
@@ -37,22 +35,18 @@ my $rs = $schema->resultset ('CD')->search ({
   'me.single_track' => \[ '= ?', [ single_track => 1 ] ],
 }, { join => 'tracks' });
 
-my ($sql, @bind);
-my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
-my $storage = $schema->storage;
-my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj);
-$storage->debugobj ($debugobj);
-$storage->debug (1);
-
-# the quoting is a debugobj thing, not dbic-internals
-my $bind = [ map { "'$_'" } qw/
-  5 1 2009 4
-/];
+my @bind = (
+  [ { dbic_colname => "cdid", sqlt_datatype => "integer" }
+      => 5 ],
+  [ { dbic_colname => "single_track", sqlt_datatype => "integer" }
+      => 1 ],
+  [ { dbic_colname => "tracks.last_updated_on", sqlt_datatype => "datetime" }
+      => 2009 ],
+  [ { dbic_colname => "tracks.position", sqlt_datatype => "int" }
+      => 4 ],
+);
 
-$rs->all;
-is_same_sql_bind (
-  $sql,
-  \@bind,
+$schema->is_executed_sql_bind( sub { $rs->all }, [[
   '
     SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
       FROM cd me
@@ -64,16 +58,12 @@ is_same_sql_bind (
       AND tracks.last_updated_on < ?
       AND tracks.position = ?
   ',
-  $bind,
-  'expected sql with casting off',
-);
+  @bind,
+]], 'expected sql with casting off' );
 
 $schema->storage->auto_cast (1);
 
-$rs->all;
-is_same_sql_bind (
-  $sql,
-  \@bind,
+$schema->is_executed_sql_bind( sub { $rs->all }, [[
   '
     SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
       FROM cd me
@@ -85,11 +75,7 @@ is_same_sql_bind (
       AND tracks.last_updated_on < CAST (? AS DateTime)
       AND tracks.position = ?
   ',
-  $bind,
-  'expected sql with casting on',
-);
-
-$storage->debugobj ($orig_debugobj);
-$storage->debug ($orig_debug);
+  @bind,
+]], 'expected sql with casting on' );
 
 done_testing;
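
Similarly, a minimal sketch of the $schema->is_executed_sql_bind form used above (again defined later in this diff): each expected entry is an arrayref of the SQL string followed by [ \%bind_attrs => $value ] pairs. Not part of the patch; the artistid of 42 is purely illustrative, while the column list and bind attributes mirror those appearing elsewhere in this diff.

  $schema->is_executed_sql_bind( sub {
    $schema->resultset('Artist')->find(42);
  }, [[
    'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?',
    [ { dbic_colname => 'me.artistid', sqlt_datatype => 'integer' } => 42 ],
  ]], 'find() runs a single parameterized SELECT' );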
@@ -285,6 +285,17 @@ my $schema_v3 = DBICVersion::Schema->connect($dsn, $user, $pass, { ignore_versio
   ok($get_db_version_run == 0, "attributes pulled from list connect_info");
 }
 
+# at this point we have v1, v2 and v3 still connected
+# make sure they are the only connections and everything else is gone
+is
+  scalar( grep
+    { defined $_ and $_->{Active} }
+    map
+      { @{$_->{ChildHandles}} }
+      values %{ { DBI->installed_drivers } }
+  ), 3, "Expected number of connections at end of script"
+;
+
 END {
   unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) {
     $ddl_dir->rmtree;
@@ -1,178 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use DBIx::Class::Optional::Dependencies ();
-
-my $env2optdep = {
-  DBICTEST_PG => 'rdbms_pg',
-  DBICTEST_MYSQL => 'test_rdbms_mysql',
-};
-
-plan skip_all => join (' ',
-  'Set $ENV{DBICTEST_PG_DSN} and/or $ENV{DBICTEST_MYSQL_DSN} _USER and _PASS to run these tests.',
-) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;
-
-use lib qw(t/lib);
-use DBICTest;
-use DBICTest::Stats;
-
-my $schema;
-
-for my $prefix (keys %$env2optdep) { SKIP: {
-  my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;
-
-  skip ("Skipping tests with $prefix: set \$ENV{${prefix}_DSN} _USER and _PASS", 1)
-    unless $dsn;
-
-  skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
-    unless  DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
-
-  $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 });
-
-  my $create_sql;
-  $schema->storage->ensure_connected;
-  if ($schema->storage->isa('DBIx::Class::Storage::DBI::Pg')) {
-    $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))";
-    $schema->storage->dbh->do('SET client_min_messages=WARNING');
-  }
-  elsif ($schema->storage->isa('DBIx::Class::Storage::DBI::mysql')) {
-    $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB";
-  }
-  else {
-    skip( 'Untested driver ' . $schema->storage, 1 );
-  }
-
-  note "Testing $prefix";
-
-  my $stats = DBICTest::Stats->new;
-  $schema->storage->debugobj($stats);
-  $schema->storage->debug(1);
-
-  $schema->storage->dbh->do ('DROP TABLE IF EXISTS artist');
-  $schema->storage->dbh->do ($create_sql);
-
-  $schema->resultset('Artist')->create({ name => 'foo' });
-
-  $schema->txn_begin;
-
-  my $arty = $schema->resultset('Artist')->find(1);
-
-  my $name = $arty->name;
-
-  # First off, test a generated savepoint name
-  $schema->svp_begin;
-
-  cmp_ok($stats->{'SVP_BEGIN'}, '==', 1, 'Statistics svp_begin tickled');
-
-  $arty->update({ name => 'Jheephizzy' });
-
-  $arty->discard_changes;
-
-  cmp_ok($arty->name, 'eq', 'Jheephizzy', 'Name changed');
-
-  # Rollback the generated name
-  # Active: 0
-  $schema->svp_rollback;
-
-  cmp_ok($stats->{'SVP_ROLLBACK'}, '==', 1, 'Statistics svp_rollback tickled');
-
-  $arty->discard_changes;
-
-  cmp_ok($arty->name, 'eq', $name, 'Name rolled back');
-
-  $arty->update({ name => 'Jheephizzy'});
-
-  # Active: 0 1
-  $schema->svp_begin('testing1');
-
-  $arty->update({ name => 'yourmom' });
-
-  # Active: 0 1 2
-  $schema->svp_begin('testing2');
-
-  $arty->update({ name => 'gphat' });
-  $arty->discard_changes;
-  cmp_ok($arty->name, 'eq', 'gphat', 'name changed');
-  # Active: 0 1 2
-  # Rollback doesn't DESTROY the savepoint, it just rolls back to the value
-  # at its conception
-  $schema->svp_rollback('testing2');
-  $arty->discard_changes;
-  cmp_ok($arty->name, 'eq', 'yourmom', 'testing2 reverted');
-
-  # Active: 0 1 2 3
-  $schema->svp_begin('testing3');
-  $arty->update({ name => 'coryg' });
-  # Active: 0 1 2 3 4
-  $schema->svp_begin('testing4');
-  $arty->update({ name => 'watson' });
-
-  # Release 3, which implicitly releases 4
-  # Active: 0 1 2
-  $schema->svp_release('testing3');
-  $arty->discard_changes;
-  cmp_ok($arty->name, 'eq', 'watson', 'release left data');
-  # This rolls back savepoint 2
-  # Active: 0 1 2
-  $schema->svp_rollback;
-  $arty->discard_changes;
-  cmp_ok($arty->name, 'eq', 'yourmom', 'rolled back to 2');
-
-  # Rollback the original savepoint, taking us back to the beginning, implicitly
-  # rolling back savepoint 1 and 2
-  $schema->svp_rollback('savepoint_0');
-  $arty->discard_changes;
-  cmp_ok($arty->name, 'eq', 'foo', 'rolled back to start');
-
-  $schema->txn_commit;
-
-  # And now to see if txn_do will behave correctly
-  $schema->txn_do (sub {
-    my $artycp = $arty;
-
-    $schema->txn_do (sub {
-      $artycp->name ('Muff');
-      $artycp->update;
-    });
-
-    eval {
-      $schema->txn_do (sub {
-        $artycp->name ('Moff');
-        $artycp->update;
-        $artycp->discard_changes;
-        is($artycp->name,'Moff','Value updated in nested transaction');
-        $schema->storage->dbh->do ("GUARANTEED TO PHAIL");
-      });
-    };
-
-    ok ($@,'Nested transaction failed (good)');
-
-    $arty->discard_changes;
-
-    is($arty->name,'Muff','auto_savepoint rollback worked');
-
-    $arty->name ('Miff');
-
-    $arty->update;
-  });
-
-  $arty->discard_changes;
-
-  is($arty->name,'Miff','auto_savepoint worked');
-
-  cmp_ok($stats->{'SVP_BEGIN'},'==',7,'Correct number of savepoints created');
-
-  cmp_ok($stats->{'SVP_RELEASE'},'==',3,'Correct number of savepoints released');
-
-  cmp_ok($stats->{'SVP_ROLLBACK'},'==',5,'Correct number of savepoint rollbacks');
-
-  $schema->storage->dbh->do ("DROP TABLE artist");
-}}
-
-done_testing;
-
-END {
-  eval { $schema->storage->dbh->do ("DROP TABLE artist") } if defined $schema;
-  undef $schema;
-}
@@ -259,6 +259,28 @@ lives_ok (sub {
   }, 'partial schema tests successful');
 }
 
+{
+  my $cd_rsrc = $schema->source('CD');
+  $cd_rsrc->name(\'main.cd');
+
+  my $sqlt_schema = create_schema(
+    { schema => $schema },
+    args => { ignore_constraint_names => 0, ignore_index_names => 0 }
+  );
+
+  foreach my $source_name (qw(CD)) {
+    my $table = get_table($sqlt_schema, $schema, $source_name);
+    ok(
+      !(grep {$_->name =~ m/main\./} $table->get_indices),
+      'indices have periods stripped out'
+    );
+    ok(
+      !(grep {$_->name =~ m/main\./} $table->get_constraints),
+      'constraints have periods stripped out'
+    );
+  }
+}
+
 done_testing;
 
 sub create_schema {
@@ -108,13 +108,15 @@ my $admin = DBIx::Class::Admin->new(
 );
 
 $admin->version("3.0");
-lives_ok { $admin->install(); } 'install schema version 3.0';
+$admin->install;
 is($admin->schema->get_db_version, "3.0", 'db thinks its version 3.0');
-dies_ok { $admin->install("4.0"); } 'cannot install to allready existing version';
+throws_ok {
+  $admin->install("4.0")
+} qr/Schema already has a version. Try upgrade instead/, 'cannot install to an already existing version';
 
 $admin->force(1);
 warnings_exist ( sub {
-  lives_ok { $admin->install("4.0") } 'can force install to allready existing version'
+  $admin->install("4.0")
 }, qr/Forcing install may not be a good idea/, 'Force warning emitted' );
 is($admin->schema->get_db_version, "4.0", 'db thinks its version 4.0');
 }
@@ -32,7 +32,7 @@ is(Film->__driver, "SQLite", "Driver set correctly");
 }
 
 eval { my $duh = Film->insert; };
-like $@, qr/create needs a hashref/, "needs a hashref";
+like $@, qr/Result object instantiation requires a hashref as argument/, "needs a hashref";
 
 ok +Film->create_test_film;
 
@@ -1,6 +1,8 @@
 use strict;
 use warnings;
 use Test::More;
+use Test::Exception;
+use DBIx::Class::_Util 'sigwarn_silencer';
 
 @YA::Film::ISA = 'Film';
 
@@ -105,7 +107,8 @@ sub taste_bad {
 
 sub fail_with_bad_object {
   my ($dir, $codir) = @_;
-  eval {
+  throws_ok {
+    local $SIG{__WARN__} = sigwarn_silencer( qr/\Qusually should inherit from the related ResultClass ('Director')/ );
     YA::Film->create(
       {
         Title             => 'Tastes Bad',
@@ -115,8 +118,7 @@ sub fail_with_bad_object {
         NumExplodingSheep => 23
       }
     );
-  };
-  ok $@, $@;
+  } qr/isn't a Director/;
 }
 
 package Foo;
@@ -46,7 +46,7 @@ eval { my $pj = Film->add_to_actors(\%pj_data) };
 like $@, qr/class/, "add_to_actors must be object method";
 
 eval { my $pj = $btaste->add_to_actors(%pj_data) };
-like $@, qr/expects a hashref/, "add_to_actors takes hash";
+like $@, qr/Result object instantiation requires a hashref as argument/, "add_to_actors takes hash";
 
 ok(
   my $pj = $btaste->add_to_actors(
@@ -1,6 +1,8 @@
 use strict;
 use warnings;
 use Test::More;
+use Test::Exception;
+use DBIx::Class::_Util 'sigwarn_silencer';
 
 use lib 't/cdbi/testlib';
 use Film;
@@ -45,8 +47,8 @@ my $sj = Director->create({
   });
 
 {
-  eval { $btaste->Director($btaste) };
-  like $@, qr/Director/, "Can't set film as director";
+  throws_ok { $btaste->Director($btaste) }
+    qr/isn't a Director/, "Can't set film as director";
   is $btaste->Director->id, $pj->id, "PJ still the director";
 
   # drop from cache so that next retrieve() is from db
@@ -69,8 +71,7 @@ my $sj = Director->create({
 is $sj->id, 'Skippy Jackson', 'Create new director - Skippy';
 Film->has_a('CoDirector' => 'Director');
 {
-  eval { $btaste->CoDirector("Skippy Jackson") };
-  is $@, "", "Auto inflates";
+  lives_ok { $btaste->CoDirector("Skippy Jackson") };
   isa_ok $btaste->CoDirector, "Director";
   is $btaste->CoDirector->id, $sj->id, "To skippy";
 }
@@ -96,7 +97,8 @@ is(
   $pj = Director->retrieve('Peter Jackson');
 
   my $fail;
-  eval {
+  throws_ok {
+    local $SIG{__WARN__} = sigwarn_silencer( qr/\Qusually should inherit from the related ResultClass ('Director')/ );
     $fail = YA::Film->create({
         Title             => 'Tastes Bad',
         Director          => $sj,
@@ -104,8 +106,7 @@ is(
         Rating            => 'R',
         NumExplodingSheep => 23
       });
-  };
-  ok $@,    "Can't have film as codirector: $@";
+  } qr/isn't a Director/, "Can't have film as codirector";
   is $fail, undef, "We didn't get anything";
 
   my $tastes_bad = YA::Film->create({
@@ -226,8 +227,10 @@ SKIP: {
 }
 
 { # Broken has_a declaration
-  eval { Film->has_a(driector => "Director") };
-  like $@, qr/driector/, "Sensible error from has_a with incorrect column: $@";
+  throws_ok{ Film->has_a(driector => "Director") }
+    qr/No such column driector/,
+    "Sensible error from has_a with incorrect column"
+  ;
 }
 
 done_testing;
@@ -18,7 +18,7 @@ DBICTest::Schema::CD->has_a( 'year', 'DateTime',
       inflate => sub { DateTime->new( year => shift ) },
       deflate => sub { shift->year }
 );
-Class::C3->reinitialize;
+Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO;
 
 # inflation test
 my $cd = $schema->resultset("CD")->find(3);
@@ -0,0 +1,36 @@
+use strict;
+use warnings;
+
+# Class::DBI in its infinite wisdom allows implicit inflation
+# and deflation of foreign class lookups in has_a relationships.
+# For inflate it would call ->new on the foreign_class, and for
+# deflate it would "" the column value and allow for overloading
+# of the "" operator.
+
+use Test::More;
+use DBIx::Class::Optional::Dependencies;
+
+BEGIN {
+  plan skip_all => "Test needs ".DBIx::Class::Optional::Dependencies->req_missing_for('test_dt_sqlite')
+    unless DBIx::Class::Optional::Dependencies->req_ok_for('test_dt_sqlite');
+}
+
+use lib 't/cdbi/testlib';
+use ImplicitInflate;
+
+ok(ImplicitInflate->can('db_Main'), 'set_db()');
+is(ImplicitInflate->__driver, "SQLite", 'Driver set correctly');
+
+my $now = DateTime->now;
+
+ImplicitInflate->create({
+  update_datetime => $now,
+  text            => "Test Data",
+});
+
+my $implicit_inflate = ImplicitInflate->retrieve(text => 'Test Data');
+
+ok($implicit_inflate->update_datetime->isa('DateTime'), 'Date column inflated correctly');
+is($implicit_inflate->update_datetime => $now, 'Date has correct year');
+
+done_testing;
@@ -0,0 +1,29 @@
+use strict;
+use warnings;
+
+# Columns in CDBI could be defined as Class::DBI::Column objects instead of,
+# or as well as, plain column names passed to __PACKAGE__->columns();
+
+use Test::More;
+
+use lib 't/cdbi/testlib';
+use ColumnObject;
+
+ok(ColumnObject->can('db_Main'), 'set_db()');
+is(ColumnObject->__driver, 'SQLite', 'Driver set correctly');
+
+ColumnObject->create({
+  columna => 'Test Data',
+  columnb => 'Test Data 2',
+});
+
+my $column_object = ColumnObject->retrieve(columna => 'Test Data');
+$column_object->columnb_as_write('Test Data Written');
+$column_object->update;
+$column_object = ColumnObject->retrieve(columna => 'Test Data');
+
+is($column_object->columna_as_read => 'Test Data', 'Read column via accessor');
+is($column_object->columna         => 'Test Data', 'Real column returns right data');
+is($column_object->columnb         => 'Test Data Written', 'ColumnB wrote via mutator');
+
+done_testing;
@@ -0,0 +1,29 @@
+package # Hide from PAUSE
+    ColumnObject;
+
+use strict;
+use warnings;
+
+use base 'DBIC::Test::SQLite';
+use Class::DBI::Column;
+
+__PACKAGE__->set_table('column_object');
+
+__PACKAGE__->columns( Primary => 'id' );
+__PACKAGE__->columns( All => (
+  'id',
+  'columna',
+  'columnb',
+  Class::DBI::Column->new('columna' => {accessor => 'columna_as_read'}),
+  Class::DBI::Column->new('columnb' => {mutator  => 'columnb_as_write'}),
+));
+
+sub create_sql {
+  return qq{
+    id       INTEGER PRIMARY KEY,
+    columna  VARCHAR(20),
+    columnb  VARCHAR(20)
+  }
+}
+
+1;
@@ -36,6 +36,11 @@ use warnings;
 
 use Test::More;
 
+# adding implicit search criteria to the iterator will alter the test
+# mechanics - leave everything as-is instead, and hope SQLite won't
+# change too much
+BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 }
+
 use lib 't/lib';
 use DBICTest;
 
@@ -0,0 +1,42 @@
+package # Hide from PAUSE
+  ImplicitInflate;
+
+# Test class for implicit inflation
+# in CDBI classes using the compat layer
+# See t/cdbi/70-implicit_inflate.t
+
+use strict;
+use warnings;
+
+use base 'DBIC::Test::SQLite';
+
+__PACKAGE__->set_table('Date');
+
+__PACKAGE__->columns( Primary => 'id' );
+__PACKAGE__->columns( All => qw/ update_datetime text/);
+
+__PACKAGE__->has_a(
+  update_datetime => 'MyDateStamp',
+);
+
+sub create_sql {
+  # SQLite doesn't support Datetime datatypes.
+  return qq{
+    id              INTEGER PRIMARY KEY,
+    update_datetime TEXT,
+    text            VARCHAR(20)
+  }
+}
+
+{
+  package MyDateStamp;
+
+  use DateTime::Format::SQLite;
+
+  sub new {
+    my ($self, $value) = @_;
+    return DateTime::Format::SQLite->parse_datetime($value);
+  }
+}
+
+1;
@@ -4,9 +4,7 @@ use warnings;
 use lib qw(t/lib);
 
 use Test::More;
-use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($ROWS, $OFFSET) = (
@@ -23,27 +21,25 @@ my $schema = DBICTest->init_schema();
                 { position => [1,2] },
                 { prefetch => [qw/disc lyrics/], rows => 3, offset => 8 },
             );
-  is ($rs->all, 2, 'Correct number of objects');
-
-
-  my ($sql, @bind);
-  $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind));
-  $schema->storage->debug(1);
+  my @wherebind = (
+    [ { sqlt_datatype => 'int', dbic_colname => 'position' }
+      => 1 ],
+    [ { sqlt_datatype => 'int', dbic_colname => 'position' }
+      => 2 ],
+  );
 
-  is ($rs->count, 2, 'Correct count via count()');
+  is ($rs->all, 2, 'Correct number of objects');
 
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    is ($rs->count, 2, 'Correct count via count()');
+  }, [[
     'SELECT COUNT( * )
       FROM cd me
       JOIN track tracks ON tracks.cd = me.cdid
       JOIN cd disc ON disc.cdid = tracks.cd
      WHERE ( ( position = ? OR position = ? ) )
-    ',
-    [ qw/'1' '2'/ ],
-    'count softlimit applied',
-  );
+    ', @wherebind
+  ]], 'count softlimit applied');
 
   my $crs = $rs->count_rs;
   is ($crs->next, 2, 'Correct count via count_rs()');
@@ -60,14 +56,7 @@ my $schema = DBICTest->init_schema();
         LIMIT ? OFFSET ?
        ) tracks
     )',
-    [
-      [ { sqlt_datatype => 'int', dbic_colname => 'position' }
-        => 1 ],
-      [ { sqlt_datatype => 'int', dbic_colname => 'position' }
-        => 2 ],
-      [$ROWS => 3],
-      [$OFFSET => 8],
-    ],
+    [ @wherebind, [$ROWS => 3], [$OFFSET => 8] ],
     'count_rs db-side limit applied',
   );
 }
@@ -79,17 +68,18 @@ my $schema = DBICTest->init_schema();
                 { 'tracks.position' => [1,2] },
                 { prefetch => [qw/tracks artist/], rows => 3, offset => 4 },
             );
-  is ($rs->all, 1, 'Correct number of objects');
-
-  my ($sql, @bind);
-  $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind));
-  $schema->storage->debug(1);
+  my @wherebind = (
+    [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' }
+      => 1 ],
+    [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' }
+      => 2 ],
+  );
 
-  is ($rs->count, 1, 'Correct count via count()');
+  is ($rs->all, 1, 'Correct number of objects');
 
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    is ($rs->count, 1, 'Correct count via count()');
+  }, [ [
     'SELECT COUNT( * )
       FROM (
         SELECT cds.cdid
@@ -100,10 +90,8 @@ my $schema = DBICTest->init_schema();
         WHERE tracks.position = ? OR tracks.position = ?
         GROUP BY cds.cdid
       ) cds
-    ',
-    [ qw/'1' '2'/ ],
-    'count softlimit applied',
-  );
+    ', @wherebind
+  ]], 'count softlimit applied' );
 
   my $crs = $rs->count_rs;
   is ($crs->next, 1, 'Correct count via count_rs()');
@@ -122,14 +110,7 @@ my $schema = DBICTest->init_schema();
         LIMIT ? OFFSET ?
       ) cds
     )',
-    [
-      [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' }
-        => 1 ],
-      [ { sqlt_datatype => 'int', dbic_colname => 'tracks.position' }
-        => 2 ],
-      [ $ROWS => 3],
-      [$OFFSET => 4],
-    ],
+    [ @wherebind, [$ROWS => 3], [$OFFSET => 4], ],
     'count_rs db-side limit applied',
   );
 }
@@ -6,8 +6,7 @@ use Test::Exception;
 
 use lib qw(t/lib);
 
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -5,7 +5,6 @@ use Test::More;
 
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
 
@@ -7,24 +7,37 @@ use lib qw(t/lib);
 
 use DBICTest;
 
-plan tests => 7;
-
 my $schema = DBICTest->init_schema();
 
 my $cds = $schema->resultset("CD")->search({ cdid => 1 }, { join => { cd_to_producer => 'producer' } });
 cmp_ok($cds->count, '>', 1, "extra joins explode entity count");
 
-is (
-  $cds->search({}, { prefetch => 'cd_to_producer' })->count,
-  1,
-  "Count correct with extra joins collapsed by prefetch"
-);
-
-is (
-  $cds->search({}, { distinct => 1 })->count,
-  1,
-  "Count correct with requested distinct collapse of main table"
-);
+for my $arg (
+  [ 'prefetch-collapsed has_many' => { prefetch => 'cd_to_producer' } ],
+  [ 'distinct-collapsed result' => { distinct => 1 } ],
+  [ 'explicit collapse request' => { collapse => 1 } ],
+) {
+  for my $hri (0,1) {
+    my $diag = $arg->[0] . ($hri ? ' with HRI' : '');
+
+    my $rs = $cds->search({}, {
+      %{$arg->[1]},
+      $hri ? ( result_class => 'DBIx::Class::ResultClass::HashRefInflator' ) : (),
+    });
+
+    is
+      $rs->count,
+      1,
+      "Count correct on $diag",
+    ;
+
+    is
+      scalar $rs->all,
+      1,
+      "Amount of constructed objects matches count on $diag",
+    ;
+  }
+}
 
 # JOIN and LEFT JOIN issues mean that we've seen problems where counted rows and fetched rows are sometimes 1 higher than they should
 # be in the related resultset.
@@ -35,3 +48,5 @@ is(scalar($artist->related_resultset('cds')->all()), 0, "No CDs fetched for a sh
 my $artist_rs = $schema->resultset('Artist')->search({artistid => $artist->id});
 is($artist_rs->related_resultset('cds')->count(), 0, "No CDs counted for a shiny new artist using a resultset search");
 is(scalar($artist_rs->related_resultset('cds')->all), 0, "No CDs fetched for a shiny new artist using a resultset search");
+
+done_testing;
@@ -4,8 +4,7 @@ use warnings;
 use lib qw(t/lib);
 
 use Test::More;
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -11,13 +11,13 @@ use DBICTest;
 my $schema = DBICTest->init_schema();
 $schema->_unregister_source('CD');
 
-warnings_like {
+warnings_exist {
   my $s = $schema;
   lives_ok {
     $_->delete for $s->resultset('Artist')->all;
   } 'delete on rows with dangling rels lives';
 } [
-  # 12 == 3 artists * failed cascades:
+  # 9 == 3 artists * failed cascades:
   #   cds
   #   cds_unordered
   #   cds_very_very_very_long_relationship_name
@@ -98,4 +98,52 @@ is("$varchar_datetime", '2006-05-22T19:05:07', 'Correct date/time');
 my $skip_inflation = $event->skip_inflation;
 is ("$skip_inflation", '2006-04-21 18:04:06', 'Correct date/time');
 
+# create and update with literals
+{
+  my $d = {
+    created_on => \ '2001-09-11',
+    starts_at => \[ '?' => '2001-10-26' ],
+  };
+
+  my $ev = $schema->resultset('Event')->create($d);
+
+  for my $col (qw(created_on starts_at)) {
+    ok (ref $ev->$col, "literal untouched in $col");
+    is_deeply( $ev->$col, $d->{$col});
+    is_deeply( $ev->get_inflated_column($col), $d->{$col});
+    is_deeply( $ev->get_column($col), $d->{$col});
+  }
+
+  $ev->discard_changes;
+
+  is_deeply(
+    { $ev->get_dirty_columns },
+    {}
+  );
+
+  for my $col (qw(created_on starts_at)) {
+    isa_ok ($ev->$col, "DateTime", "$col properly inflated on retrieve");
+  }
+
+  for my $meth (qw(set_inflated_columns set_columns)) {
+
+    $ev->$meth({%$d});
+
+    is_deeply(
+      { $ev->get_dirty_columns },
+      $d,
+      "Expected dirty cols after setting literals via $meth",
+    );
+
+    $ev->update;
+
+    for my $col (qw(created_on starts_at)) {
+      ok (ref $ev->$col, "literal untouched in $col updated via $meth");
+      is_deeply( $ev->$col, $d->{$col});
+      is_deeply( $ev->get_inflated_column($col), $d->{$col});
+      is_deeply( $ev->get_column($col), $d->{$col});
+    }
+  }
+}
+
 done_testing;
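
As a side note on the literal-value forms exercised in the block above (not part of the patch; semantics as documented for SQL::Abstract): a plain scalar ref is passed through as a raw SQL snippet, while a ref to an arrayref carries literal SQL together with its own bind values.

  my $ev = $schema->resultset('Event')->create({
    created_on => \ '2001-09-11',           # scalar ref: raw SQL, interpolated verbatim
    starts_at  => \[ '?' => '2001-10-26' ], # ref of [ $sql, @bind ]: literal SQL with a bind
  });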
@@ -29,13 +29,6 @@ plan skip_all => 'Test needs ' .
     $dsn3 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_mssql_ado'))
       or (not $dsn || $dsn2 || $dsn3);
 
-# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
-BEGIN {
-  if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
-    unshift @INC, $_ for split /:/, $lib_dirs;
-  }
-}
-
 if (not ($dsn || $dsn2 || $dsn3)) {
   plan skip_all =>
     'Set $ENV{DBICTEST_MSSQL_ODBC_DSN} and/or $ENV{DBICTEST_MSSQL_DSN} and/or '
@@ -14,7 +14,7 @@ my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
 
 if (not ($dsn && $user && $pass)) {
     plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
-         'Warning: This test drops and creates a table called \'track\'';
+         'Warning: This test drops and creates a table called \'event\'';
 }
 
 # DateTime::Format::Oracle needs this set
@@ -32,21 +32,25 @@ my $timestamp_datatype = ($schema->storage->_server_info->{normalized_dbms_versi
   : 'TIMESTAMP'
 ;
 
-# Need to redefine the last_updated_on column
-my $col_metadata = $schema->class('Track')->column_info('last_updated_on');
-$schema->class('Track')->add_column( 'last_updated_on' => {
-    data_type => 'date' });
-$schema->class('Track')->add_column( 'last_updated_at' => {
-    data_type => $timestamp_datatype });
-
 my $dbh = $schema->storage->dbh;
 
 #$dbh->do("alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SSXFF'");
 
 eval {
-  $dbh->do("DROP TABLE track");
+  $dbh->do("DROP TABLE event");
 };
-$dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at $timestamp_datatype)");
+$dbh->do(<<EOS);
+  CREATE TABLE event (
+    id number NOT NULL,
+    starts_at date NOT NULL,
+    created_on $timestamp_datatype NOT NULL,
+    varchar_date varchar(20),
+    varchar_datetime varchar(20),
+    skip_inflation date,
+    ts_without_tz date,
+    PRIMARY KEY (id)
+  )
+EOS
 
 # TODO is in effect for the rest of the tests
 local $TODO = 'FIXME - something odd is going on with Oracle < 9 datetime support'
@@ -55,27 +59,26 @@ local $TODO = 'FIXME - something odd is going on with Oracle < 9 datetime suppor
 lives_ok {
 
 # insert a row to play with
-my $new = $schema->resultset('Track')->create({ trackid => 1, cd => 1, position => 1, title => 'Track1', last_updated_on => '06-MAY-07', last_updated_at => '2009-05-03 21:17:18.5' });
-is($new->trackid, 1, "insert sucessful");
+my $new = $schema->resultset('Event')->create({ id => 1, starts_at => '06-MAY-07', created_on => '2009-05-03 21:17:18.5' });
+is($new->id, 1, "insert successful");
 
-my $track = $schema->resultset('Track')->find( 1 );
+my $event = $schema->resultset('Event')->find( 1 );
 
-is( ref($track->last_updated_on), 'DateTime', "last_updated_on inflated ok");
+is( ref($event->starts_at), 'DateTime', "starts_at inflated ok");
 
-is( $track->last_updated_on->month, 5, "DateTime methods work on inflated column");
+is( $event->starts_at->month, 5, "DateTime methods work on inflated column");
 
-#note '$track->last_updated_at => ', $track->last_updated_at;
-is( ref($track->last_updated_at), 'DateTime', "last_updated_at inflated ok");
+is( ref($event->created_on), 'DateTime', "created_on inflated ok");
 
-is( $track->last_updated_at->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision");
+is( $event->created_on->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision");
 
 my $dt = DateTime->now();
-$track->last_updated_on($dt);
-$track->last_updated_at($dt);
-$track->update;
+$event->starts_at($dt);
+$event->created_on($dt);
+$event->update;
 
-is( $track->last_updated_on->month, $dt->month, "deflate ok");
-is( int $track->last_updated_at->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision");
+is( $event->starts_at->month, $dt->month, "deflate ok");
+is( int $event->created_on->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision");
 
 # test datetime_setup
 
@@ -93,15 +96,15 @@ $dt = DateTime->now();
 my $timestamp = $dt->clone;
 $timestamp->set_nanosecond( int 500_000_000 );
 
-$track = $schema->resultset('Track')->find( 1 );
-$track->update({ last_updated_on => $dt, last_updated_at => $timestamp });
+$event = $schema->resultset('Event')->find( 1 );
+$event->update({ starts_at => $dt, created_on => $timestamp });
 
-$track = $schema->resultset('Track')->find(1);
+$event = $schema->resultset('Event')->find(1);
 
-is( $track->last_updated_on, $dt, 'DateTime round-trip as DATE' );
-is( $track->last_updated_at, $timestamp, 'DateTime round-trip as TIMESTAMP' );
+is( $event->starts_at, $dt, 'DateTime round-trip as DATE' );
+is( $event->created_on, $timestamp, 'DateTime round-trip as TIMESTAMP' );
 
-is( int $track->last_updated_at->nanosecond, int 500_000_000,
+is( int $event->created_on->nanosecond, int 500_000_000,
   'TIMESTAMP nanoseconds survived' );
 
 } 'datetime operations executed correctly';
@@ -111,7 +114,7 @@ done_testing;
 # clean up our mess
 END {
   if($schema && (my $dbh = $schema->storage->dbh)) {
-    $dbh->do("DROP TABLE track");
+    $dbh->do("DROP TABLE event");
   }
   undef $schema;
 }
@@ -35,7 +35,7 @@ DBICTest::Schema::Serialized->inflate_column( 'serialized',
       deflate => $selected->{deflater},
     },
 );
-Class::C3->reinitialize;
+Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO;
 
 my $struct_hash = {
     a => 1,
@@ -1,50 +0,0 @@
-package DBIC::DebugObj;
-
-use strict;
-use warnings;
-
-use Class::C3;
-
-use base qw/DBIx::Class::Storage::Statistics Exporter Class::Accessor::Fast/;
-
-__PACKAGE__->mk_accessors( qw/dbictest_sql_ref dbictest_bind_ref/ );
-
-
-=head2 new(PKG, SQL_REF, BIND_REF, ...)
-
-Creates a new instance that on subsequent queries will store
-the generated SQL to the scalar pointed to by SQL_REF and bind
-values to the array pointed to by BIND_REF.
-
-=cut
-
-sub new {
-  my $pkg = shift;
-  my $sql_ref = shift;
-  my $bind_ref = shift;
-
-  my $self = $pkg->SUPER::new(@_);
-
-  $self->debugfh(undef);
-
-  $self->dbictest_sql_ref($sql_ref);
-  $self->dbictest_bind_ref($bind_ref || []);
-
-  return $self;
-}
-
-sub query_start {
-  my $self = shift;
-
-  (${$self->dbictest_sql_ref}, @{$self->dbictest_bind_ref}) = @_;
-}
-
-sub query_end { }
-
-sub txn_begin { }
-
-sub txn_commit { }
-
-sub txn_rollback { }
-
-1;
@@ -1,165 +0,0 @@
-package DBIC::SqlMakerTest;
-
-use strict;
-use warnings;
-
-use base qw/Exporter/;
-
-use Carp;
-use SQL::Abstract::Test;
-
-our @EXPORT = qw/
-  is_same_sql_bind
-  is_same_sql
-  is_same_bind
-/;
-our @EXPORT_OK = qw/
-  eq_sql
-  eq_bind
-  eq_sql_bind
-/;
-
-sub is_same_sql_bind {
-  # unroll possible as_query arrayrefrefs
-  my @args;
-
-  for (1,2) {
-    my $chunk = shift @_;
-
-    if ( ref $chunk eq 'REF' and ref $$chunk eq 'ARRAY' ) {
-      my ($sql, @bind) = @$$chunk;
-      push @args, ($sql, \@bind);
-    }
-    else {
-      push @args, $chunk, shift @_;
-    }
-
-  }
-
-  push @args, shift @_;
-
-  croak "Unexpected argument(s) supplied to is_same_sql_bind: " . join ('; ', @_)
-    if @_;
-
-  @_ = @args;
-  goto &SQL::Abstract::Test::is_same_sql_bind;
-}
-
-*is_same_sql = \&SQL::Abstract::Test::is_same_sql;
-*is_same_bind = \&SQL::Abstract::Test::is_same_bind;
-*eq_sql = \&SQL::Abstract::Test::eq_sql;
-*eq_bind = \&SQL::Abstract::Test::eq_bind;
-*eq_sql_bind = \&SQL::Abstract::Test::eq_sql_bind;
-
-1;
-
-__END__
-
-
-=head1 NAME
-
-DBIC::SqlMakerTest - Helper package for testing sql_maker component of DBIC
-
-=head1 SYNOPSIS
-
-  use Test::More;
-  use DBIC::SqlMakerTest;
-
-  my ($sql, @bind) = $schema->storage->sql_maker->select(%args);
-  is_same_sql_bind(
-    $sql, \@bind,
-    $expected_sql, \@expected_bind,
-    'foo bar works'
-  );
-
-=head1 DESCRIPTION
-
-Exports functions that can be used to compare generated SQL and bind values.
-
-This is a thin wrapper around L<SQL::Abstract::Test>, which makes it easier
-to compare as_query sql/bind arrayrefrefs directly.
-
-=head1 FUNCTIONS
-
-=head2 is_same_sql_bind
-
-  is_same_sql_bind(
-    $given_sql, \@given_bind,
-    $expected_sql, \@expected_bind,
-    $test_msg
-  );
-
-  is_same_sql_bind(
-    $rs->as_query
-    $expected_sql, \@expected_bind,
-    $test_msg
-  );
-
-  is_same_sql_bind(
-    \[$given_sql, @given_bind],
-    $expected_sql, \@expected_bind,
-    $test_msg
-  );
-
-Compares given and expected pairs of C<($sql, \@bind)>, and calls
-L<Test::Builder/ok> on the result, with C<$test_msg> as message.
-
-=head2 is_same_sql
-
-  is_same_sql(
-    $given_sql,
-    $expected_sql,
-    $test_msg
-  );
-
-Compares given and expected SQL statement, and calls L<Test::Builder/ok> on the
-result, with C<$test_msg> as message.
-
-=head2 is_same_bind
-
-  is_same_bind(
-    \@given_bind,
-    \@expected_bind,
-    $test_msg
-  );
-
-Compares given and expected bind value lists, and calls L<Test::Builder/ok> on
-the result, with C<$test_msg> as message.
-
-=head2 eq_sql
-
-  my $is_same = eq_sql($given_sql, $expected_sql);
-
-Compares the two SQL statements. Returns true IFF they are equivalent.
-
-=head2 eq_bind
-
-  my $is_same = eq_sql(\@given_bind, \@expected_bind);
-
-Compares two lists of bind values. Returns true IFF their values are the same.
-
-=head2 eq_sql_bind
-
-  my $is_same = eq_sql_bind(
-    $given_sql, \@given_bind,
-    $expected_sql, \@expected_bind
-  );
-
-Compares the two SQL statements and the two lists of bind values. Returns true
-IFF they are equivalent and the bind values are the same.
-
-
-=head1 SEE ALSO
-
-L<SQL::Abstract::Test>, L<Test::More>, L<Test::Builder>.
-
-=head1 AUTHOR
-
-Norbert Buchmuller, <norbi@nix.hu>
-
-=head1 COPYRIGHT AND LICENSE
-
-Copyright 2008 by Norbert Buchmuller.
-
-This library is free software; you can redistribute it and/or modify
-it under the same terms as Perl itself.
@@ -0,0 +1,12 @@
+package #hide from pause
+  DBICTest::Base;
+
+use strict;
+use warnings;
+
+# must load before any DBIx::Class* namespaces
+use DBICTest::RunMode;
+
+sub _skip_namespace_frames { '^DBICTest' }
+
+1;
@@ -4,10 +4,7 @@ package #hide from pause
 use strict;
 use warnings;
 
-# must load before any DBIx::Class* namespaces
-use DBICTest::RunMode;
-
-use base 'DBIx::Class::Core';
+use base qw(DBICTest::Base DBIx::Class::Core);
 
 #use base qw/DBIx::Class::Relationship::Cascade::Rekey DBIx::Class::Core/;
 
@@ -4,11 +4,7 @@ package #hide from pause
 use strict;
 use warnings;
 
-# must load before any DBIx::Class* namespaces
-use DBICTest::RunMode;
-
-use base 'DBIx::Class::ResultSet';
-__PACKAGE__->_skip_namespace_frames('^DBICTest');
+use base qw(DBICTest::Base DBIx::Class::ResultSet);
 
 sub all_hri {
   return [ shift->search ({}, { result_class => 'DBIx::Class::ResultClass::HashRefInflator' })->all ];
@@ -3,10 +3,261 @@ package #hide from pause
 
 use strict;
 use warnings;
+use base qw(DBICTest::Base DBIx::Class::Schema);
 
-# must load before any DBIx::Class* namespaces
-use DBICTest::RunMode;
+use Fcntl qw(:DEFAULT :seek :flock);
+use Time::HiRes 'sleep';
+use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry);
+use DBICTest::Util 'local_umask';
+use namespace::clean;
 
-use base 'DBIx::Class::Schema';
+sub capture_executed_sql_bind {
+  my ($self, $cref) = @_;
+
+  $self->throw_exception("Expecting a coderef to run") unless ref $cref eq 'CODE';
+
+  require DBICTest::SQLTracerObj;
+
+  # hack around stupid, stupid API
+  no warnings 'redefine';
+  local *DBIx::Class::Storage::DBI::_format_for_trace = sub { $_[1] };
+  Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO;
+
+
+  local $self->storage->{debugcb};
+  local $self->storage->{debugobj} = my $tracer_obj = DBICTest::SQLTracerObj->new;
+  local $self->storage->{debug} = 1;
+
+  local $Test::Builder::Level = $Test::Builder::Level + 2;
+  $cref->();
+
+  return $tracer_obj->{sqlbinds} || [];
+}
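# A minimal usage sketch (illustrative only, not itself part of the patch):
# run an arbitrary block of code and inspect every statement it executed.
# Each captured entry has the shape [ $sql_keyword => [ $sql, @bind ] ],
# mirroring what DBICTest::SQLTracerObj::query_start pushes onto {sqlbinds}.
#
#   my $sqlbinds = $schema->capture_executed_sql_bind( sub {
#     $schema->resultset('Artist')->search({ name => 'foo' })->all;
#   });
#   # e.g. $sqlbinds->[0][0] is 'SELECT', $sqlbinds->[0][1][0] is the SQL text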
+
+sub is_executed_querycount {
+  my ($self, $cref, $exp_counts, $msg) = @_;
+
+  local $Test::Builder::Level = $Test::Builder::Level + 1;
+
+  $self->throw_exception("Expecting an hashref of counts or an integer representing total query count")
+    unless ref $exp_counts eq 'HASH' or (defined $exp_counts and ! ref $exp_counts);
+
+  my @got = map { $_->[0] } @{ $self->capture_executed_sql_bind($cref) };
+
+  return Test::More::is( @got, $exp_counts, $msg )
+    unless ref $exp_counts;
+
+  my $got_counts = { map { $_ => 0 } keys %$exp_counts };
+  $got_counts->{$_}++ for @got;
+
+  return Test::More::is_deeply(
+    $got_counts,
+    $exp_counts,
+    $msg,
+  );
+}
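# Usage sketch (hypothetical resultset and expectations, not itself part of
# the patch): the expected count may be a plain integer (total statements)
# or a hashref keyed by statement type.
#
#   $schema->is_executed_querycount( sub { $rs->all }, 1, 'single SELECT' );
#
#   $schema->is_executed_querycount(
#     sub { $rs->count; $rs->all },
#     { SELECT => 2 },
#     'one COUNT(*) query plus one row-fetching query',
#   );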
+
+sub is_executed_sql_bind {
+  my ($self, $cref, $sqlbinds, $msg) = @_;
+
+  local $Test::Builder::Level = $Test::Builder::Level + 1;
+
+  $self->throw_exception("Expecting an arrayref of SQL/Bind pairs") unless ref $sqlbinds eq 'ARRAY';
+
+  my @expected = @$sqlbinds;
+
+  my @got = map { $_->[1] } @{ $self->capture_executed_sql_bind($cref) };
+
+
+  return Test::Builder->new->ok(1, $msg || "No queries executed while running $cref")
+    if !@got and !@expected;
+
+  require SQL::Abstract::Test;
+  my $ret = 1;
+  while (@expected or @got) {
+    my $left = shift @got;
+    my $right = shift @expected;
+
+    # allow the right side to "simplify" the entire shebang
+    if ($left and $right) {
+      $left = [ @$left ];
+      for my $i (1..$#$right) {
+        if (
+          ! ref $right->[$i]
+            and
+          ref $left->[$i] eq 'ARRAY'
+            and
+          @{$left->[$i]} == 2
+        ) {
+          $left->[$i] = $left->[$i][1]
+        }
+      }
+    }
+
+    $ret &= SQL::Abstract::Test::is_same_sql_bind(
+      \( $left || [] ),
+      \( $right || [] ),
+      $msg,
+    );
+  }
+
+  return $ret;
+}
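# Usage sketch (SQL/bind values are made up for illustration, not itself part
# of the patch): each expected entry is [ $sql, @bind ], compared in order
# against what actually ran via SQL::Abstract::Test::is_same_sql_bind.
#
#   $schema->is_executed_sql_bind( sub { $rs->find(42) }, [
#     [ 'SELECT me.artistid, me.name FROM artist me WHERE me.artistid = ?', 42 ],
#   ], 'find() issued the expected query' );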
+
+our $locker;
+END {
+  # we need the $locker to be referenced here for delayed destruction
+  if ($locker->{lock_name} and ($ENV{DBICTEST_LOCK_HOLDER}||0) == $$) {
+    #warn "$$ $0 $locker->{type} LOCK RELEASED";
+  }
+}
+
+my $weak_registry = {};
+
+sub connection {
+  my $self = shift->next::method(@_);
+
+# MASSIVE FIXME
+# we can't really lock based on DSN, as we do not yet have a way to tell that e.g.
+# DBICTEST_MSSQL_DSN=dbi:Sybase:server=192.168.0.11:1433;database=dbtst
+#  and
+# DBICTEST_MSSQL_ODBC_DSN=dbi:ODBC:server=192.168.0.11;port=1433;database=dbtst;driver=FreeTDS;tds_version=8.0
+# are the same server
+# hence we lock everything based on sqlt_type or just globally if not available
+# just pretend we are python you know? :)
+
+
+  # when we get a proper DSN resolution sanitize to produce a portable lockfile name
+  # this may look weird and unnecessary, but consider running tests from
+  # windows over a samba share >.>
+  #utf8::encode($dsn);
+  #$dsn =~ s/([^A-Za-z0-9_\-\.\=])/ sprintf '~%02X', ord($1) /ge;
+  #$dsn =~ s/^dbi/dbi/i;
+
+  # provide locking for physical (non-memory) DSNs, so that tests can
+  # safely run in parallel. While the harness (make -jN test) does set
+  # an envvar, we can not detect when a user invokes prove -jN. Hence
+  # perform the locking at all times, it shouldn't hurt.
+  # the lock fh *should* inherit across forks/subprocesses
+  #
+  # File locking is hard. Really hard. By far the best lock implementation
+  # I've seen is part of the guts of File::Temp. However it is sadly not
+  # reusable. Since I am not aware of folks doing NFS parallel testing,
+  # nor are we known to work on VMS, I am just going to punt this and
+  # use the portable-ish flock() provided by perl itself. If this does
+  # not work for you - patches more than welcome.
+  if (
+    ! $DBICTest::global_exclusive_lock
+      and
+    ( ! $ENV{DBICTEST_LOCK_HOLDER} or $ENV{DBICTEST_LOCK_HOLDER} == $$ )
+      and
+    ref($_[0]) ne 'CODE'
+      and
+    ($_[0]||'') !~ /^ (?i:dbi) \: SQLite \: (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x
+  ) {
+
+    my $locktype = do {
+      # guard against infinite recursion
+      local $ENV{DBICTEST_LOCK_HOLDER} = -1;
+
+      # we need to connect a forced fresh clone so that we do not upset any state
+      # of the main $schema (some tests examine it quite closely)
+      local $SIG{__WARN__} = sub {};
+      local $@;
+      my $storage = eval {
+        my $st = ref($self)->connect(@{$self->storage->connect_info})->storage;
+        $st->ensure_connected;  # do connect here, to catch a possible throw
+        $st;
+      };
+      $storage
+        ? do {
+          my $t = $storage->sqlt_type || 'generic';
+          eval { $storage->disconnect };
+          $t;
+        }
+        : undef
+      ;
+    };
+
+    # Never hold more than one lock. This solves the "lock in order" issues
+    # unrelated tests may have
+    # Also if there is no connection - there is no lock to be had
+    if ($locktype and (!$locker or $locker->{type} ne $locktype)) {
+
+      # this will release whatever lock we may currently be holding
+      # which is fine since the type does not match as checked above
+      undef $locker;
+
+      my $lockpath = DBICTest::RunMode->tmpdir->file("_dbictest_$locktype.lock");
+
+      #warn "$$ $0 $locktype GRABBING LOCK";
+      my $lock_fh;
+      {
+        my $u = local_umask(0); # so that the file opens as 666, and any user can lock
+        sysopen ($lock_fh, $lockpath, O_RDWR|O_CREAT) or die "Unable to open $lockpath: $!";
+      }
+      flock ($lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!";
+      #warn "$$ $0 $locktype LOCK GRABBED";
+
+      # see if anyone was holding a lock before us, and wait up to 5 seconds for them to terminate
+      # if we do not do this we may end up trampling over some long-running END or somesuch
+      seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!";
+      my $old_pid;
+      if (
+        read ($lock_fh, $old_pid, 100)
+          and
+        ($old_pid) = $old_pid =~ /^(\d+)$/
+      ) {
+        for (1..50) {
+          kill (0, $old_pid) or last;
+          sleep 0.1;
+        }
+      }
+      #warn "$$ $0 $locktype POST GRAB WAIT";
+
+      truncate $lock_fh, 0;
+      seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!";
+      $lock_fh->autoflush(1);
+      print $lock_fh $$;
+
+      $ENV{DBICTEST_LOCK_HOLDER} ||= $$;
+
+      $locker = {
+        type => $locktype,
+        fh => $lock_fh,
+        lock_name => "$lockpath",
+      };
+    }
+  }
+
+  if ($INC{'Test/Builder.pm'}) {
+    populate_weakregistry ( $weak_registry, $self->storage );
+
+    my $cur_connect_call = $self->storage->on_connect_call;
+
+    $self->storage->on_connect_call([
+      (ref $cur_connect_call eq 'ARRAY'
+        ? @$cur_connect_call
+        : ($cur_connect_call || ())
+      ),
+      [sub {
+        populate_weakregistry( $weak_registry, shift->_dbh )
+      }],
+    ]);
+  }
+
+  return $self;
+}
+
+sub clone {
+  my $self = shift->next::method(@_);
+  populate_weakregistry ( $weak_registry, $self )
+    if $INC{'Test/Builder.pm'};
+  $self;
+}
+
+END {
+  assert_empty_weakregistry($weak_registry, 'quiet');
+}
 
 1;
@@ -0,0 +1,21 @@
+package # moar hide
+  DBICTest::SQLTracerObj;
+
+use strict;
+use warnings;
+
+use base 'DBIx::Class::Storage::Statistics';
+
+sub query_start { push @{$_[0]{sqlbinds}}, [ ($_[1] =~ /^\s*(\S+)/)[0], [ $_[1], @{ $_[2]||[] } ] ] }
+
+# who the hell came up with this API >:(
+for my $txn (qw(begin rollback commit)) {
+  no strict 'refs';
+  *{"txn_$txn"} = sub { push @{$_[0]{sqlbinds}}, [ uc $txn => [ uc $txn ] ] };
+}
+
+sub svp_begin { push @{$_[0]{sqlbinds}}, [ SAVEPOINT => [ "SAVEPOINT $_[1]" ] ] }
+sub svp_release { push @{$_[0]{sqlbinds}}, [ RELEASE_SAVEPOINT => [ "RELEASE $_[1]" ] ] }
+sub svp_rollback { push @{$_[0]{sqlbinds}}, [ ROLLBACK_TO_SAVEPOINT => [ "ROLLBACK TO $_[1]" ] ] }
+
+1;
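# Illustrative note (not itself part of the patch): besides regular statements
# the tracer records transaction and savepoint events as pseudo-statements, so
# a txn_do() round-trip is captured roughly as
#   [ BEGIN  => [ 'BEGIN' ] ],
#   [ SELECT => [ $sql, @bind ] ],
#   [ COMMIT => [ 'COMMIT' ] ],
# which is what allows is_executed_querycount() to count BEGIN/COMMIT/SAVEPOINT
# entries alongside ordinary SQL.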
@@ -4,8 +4,8 @@ package # hide from PAUSE
 use warnings;
 use strict;
 
-use base qw/DBICTest::BaseResult/;
-use Carp qw/confess/;
+use base 'DBICTest::BaseResult';
+use DBICTest::Util 'check_customcond_args';
 
 __PACKAGE__->table('artist');
 __PACKAGE__->source_info({
@@ -51,26 +51,38 @@ __PACKAGE__->has_many(
     { order_by => { -asc => 'year'} },
 );
 
+__PACKAGE__->has_many(
+  cds_cref_cond => 'DBICTest::Schema::CD',
+  sub {
+    # This is for test purposes only. A regular user does not
+    # need to sanity check the passed-in arguments, this is what
+    # the tests are for :)
+    my $args = &check_customcond_args;
+
+    return (
+      { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} },
+      },
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,  # keep old rowobj syntax as a test
+      }
+    );
+  },
+);
 
 __PACKAGE__->has_many(
   cds_80s => 'DBICTest::Schema::CD',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
-      { "$args->{foreign_alias}.artist" => { '=' => { -ident => "$args->{self_alias}.artistid"} },
+      { "$args->{foreign_alias}.artist" => { '=' => \ "$args->{self_alias}.artistid" },
         "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.artist" => { '=' => \[ '?',  $args->{self_result_object}->artistid ] },
         "$args->{foreign_alias}.year"   => { '>' => 1979, '<' => 1990 },
       }
     );
@@ -81,22 +93,17 @@ __PACKAGE__->has_many(
 __PACKAGE__->has_many(
   cds_84 => 'DBICTest::Schema::CD',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
         "$args->{foreign_alias}.year"   => 1984,
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid,
         "$args->{foreign_alias}.year"   => 1984,
       }
     );
@@ -107,15 +114,10 @@ __PACKAGE__->has_many(
 __PACKAGE__->has_many(
   cds_90s => 'DBICTest::Schema::CD',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
@@ -150,13 +152,17 @@ __PACKAGE__->many_to_many('artworks', 'artwork_to_artist', 'artwork');
 __PACKAGE__->has_many(
     cds_without_genre => 'DBICTest::Schema::CD',
     sub {
-        my $args = shift;
+        # This is for test purposes only. A regular user does not
+        # need to sanity check the passed-in arguments, this is what
+        # the tests are for :)
+        my $args = &check_customcond_args;
+
         return (
           {
             "$args->{foreign_alias}.artist" => { -ident => "$args->{self_alias}.artistid" },
             "$args->{foreign_alias}.genreid" => undef,
-          }, $args->{self_rowobj} && {
-            "$args->{foreign_alias}.artist" => $args->{self_rowobj}->artistid,
+          }, $args->{self_result_object} && {
+            "$args->{foreign_alias}.artist" => $args->{self_result_object}->artistid,
             "$args->{foreign_alias}.genreid" => undef,
           }
         ),
@@ -4,8 +4,8 @@ package # hide from PAUSE
 use warnings;
 use strict;
 
-use base qw/DBICTest::BaseResult/;
-use Carp qw/confess/;
+use base 'DBICTest::BaseResult';
+use DBICTest::Util 'check_customcond_args';
 
 __PACKAGE__->table('cd_artwork');
 __PACKAGE__->add_columns(
@@ -28,21 +28,16 @@ __PACKAGE__->many_to_many('artists_test_m2m_noopt', 'artwork_to_artist', 'artist
 # other test to manytomany
 __PACKAGE__->has_many('artwork_to_artist_test_m2m', 'DBICTest::Schema::Artwork_to_Artist',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.artwork_cd_id" => { -ident => "$args->{self_alias}.cd_id" },
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.artwork_cd_id" => $args->{self_rowobj}->cd_id,
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.artwork_cd_id" => $args->{self_result_object}->cd_id,
       }
     );
   }
@@ -4,8 +4,8 @@ package # hide from PAUSE
 use warnings;
 use strict;
 
-use base qw/DBICTest::BaseResult/;
-use Carp qw/confess/;
+use base 'DBICTest::BaseResult';
+use DBICTest::Util 'check_customcond_args';
 
 __PACKAGE__->table('artwork_to_artist');
 __PACKAGE__->add_columns(
@@ -24,22 +24,17 @@ __PACKAGE__->belongs_to('artist', 'DBICTest::Schema::Artist', 'artist_id');
 
 __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" },
         "$args->{foreign_alias}.rank"     => { '<' => 10 },
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.artistid" => $args->{self_rowobj}->artist_id,
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.artistid" => $args->{self_result_object}->artist_id,
         "$args->{foreign_alias}.rank"   => { '<' => 10 },
       }
     );
@@ -48,15 +43,10 @@ __PACKAGE__->belongs_to('artist_test_m2m', 'DBICTest::Schema::Artist',
 
 __PACKAGE__->belongs_to('artist_test_m2m_noopt', 'DBICTest::Schema::Artist',
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.artistid" => { -ident => "$args->{self_alias}.artist_id" },
@@ -4,7 +4,8 @@ package # hide from PAUSE
 use warnings;
 use strict;
 
-use base qw/DBICTest::BaseResult/;
+use base 'DBICTest::BaseResult';
+use DBICTest::Util 'check_customcond_args';
 
 # this tests table name as scalar ref
 # DO NOT REMOVE THE \
@@ -54,6 +55,14 @@ __PACKAGE__->belongs_to( single_track => 'DBICTest::Schema::Track',
   { join_type => 'left'},
 );
 
+__PACKAGE__->belongs_to( single_track_opaque => 'DBICTest::Schema::Track',
+  sub {
+    my $args = &check_customcond_args;
+    \ " $args->{foreign_alias}.trackid = $args->{self_alias}.single_track ";
+  },
+  { join_type => 'left'},
+);
+
 # add a non-left single relationship for the complex prefetch tests
 __PACKAGE__->belongs_to( existing_single_track => 'DBICTest::Schema::Track',
   { 'foreign.trackid' => 'self.single_track' },
@@ -68,6 +77,9 @@ __PACKAGE__->has_many(
     cd_to_producer => 'DBICTest::Schema::CD_to_Producer' => 'cd'
 );
 
+__PACKAGE__->has_many( twokeys => 'DBICTest::Schema::TwoKeys', 'cd' );
+
+
 # the undef condition in this rel is *deliberate*
 # tests oddball legacy syntax
 __PACKAGE__->might_have(
@@ -118,7 +130,11 @@ __PACKAGE__->might_have(
     'last_track',
     'DBICTest::Schema::Track',
     sub {
-        my $args = shift;
+        # This is for test purposes only. A regular user does not
+        # need to sanity check the passed-in arguments, this is what
+        # the tests are for :)
+        my $args = &check_customcond_args;
+
         return (
             {
                 "$args->{foreign_alias}.trackid" => { '=' =>
@@ -4,8 +4,8 @@ package # hide from PAUSE
 use warnings;
 use strict;
 
-use base qw/DBICTest::BaseResult/;
-use Carp qw/confess/;
+use base 'DBICTest::BaseResult';
+use DBICTest::Util 'check_customcond_args';
 
 __PACKAGE__->load_components(qw{
     +DBICTest::DeployComponent
@@ -53,6 +53,29 @@ __PACKAGE__->grouping_column ('cd');
 __PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD', undef, {
     proxy => { cd_title => 'title' },
 });
+# custom condition coderef
+__PACKAGE__->belongs_to( cd_cref_cond => 'DBICTest::Schema::CD',
+sub {
+  # This is for test purposes only. A regular user does not
+  # need to sanity check the passed-in arguments, this is what
+  # the tests are for :)
+  my $args = &check_customcond_args;
+
+  return (
+    {
+      "$args->{foreign_alias}.cdid" => { -ident => "$args->{self_alias}.cd" },
+    },
+
+    ! $args->{self_result_object} ? () : {
+     "$args->{foreign_alias}.cdid" => $args->{self_result_object}->get_column('cd')
+    },
+
+    ! $args->{foreign_values} ? () : {
+     "$args->{self_alias}.cd" => $args->{foreign_values}{cdid}
+    },
+  );
+}
+);
 __PACKAGE__->belongs_to( disc => 'DBICTest::Schema::CD' => 'cd', {
     proxy => 'year'
 });
@@ -76,28 +99,37 @@ __PACKAGE__->belongs_to(
 __PACKAGE__->has_many (
   next_tracks => __PACKAGE__,
   sub {
-    my $args = shift;
-
     # This is for test purposes only. A regular user does not
     # need to sanity check the passed-in arguments, this is what
     # the tests are for :)
-    my @missing_args = grep { ! defined $args->{$_} }
-      qw/self_alias foreign_alias self_resultsource foreign_relname/;
-    confess "Required arguments not supplied to custom rel coderef: @missing_args\n"
-      if @missing_args;
+    my $args = &check_customcond_args;
 
     return (
       { "$args->{foreign_alias}.cd"       => { -ident => "$args->{self_alias}.cd" },
         "$args->{foreign_alias}.position" => { '>' => { -ident => "$args->{self_alias}.position" } },
       },
-      $args->{self_rowobj} && {
-        "$args->{foreign_alias}.cd"       => $args->{self_rowobj}->get_column('cd'),
-        "$args->{foreign_alias}.position" => { '>' => $args->{self_rowobj}->pos },
+      $args->{self_result_object} && {
+        "$args->{foreign_alias}.cd"       => $args->{self_result_object}->get_column('cd'),
+        "$args->{foreign_alias}.position" => { '>' => $args->{self_result_object}->pos },
       }
     )
   }
 );
 
+__PACKAGE__->has_many (
+  deliberately_broken_all_cd_tracks => __PACKAGE__,
+  sub {
+    # This is for test purposes only. A regular user does not
+    # need to sanity check the passed-in arguments, this is what
+    # the tests are for :)
+    my $args = &check_customcond_args;
+
+    return {
+      "$args->{foreign_alias}.cd" => "$args->{self_alias}.cd"
+    };
+  }
+);
+
 our $hook_cb;
 
 sub sqlt_deploy_hook {
@@ -18,7 +18,7 @@ __PACKAGE__->belongs_to(
     {'foreign.artistid'=>'self.artist'},
 );
 
-__PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD', undef, { is_deferrable => 0, add_fk_index => 0 } );
+__PACKAGE__->belongs_to( cd => 'DBICTest::Schema::CD', undef, { is_deferrable => 0, on_update => undef, on_delete => undef, add_fk_index => 0 } );
 
 __PACKAGE__->has_many(
   'fourkeys_to_twokeys', 'DBICTest::Schema::FourKeys_to_TwoKeys', {
@@ -7,13 +7,6 @@ no warnings 'qw';
 
 use base 'DBICTest::BaseSchema';
 
-use Fcntl qw/:DEFAULT :seek :flock/;
-use Time::HiRes 'sleep';
-use DBICTest::RunMode;
-use DBICTest::Util::LeakTracer qw/populate_weakregistry assert_empty_weakregistry/;
-use DBICTest::Util 'local_umask';
-use namespace::clean;
-
 __PACKAGE__->mk_group_accessors(simple => 'custom_attr');
 
 __PACKAGE__->load_classes(qw/
@@ -69,160 +62,4 @@ sub sqlt_deploy_hook {
   $sqlt_schema->drop_table('dummy');
 }
 
-
-our $locker;
-END {
-  # we need the $locker to be referenced here for delayed destruction
-  if ($locker->{lock_name} and ($ENV{DBICTEST_LOCK_HOLDER}||0) == $$) {
-    #warn "$$ $0 $locker->{type} LOCK RELEASED";
-  }
-}
-
-my $weak_registry = {};
-
-sub connection {
-  my $self = shift->next::method(@_);
-
-# MASSIVE FIXME
-# we can't really lock based on DSN, as we do not yet have a way to tell that e.g.
-# DBICTEST_MSSQL_DSN=dbi:Sybase:server=192.168.0.11:1433;database=dbtst
-#  and
-# DBICTEST_MSSQL_ODBC_DSN=dbi:ODBC:server=192.168.0.11;port=1433;database=dbtst;driver=FreeTDS;tds_version=8.0
-# are the same server
-# hence we lock everything based on sqlt_type or just globally if not available
-# just pretend we are python you know? :)
-
-
-  # when we get a proper DSN resolution sanitize to produce a portable lockfile name
-  # this may look weird and unnecessary, but consider running tests from
-  # windows over a samba share >.>
-  #utf8::encode($dsn);
-  #$dsn =~ s/([^A-Za-z0-9_\-\.\=])/ sprintf '~%02X', ord($1) /ge;
-  #$dsn =~ s/^dbi/dbi/i;
-
-  # provide locking for physical (non-memory) DSNs, so that tests can
-  # safely run in parallel. While the harness (make -jN test) does set
-  # an envvar, we can not detect when a user invokes prove -jN. Hence
-  # perform the locking at all times, it shouldn't hurt.
-  # the lock fh *should* inherit across forks/subprocesses
-  #
-  # File locking is hard. Really hard. By far the best lock implementation
-  # I've seen is part of the guts of File::Temp. However it is sadly not
-  # reusable. Since I am not aware of folks doing NFS parallel testing,
-  # nor are we known to work on VMS, I am just going to punt this and
-  # use the portable-ish flock() provided by perl itself. If this does
-  # not work for you - patches more than welcome.
-  if (
-    ! $DBICTest::global_exclusive_lock
-      and
-    ( ! $ENV{DBICTEST_LOCK_HOLDER} or $ENV{DBICTEST_LOCK_HOLDER} == $$ )
-      and
-    ref($_[0]) ne 'CODE'
-      and
-    ($_[0]||'') !~ /^ (?i:dbi) \: SQLite \: (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x
-  ) {
-
-    my $locktype = do {
-      # guard against infinite recursion
-      local $ENV{DBICTEST_LOCK_HOLDER} = -1;
-
-      # we need to connect a forced fresh clone so that we do not upset any state
-      # of the main $schema (some tests examine it quite closely)
-      local $@;
-      my $storage = eval {
-        my $st = ref($self)->connect(@{$self->storage->connect_info})->storage;
-        $st->ensure_connected;  # do connect here, to catch a possible throw
-        $st;
-      };
-      $storage
-        ? do {
-          my $t = $storage->sqlt_type || 'generic';
-          eval { $storage->disconnect };
-          $t;
-        }
-        : undef
-      ;
-    };
-
-    # Never hold more than one lock. This solves the "lock in order" issues
-    # unrelated tests may have
-    # Also if there is no connection - there is no lock to be had
-    if ($locktype and (!$locker or $locker->{type} ne $locktype)) {
-
-      # this will release whatever lock we may currently be holding
-      # which is fine since the type does not match as checked above
-      undef $locker;
-
-      my $lockpath = DBICTest::RunMode->tmpdir->file("_dbictest_$locktype.lock");
-
-      #warn "$$ $0 $locktype GRABBING LOCK";
-      my $lock_fh;
-      {
-        my $u = local_umask(0); # so that the file opens as 666, and any user can lock
-        sysopen ($lock_fh, $lockpath, O_RDWR|O_CREAT) or die "Unable to open $lockpath: $!";
-      }
-      flock ($lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!";
-      #warn "$$ $0 $locktype LOCK GRABBED";
-
-      # see if anyone was holding a lock before us, and wait up to 5 seconds for them to terminate
-      # if we do not do this we may end up trampling over some long-running END or somesuch
-      seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!";
-      my $old_pid;
-      if (
-        read ($lock_fh, $old_pid, 100)
-          and
-        ($old_pid) = $old_pid =~ /^(\d+)$/
-      ) {
-        for (1..50) {
-          kill (0, $old_pid) or last;
-          sleep 0.1;
-        }
-      }
-      #warn "$$ $0 $locktype POST GRAB WAIT";
-
-      truncate $lock_fh, 0;
-      seek ($lock_fh, 0, SEEK_SET) or die "seek failed $!";
-      $lock_fh->autoflush(1);
-      print $lock_fh $$;
-
-      $ENV{DBICTEST_LOCK_HOLDER} ||= $$;
-
-      $locker = {
-        type => $locktype,
-        fh => $lock_fh,
-        lock_name => "$lockpath",
-      };
-    }
-  }
-
-  if ($INC{'Test/Builder.pm'}) {
-    populate_weakregistry ( $weak_registry, $self->storage );
-
-    my $cur_connect_call = $self->storage->on_connect_call;
-
-    $self->storage->on_connect_call([
-      (ref $cur_connect_call eq 'ARRAY'
-        ? @$cur_connect_call
-        : ($cur_connect_call || ())
-      ),
-      [sub {
-        populate_weakregistry( $weak_registry, shift->_dbh )
-      }],
-    ]);
-  }
-
-  return $self;
-}
-
-sub clone {
-  my $self = shift->next::method(@_);
-  populate_weakregistry ( $weak_registry, $self )
-    if $INC{'Test/Builder.pm'};
-  $self;
-}
-
-END {
-  assert_empty_weakregistry($weak_registry, 'quiet');
-}
-
 1;
@@ -1,63 +0,0 @@
-package DBICTest::Stats;
-use strict;
-use warnings;
-
-use base qw/DBIx::Class::Storage::Statistics/;
-
-sub txn_begin {
-  my $self = shift;
-
-  $self->{'TXN_BEGIN'}++;
-  return $self->{'TXN_BEGIN'};
-}
-
-sub txn_rollback {
-  my $self = shift;
-
-  $self->{'TXN_ROLLBACK'}++;
-  return $self->{'TXN_ROLLBACK'};
-}
-
-sub txn_commit {
-  my $self = shift;
-
-  $self->{'TXN_COMMIT'}++;
-  return $self->{'TXN_COMMIT'};
-}
-
-sub svp_begin {
-  my ($self, $name) = @_;
-
-  $self->{'SVP_BEGIN'}++;
-  return $self->{'SVP_BEGIN'};
-}
-
-sub svp_release {
-  my ($self, $name) = @_;
-
-  $self->{'SVP_RELEASE'}++;
-  return $self->{'SVP_RELEASE'};
-}
-
-sub svp_rollback {
-  my ($self, $name) = @_;
-
-  $self->{'SVP_ROLLBACK'}++;
-  return $self->{'SVP_ROLLBACK'};
-}
-
-sub query_start {
-  my ($self, $string, @bind) = @_;
-
-  $self->{'QUERY_START'}++;
-  return $self->{'QUERY_START'};
-}
-
-sub query_end {
-  my ($self, $string) = @_;
-
-  $self->{'QUERY_END'}++;
-  return $self->{'QUERY_START'};
-}
-
-1;
@@ -5,7 +5,7 @@ use strict;
 
 use Carp;
 use Scalar::Util qw(isweak weaken blessed reftype);
-use DBIx::Class::_Util qw(refcount hrefaddr);
+use DBIx::Class::_Util qw(refcount hrefaddr refdesc);
 use DBIx::Class::Optional::Dependencies;
 use Data::Dumper::Concise;
 use DBICTest::Util 'stacktrace';
@@ -21,15 +21,6 @@ my $refs_traced = 0;
 my $leaks_found = 0;
 my %reg_of_regs;
 
-# so we don't trigger stringification
-sub _describe_ref {
-  sprintf '%s%s(%s)',
-    (defined blessed $_[0]) ? blessed($_[0]) . '=' : '',
-    reftype $_[0],
-    hrefaddr $_[0],
-  ;
-}
-
 sub populate_weakregistry {
   my ($weak_registry, $target, $note) = @_;
 
@@ -65,7 +56,7 @@ sub populate_weakregistry {
     $refs_traced++;
   }
 
-  my $desc = _describe_ref($target);
+  my $desc = refdesc $target;
   $weak_registry->{$refaddr}{slot_names}{$desc} = 1;
   if ($note) {
     $note =~ s/\s*\Q$desc\E\s*//g;
@@ -153,7 +144,7 @@ sub visit_refs {
         } scalar PadWalker::closed_over($r) ] }); # scalar due to RT#92269
       }
       1;
-    } or warn "Could not descend into @{[ _describe_ref($r) ]}: $@\n";
+    } or warn "Could not descend into @{[ refdesc $r ]}: $@\n";
   }
   $visited_cnt;
 }
@@ -173,7 +164,7 @@ sub visit_namespaces {
 
 
     $visited += visit_namespaces({ %$args, package => $_ }) for map
-      { $_ =~ /(.+?)::$/ && "${base}::$1" }
+      { $_ =~ /(.+?)::$/ ? "${base}::$1" : () }
       grep
         { $_ =~ /(?<!^main)::$/ }
         do {  no strict 'refs'; keys %{ $base . '::'} }
@@ -249,6 +240,8 @@ sub symtable_referenced_addresses {
 sub assert_empty_weakregistry {
   my ($weak_registry, $quiet) = @_;
 
+  Sub::Defer::undefer_all();
+
   # in case we hooked bless any extra object creation will wreak
   # havoc during the assert phase
   local *CORE::GLOBAL::bless;
@@ -275,19 +268,32 @@ sub assert_empty_weakregistry {
       if defined $weak_registry->{$addr}{weakref} and ! isweak( $weak_registry->{$addr}{weakref} );
   }
 
-  # the walk is very expensive - if we are $quiet (running in an END block)
-  # we do not really need to be too thorough
-  unless ($quiet) {
-    delete $weak_registry->{$_} for keys %{ symtable_referenced_addresses() };
-  }
-
+  # the symtable walk is very expensive
+  # if we are $quiet (running in an END block) we do not really need to be
+  # that thorough - can get by with only %Sub::Quote::QUOTED
+  delete $weak_registry->{$_} for $quiet
+    ? do {
+      my $refs = {};
+      visit_refs (
+        # only look at the closed over stuffs
+        refs => [ grep { length ref $_ } map { values %{$_->[2]} } grep { ref $_ eq 'ARRAY' } values %Sub::Quote::QUOTED ],
+        seen_refs => $refs,
+        action => sub { 1 },
+      );
+      keys %$refs;
+    }
+    : (
+      # full symtable walk, starting from ::
+      keys %{ symtable_referenced_addresses() }
+    )
+  ;
 
   for my $addr (sort { $weak_registry->{$a}{display_name} cmp $weak_registry->{$b}{display_name} } keys %$weak_registry) {
 
     next if ! defined $weak_registry->{$addr}{weakref};
 
     $leaks_found++ unless $tb->in_todo;
-    $tb->ok (0, "Leaked $weak_registry->{$addr}{display_name}");
+    $tb->ok (0, "Expected garbage collection of $weak_registry->{$addr}{display_name}");
 
     my $diag = do {
       local $Data::Dumper::Maxdepth = 1;
@@ -336,9 +342,16 @@ sub assert_empty_weakregistry {
 }
 
 END {
-  if ($INC{'Test/Builder.pm'}) {
-    my $tb = Test::Builder->new;
-
+  if (
+    $INC{'Test/Builder.pm'}
+      and
+    my $tb = do {
+      local $@;
+      my $t = eval { Test::Builder->new }
+        or warn "Test::Builder->new failed:\n$@\n";
+      $t;
+    }
+  ) {
     # we check for test passage - a leak may be a part of a TODO
     if ($leaks_found and !$tb->is_passing) {
 
@@ -352,6 +365,21 @@ END {
     else {
       $tb->note("Auto checked $refs_traced references for leaks - none detected");
     }
+
+# Disable this until better times - SQLT and probably other things
+# still load strictures. Let's just wait until Moo2.0 and go from there
+=begin for tears
+    # also while we are here and not in plain runmode: make sure we never
+    # loaded any of the strictures XS bullshit (it's a leak in a sense)
+    unless (DBICTest::RunMode->is_plain) {
+      for (qw(indirect multidimensional bareword::filehandles)) {
+        exists $INC{ Module::Runtime::module_notional_filename($_) }
+          and
+        $tb->ok(0, "$_ load should not have been attempted!!!" )
+      }
+    }
+=cut
+
   }
 }
 
@@ -3,10 +3,45 @@ package DBICTest::Util;
 use warnings;
 use strict;
 
+# this noop trick initializes the STDOUT, so that the TAP::Harness
+# issued IO::Select->can_read calls (which are blocking wtf wtf wtf)
+# keep spinning and scheduling jobs
+# This results in an overall much smoother job-queue drainage, since
+# the Harness blocks less
+# (ideally this needs to be addressed in T::H, but a quick patchjob
+# broke everything so tabling it for now)
+BEGIN {
+  if ($INC{'Test/Builder.pm'}) {
+    local $| = 1;
+    print "#\n";
+  }
+}
+
+use Module::Runtime 'module_notional_filename';
+BEGIN {
+  for my $mod (qw( SQL::Abstract::Test SQL::Abstract )) {
+    if ( $INC{ module_notional_filename($mod) } ) {
+      # FIXME this does not seem to work in BEGIN - why?!
+      #require Carp;
+      #$Carp::Internal{ (__PACKAGE__) }++;
+      #Carp::croak( __PACKAGE__ . " must be loaded before $mod" );
+
+      my ($fr, @frame) = 1;
+      while (@frame = caller($fr++)) {
+        last if $frame[1] !~ m|^t/lib/DBICTest|;
+      }
+
+      die __PACKAGE__ . " must be loaded before $mod (or modules using $mod) at $frame[1] line $frame[2]\n";
+    }
+  }
+}
+
 use Config;
+use Carp 'confess';
+use Scalar::Util qw(blessed refaddr);
 
 use base 'Exporter';
-our @EXPORT_OK = qw/local_umask stacktrace/;
+our @EXPORT_OK = qw(local_umask stacktrace check_customcond_args);
 
 sub local_umask {
   return unless defined $Config{d_umask};
@@ -44,4 +79,50 @@ sub stacktrace {
   return join "\tinvoked as ", map { sprintf ("%s at %s line %d\n", @$_ ) } @stack;
 }
 
+sub check_customcond_args ($) {
+  my $args = shift;
+
+  confess "Expecting a hashref"
+    unless ref $args eq 'HASH';
+
+  for (qw(rel_name foreign_relname self_alias foreign_alias)) {
+    confess "Custom condition argument '$_' must be a plain string"
+      if length ref $args->{$_} or ! length $args->{$_};
+  }
+
+  confess "Current and legacy rel_name arguments do not match"
+    if $args->{rel_name} ne $args->{foreign_relname};
+
+  confess "Custom condition argument 'self_resultsource' must be a rsrc instance"
+    unless defined blessed $args->{self_resultsource} and $args->{self_resultsource}->isa('DBIx::Class::ResultSource');
+
+  confess "Passed resultsource has no record of the supplied rel_name - likely wrong \$rsrc"
+    unless ref $args->{self_resultsource}->relationship_info($args->{rel_name});
+
+  my $struct_cnt = 0;
+
+  if (defined $args->{self_result_object} or defined $args->{self_rowobj} ) {
+    $struct_cnt++;
+    for (qw(self_result_object self_rowobj)) {
+      confess "Custom condition argument '$_' must be a result instance"
+        unless defined blessed $args->{$_} and $args->{$_}->isa('DBIx::Class::Row');
+    }
+
+    confess "Current and legacy self_result_object arguments do not match"
+      if refaddr($args->{self_result_object}) != refaddr($args->{self_rowobj});
+  }
+
+  if (defined $args->{foreign_values}) {
+    $struct_cnt++;
+
+    confess "Custom condition argument 'foreign_values' must be a hash reference"
+      unless ref $args->{foreign_values} eq 'HASH';
+  }
+
+  confess "Data structures supplied on both ends of a relationship"
+    if $struct_cnt == 2;
+
+  $args;
+}
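# Calling-convention sketch (the relationship below is hypothetical, not itself
# part of the patch): the ($) prototype is bypassed by the '&' call form, which
# re-uses the caller's @_ - i.e. the single arguments hashref DBIC passes to a
# custom condition coderef:
#
#   __PACKAGE__->has_many( things => 'DBICTest::Schema::Thing', sub {
#     my $args = &check_customcond_args;   # same @_ the coderef received
#     return { "$args->{foreign_alias}.owner" => { -ident => "$args->{self_alias}.id" } };
#   });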
+
 1;
@@ -0,0 +1,4 @@
+# keep stricture tests happy
+use strict;
+use warnings;
+1;
@@ -4,43 +4,9 @@ package # hide from PAUSE
 use strict;
 use warnings;
 
-# this noop trick initializes the STDOUT, so that the TAP::Harness
-# issued IO::Select->can_read calls (which are blocking wtf wtf wtf)
-# keep spinning and scheduling jobs
-# This results in an overall much smoother job-queue drainage, since
-# the Harness blocks less
-# (ideally this needs to be addressed in T::H, but a quick patchjob
-# broke everything so tabling it for now)
-BEGIN {
-  if ($INC{'Test/Builder.pm'}) {
-    local $| = 1;
-    print "#\n";
-  }
-}
-
-use Module::Runtime 'module_notional_filename';
-BEGIN {
-  for my $mod (qw( DBIC::SqlMakerTest SQL::Abstract )) {
-    if ( $INC{ module_notional_filename($mod) } ) {
-      # FIXME this does not seem to work in BEGIN - why?!
-      #require Carp;
-      #$Carp::Internal{ (__PACKAGE__) }++;
-      #Carp::croak( __PACKAGE__ . " must be loaded before $mod" );
-
-      my ($fr, @frame) = 1;
-      while (@frame = caller($fr++)) {
-        last if $frame[1] !~ m|^t/lib/DBICTest|;
-      }
-
-      die __PACKAGE__ . " must be loaded before $mod (or modules using $mod) at $frame[1] line $frame[2]\n";
-    }
-  }
-}
-
-use DBICTest::RunMode;
+use DBICTest::Util 'local_umask';
 use DBICTest::Schema;
 use DBICTest::Util::LeakTracer qw/populate_weakregistry assert_empty_weakregistry/;
-use DBICTest::Util 'local_umask';
 use Carp;
 use Path::Class::File ();
 use File::Spec;
@@ -49,7 +15,7 @@ use Config;
 
 =head1 NAME
 
-DBICTest - Library to be used by DBIx::Class test scripts.
+DBICTest - Library to be used by DBIx::Class test scripts
 
 =head1 SYNOPSIS
 
@@ -64,6 +30,26 @@ DBICTest - Library to be used by DBIx::Class test scripts.
 This module provides the basic utilities to write tests against
 DBIx::Class.
 
+=head1 EXPORTS
+
+The module does not export anything by default, nor does it provide individual
+function exports in the conventional sense. Instead the following tags are
+recognized:
+
+=head2 :DiffSQL
+
+Same as C<use SQL::Abstract::Test
+qw(L<is_same_sql_bind|SQL::Abstract::Test/is_same_sql_bind>
+L<is_same_sql|SQL::Abstract::Test/is_same_sql>
+L<is_same_bind|SQL::Abstract::Test/is_same_bind>)>
+
+=head2 :GlobalLock
+
+Some tests are very time sensitive and need to run on their own, without
+being disturbed by anything else grabbing CPU or disk IO. This is why everything
+using C<DBICTest> grabs a shared lock, and the few tests that request a
+C<:GlobalLock> will ask for an exclusive one and block until they can get it.
+
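A hypothetical test script (shown purely as an illustration; the tag names and
helper functions are the ones documented above) would request the tags like so:

  use DBICTest qw( :DiffSQL :GlobalLock );

  my $schema = DBICTest->init_schema;
  is_same_sql_bind( $sql, \@bind, $expected_sql, \@expected_bind, 'query as expected' );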
 =head1 METHODS
 
 =head2 init_schema
@@ -80,18 +66,15 @@ DBIx::Class.
 This method removes the test SQLite database in t/var/DBIxClass.db
 and then creates a new, empty database.
 
-This method will call deploy_schema() by default, unless the
-no_deploy flag is set.
+This method will call L<deploy_schema()|/deploy_schema> by default, unless the
+C<no_deploy> flag is set.
 
-Also, by default, this method will call populate_schema() by
-default, unless the no_deploy or no_populate flags are set.
+Also, this method will call L<populate_schema()|/populate_schema> by default,
+unless the C<no_deploy> or C<no_populate> flags are set.
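For illustration only (flag combinations as described above, values assumed):

  my $schema = DBICTest->init_schema;                      # deploy and populate
  my $schema = DBICTest->init_schema( no_populate => 1 );  # deploy an empty schema
  my $schema = DBICTest->init_schema( no_deploy => 1 );    # connect without deploying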
 
 =cut
 
-# some tests are very time sensitive and need to run on their own, without
-# being disturbed by anything else grabbing CPU or disk IO. Hence why everything
-# using DBICTest grabs a shared lock, and the few tests that request a :GlobalLock
-# will ask for an exclusive one and block until they can get it
+# see L</:GlobalLock>
 our ($global_lock_fh, $global_exclusive_lock);
 sub import {
     my $self = shift;
@@ -104,13 +87,21 @@ sub import {
         or die "Unable to open $lockpath: $!";
     }
 
-    for (@_) {
-        if ($_ eq ':GlobalLock') {
+    for my $exp (@_) {
+        if ($exp eq ':GlobalLock') {
             flock ($global_lock_fh, LOCK_EX) or die "Unable to lock $lockpath: $!";
             $global_exclusive_lock = 1;
         }
+        elsif ($exp eq ':DiffSQL') {
+            require SQL::Abstract::Test;
+            my $into = caller(0);
+            for (qw(is_same_sql_bind is_same_sql is_same_bind)) {
+              no strict 'refs';
+              *{"${into}::$_"} = \&{"SQL::Abstract::Test::$_"};
+            }
+        }
         else {
-            croak "Unknown export $_ requested from $self";
+            croak "Unknown export $exp requested from $self";
         }
     }
 
@@ -235,10 +226,16 @@ sub _database {
 }
 
 sub __mk_disconnect_guard {
-  return if DBIx::Class::_ENV_::PEEPEENESS; # leaks handles, delaying DESTROY, can't work right
 
   my $db_file = shift;
-  return unless -f $db_file;
+
+  return if (
+    # this perl leaks handles, delaying DESTROY, can't work right
+    DBIx::Class::_ENV_::PEEPEENESS
+      or
+    ! -f $db_file
+  );
+
 
   my $orig_inode = (stat($db_file))[1]
     or return;
@@ -1,11 +0,0 @@
-package
-    PrefetchBug;
-
-use strict;
-use warnings;
-
-use base qw/DBIx::Class::Schema/;
-
-__PACKAGE__->load_classes();
-
-1;
@@ -0,0 +1,71 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema( no_populate => 1 );
+
+my $t11 = $schema->resultset('Track')->find_or_create({
+  trackid => 1,
+  title => 'Track one cd one',
+  cd => {
+    year => 1,
+    title => 'CD one',
+    very_long_artist_relationship => {
+      name => 'Artist one',
+    }
+  }
+});
+
+my $t12 = $schema->resultset('Track')->find_or_create({
+  trackid => 2,
+  title => 'Track two cd one',
+  cd => {
+    title => 'CD one',
+    very_long_artist_relationship => {
+      name => 'Artist one',
+    }
+  }
+});
+
+# FIXME - MC should be smart enough to infer this on its own...
+$schema->resultset('Artist')->create({ name => 'Artist two' });
+
+my $t2 = $schema->resultset('Track')->find_or_create({
+  trackid => 3,
+  title => 'Track one cd one',
+  cd => {
+    year => 1,
+    title => 'CD one',
+    very_long_artist_relationship => {
+      name => 'Artist two',
+    }
+  }
+});
+
+is_deeply(
+  $schema->resultset('Artist')->search({}, {
+    prefetch => { cds => 'tracks' },
+    order_by => 'tracks.title',
+  })->all_hri,
+  [
+    { artistid => 1, charfield => undef, name => "Artist one", rank => 13, cds => [
+      { artist => 1, cdid => 1, genreid => undef, single_track => undef, title => "CD one", year => 1, tracks => [
+        { cd => 1, last_updated_at => undef, last_updated_on => undef, position => 1, title => "Track one cd one", trackid => 1 },
+        { cd => 1, last_updated_at => undef, last_updated_on => undef, position => 2, title => "Track two cd one", trackid => 2 },
+      ]},
+    ]},
+    { artistid => 2, charfield => undef, name => "Artist two", rank => 13, cds => [
+      { artist => 2, cdid => 2, genreid => undef, single_track => undef, title => "CD one", year => 1, tracks => [
+        { cd => 2, last_updated_at => undef, last_updated_on => undef, position => 1, title => "Track one cd one", trackid => 3 },
+      ]},
+    ]},
+  ],
+  'Expected state of database after several find_or_create rounds'
+);
+
+
+done_testing;
+
@@ -3,11 +3,10 @@ use warnings;
 
 use Test::More;
 use Test::Exception;
+use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
 
-plan tests => 91;
-
 my $schema = DBICTest->init_schema();
 
 lives_ok ( sub {
@@ -403,8 +402,11 @@ lives_ok ( sub {
 
   $kurt_cobain->{cds} = [ $in_utero ];
 
+  warnings_exist {
+    $schema->resultset('Artist')->populate([ $kurt_cobain ]);
+  }  qr/\QFast-path populate() with supplied related objects is not possible/;
+
 
-  $schema->resultset('Artist')->populate([ $kurt_cobain ]); # %)
   my $artist = $schema->resultset('Artist')->find({name => 'Kurt Cobain'});
 
   is($artist->name, 'Kurt Cobain', 'Artist insertion ok');
@@ -468,4 +470,4 @@ lives_ok ( sub {
   is ($m2m_cd->first->producers->first->name, 'Cowboy Neal', 'Correct producer row created');
 }, 'Test multi create over many_to_many');
 
-1;
+done_testing;
@@ -9,19 +9,22 @@ use DBICTest;
 my $schema = DBICTest->init_schema();
 
 my $cd = $schema->resultset('CD')->next;
+$cd->tracks->delete;
 
-lives_ok {
-  $cd->tracks->delete;
+$schema->resultset('CD')->related_resultset('tracks')->delete;
 
-  my @tracks = map
-    { $cd->create_related('tracks', { title => "t_$_", position => $_ }) }
-    (4,2,5,1,3)
-  ;
+is $cd->tracks->count, 0, 'No tracks';
 
-  for (@tracks) {
-    $_->discard_changes;
-    $_->delete;
-  }
-} 'Creation/deletion of out-of order tracks successful';
+$cd->create_related('tracks', { title => "t_$_", position => $_ })
+  for (4,2,3,1,5);
+
+is $cd->tracks->count, 5, 'Created 5 tracks';
+
+# a txn should force the implicit pos reload, regardless of order
+$schema->txn_do(sub {
+  $cd->tracks->delete_all
+});
+
+is $cd->tracks->count, 0, 'Successfully deleted everything';
 
 done_testing;
@@ -4,18 +4,16 @@ use warnings;
 use Test::More;
 use Test::Deep;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
-my $orig_debug = $schema->storage->debug;
 
 my $cdrs = $schema->resultset('CD')->search({ 'me.artist' => { '!=', 2 }});
 
 my $cd_data = { map {
   $_->cdid => {
     siblings => $cdrs->search ({ artist => $_->get_column('artist') })->count - 1,
-    track_titles => [ map { $_->title } ($_->tracks->all) ],
+    track_titles => [ sort $_->tracks->get_column('title')->all ],
   },
 } ( $cdrs->all ) };
 
@@ -36,10 +34,10 @@ is_same_sql_bind(
     SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
            (SELECT COUNT( * )
               FROM cd siblings
-            WHERE siblings.artist = me.artist
+            WHERE me.artist != ?
+              AND siblings.artist = me.artist
               AND siblings.cdid != me.cdid
               AND siblings.cdid != ?
-              AND me.artist != ?
            ),
            tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at
       FROM cd me
@@ -50,12 +48,12 @@ is_same_sql_bind(
   [
 
     # subselect
-    [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' }
-      => 23414 ],
-
     [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' }
       => 2 ],
 
+    [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' }
+      => 23414 ],
+
    # outer WHERE
     [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' }
       => 2 ],
@@ -63,26 +61,19 @@ is_same_sql_bind(
   'Expected SQL on correlated realiased subquery'
 );
 
-my $queries = 0;
-$schema->storage->debugcb(sub { $queries++; });
-$schema->storage->debug(1);
-
-cmp_deeply (
-  { map
-    { $_->cdid => {
-      track_titles => [ map { $_->title } ($_->tracks->all) ],
-      siblings => $_->get_column ('sibling_count'),
-    } }
-    $c_rs->all
-  },
-  $cd_data,
-  'Proper information retrieved from correlated subquery'
-);
-
-is ($queries, 1, 'Only 1 query fired to retrieve everything');
-
-$schema->storage->debug($orig_debug);
-$schema->storage->debugcb(undef);
+$schema->is_executed_querycount( sub {
+  cmp_deeply (
+    { map
+      { $_->cdid => {
+        track_titles => [ sort map { $_->title } ($_->tracks->all) ],
+        siblings => $_->get_column ('sibling_count'),
+      } }
+      $c_rs->all
+    },
+    $cd_data,
+    'Proper information retrieved from correlated subquery'
+  );
+}, 1, 'Only 1 query fired to retrieve everything');
 
 # now add an unbalanced select/as pair
 $c_rs = $c_rs->search ({}, {
@@ -102,15 +93,15 @@ is_same_sql_bind(
     SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
            (SELECT COUNT( * )
               FROM cd siblings
-            WHERE siblings.artist = me.artist
+            WHERE me.artist != ?
+              AND siblings.artist = me.artist
               AND siblings.cdid != me.cdid
               AND siblings.cdid != ?
-              AND me.artist != ?
            ),
            (SELECT MIN( year ), MAX( year )
               FROM cd siblings
-            WHERE siblings.artist = me.artist
-              AND me.artist != ?
+            WHERE me.artist != ?
+              AND siblings.artist = me.artist
            ),
            tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at
       FROM cd me
@@ -121,12 +112,12 @@ is_same_sql_bind(
   [
 
     # first subselect
-    [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' }
-      => 23414 ],
-
     [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' }
       => 2 ],
 
+    [ { sqlt_datatype => 'integer', dbic_colname => 'siblings.cdid' }
+      => 23414 ],
+
     # second subselect
     [ { sqlt_datatype => 'integer', dbic_colname => 'me.artist' }
       => 2 ],
@@ -3,10 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
-
-plan tests => 23;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -15,7 +12,6 @@ my $cd_rs = $schema->resultset('CD')->search (
   { prefetch => ['tracks', 'artist'] },
 );
 
-
 is($cd_rs->count, 5, 'CDs with tracks count');
 is($cd_rs->search_related('tracks')->count, 15, 'Tracks associated with CDs count (before SELECT()ing)');
 
@@ -77,26 +73,23 @@ is_same_sql_bind (
       => 4 ] ],
 );
 
-
 {
   local $TODO = "Chaining with prefetch is fundamentally broken";
+  $schema->is_executed_querycount( sub {
 
-  my $queries;
-  $schema->storage->debugcb ( sub { $queries++ } );
-  $schema->storage->debug (1);
-
-  my $cds = $cd2->search_related ('artist', {}, { prefetch => { cds => 'tracks' }, join => 'twokeys' })
+    my $cds = $cd2->search_related ('artist', {}, { prefetch => { cds => 'tracks' }, join => 'twokeys' })
                   ->search_related ('cds');
 
-  my $tracks = $cds->search_related ('tracks');
-
-  is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds");
-  is(scalar($tracks->all), 2, "2 Tracks prefetched on cd via artist via one of the cds");
-  is($tracks->count, 2, "Cached 2 Tracks counted on cd via artist via one of the cds");
+    my $tracks = $cds->search_related ('tracks');
 
-  is($cds->count, 2, "2 CDs counted on artist via one of the cds");
-  is(scalar($cds->all), 2, "2 CDs prefetched on artist via one of the cds");
-  is($cds->count, 2, "Cached 2 CDs counted on artist via one of the cds");
+    is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds");
+    is(scalar($tracks->all), 2, "2 Tracks prefetched on cd via artist via one of the cds");
+    is($tracks->count, 2, "Cached 2 Tracks counted on cd via artist via one of the cds");
 
-  is ($queries, 3, '2 counts + 1 prefetch?');
+    is($cds->count, 2, "2 CDs counted on artist via one of the cds");
+    is(scalar($cds->all), 2, "2 CDs prefetched on artist via one of the cds");
+    is($cds->count, 2, "Cached 2 CDs counted on artist via one of the cds");
+  }, 3, '2 counts + 1 prefetch?' );
 }
+
+done_testing;
@@ -3,8 +3,7 @@ use strict;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -0,0 +1,39 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+my $queries;
+my $debugcb = sub { $queries++; };
+my $orig_debug = $schema->storage->debug;
+
+{
+  $queries = 0;
+  $schema->storage->debugcb($debugcb);
+  $schema->storage->debug(1);
+
+  my $cds_rs = $schema->resultset('CD')
+    ->search(\'0 = 1', { prefetch => 'tracks', cache => 1 });
+
+  my @cds = $cds_rs->all;
+  is( $queries, 1, '->all on empty original resultset hit db' );
+  is_deeply( $cds_rs->get_cache, [], 'empty cache on original resultset' );
+  is( 0+@cds, 0, 'empty original resultset' );
+
+  my $tracks_rs = $cds_rs->related_resultset('tracks');
+  is_deeply( $tracks_rs->get_cache, [], 'empty cache on related resultset' );
+
+  my @tracks = $tracks_rs->all;
+  is( $queries, 1, "->all on empty related resultset didn't hit db" );
+  is( 0+@tracks, 0, 'empty related resultset' );
+
+  $schema->storage->debugcb(undef);
+  $schema->storage->debug($orig_debug);
+}
+
+done_testing;
@@ -15,29 +15,20 @@ $schema->resultset('CD')->create({
   },
 });
 
-my $orig_debug = $schema->storage->debug;
-
-my $queries = 0;
-$schema->storage->debugcb(sub { $queries++; });
-$schema->storage->debug(1);
-
-my $cd = $schema->resultset('CD')->search( {}, { prefetch => 'artist' })->next;
-
-cmp_deeply
-  { $cd->get_columns },
-  { artist => 0, cdid => 0, genreid => 0, single_track => 0, title => '', year => 0 },
-  'Expected CD columns present',
-;
-
-cmp_deeply
-  { $cd->artist->get_columns },
-  { artistid => 0, charfield => 0, name => "", rank => 0 },
-  'Expected Artist columns present',
-;
-
-is $queries, 1, 'Only one query fired - prefetch worked';
-
-$schema->storage->debugcb(undef);
-$schema->storage->debug($orig_debug);
+$schema->is_executed_querycount( sub {
+  my $cd = $schema->resultset('CD')->search( {}, { prefetch => 'artist' })->next;
+
+  cmp_deeply
+    { $cd->get_columns },
+    { artist => 0, cdid => 0, genreid => 0, single_track => 0, title => '', year => 0 },
+    'Expected CD columns present',
+  ;
+
+  cmp_deeply
+    { $cd->artist->get_columns },
+    { artistid => 0, charfield => 0, name => "", rank => 0 },
+    'Expected Artist columns present',
+  ;
+}, 1, 'Only one query fired - prefetch worked' );
 
 done_testing;
@@ -4,15 +4,13 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype;
 my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype;
 
 my $schema = DBICTest->init_schema();
-my $sdebug = $schema->storage->debug;
 
 my $cd_rs = $schema->resultset('CD')->search (
   { 'tracks.cd' => { '!=', undef } },
@@ -25,10 +23,12 @@ for ($cd_rs->all) {
   is ($_->tracks->count, 3, '3 tracks for CD' . $_->id );
 }
 
+my @cdids = sort $cd_rs->get_column ('cdid')->all;
+
 # Test a belongs_to prefetch of a has_many
 {
   my $track_rs = $schema->resultset ('Track')->search (
-    { 'me.cd' => { -in => [ $cd_rs->get_column ('cdid')->all ] } },
+    { 'me.cd' => { -in => \@cdids } },
     {
       select => [
         'me.cd',
@@ -49,21 +49,13 @@ for ($cd_rs->all) {
   is($track_rs->count, 5, 'Prefetched count with groupby');
   is($track_rs->all, 5, 'Prefetched objects with groupby');
 
-  {
-    my $query_cnt = 0;
-    $schema->storage->debugcb ( sub { $query_cnt++ } );
-    $schema->storage->debug (1);
-
+  $schema->is_executed_querycount( sub {
     while (my $collapsed_track = $track_rs->next) {
       my $cdid = $collapsed_track->get_column('cd');
       is($collapsed_track->get_column('track_count'), 3, "Correct count of tracks for CD $cdid" );
       ok($collapsed_track->cd->title, "Prefetched title for CD $cdid" );
     }
-
-    is ($query_cnt, 1, 'Single query on prefetched titles');
-    $schema->storage->debugcb (undef);
-    $schema->storage->debug ($sdebug);
-  }
+  }, 1, 'Single query on prefetched titles');
 
   # Test sql by hand, as the sqlite db will simply paper over
   # improper group/select combinations
@@ -82,7 +74,7 @@ for ($cd_rs->all) {
       me
     )',
     [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' }
-      => $_ ] } ($cd_rs->get_column ('cdid')->all) ],
+      => $_ ] } @cdids ],
     'count() query generated expected SQL',
   );
 
@@ -101,7 +93,7 @@ for ($cd_rs->all) {
       WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
     )',
     [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' }
-      => $_ ] } ( ($cd_rs->get_column ('cdid')->all) x 2 ) ],
+      => $_ ] } (@cdids) x 2 ],
     'next() query generated expected SQL',
   );
 
@@ -190,22 +182,16 @@ for ($cd_rs->all) {
   my ($top_cd) = $most_tracks_rs->all;
   is ($top_cd->id, 2, 'Correct cd fetched on top'); # 2 because of the slice(1,1) earlier
 
-  my $query_cnt = 0;
-  $schema->storage->debugcb ( sub { $query_cnt++ } );
-  $schema->storage->debug (1);
-
-  is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly');
-  is ($top_cd->tracks->count, 4, 'Count of prefetched tracks rs still correct');
-  is ($top_cd->tracks->all, 4, 'Number of prefetched track objects still correct');
-  is (
-    $top_cd->liner_notes->notes,
-    'Buy Whiskey!',
-    'Correct liner pre-fetched with top cd',
-  );
-
-  is ($query_cnt, 0, 'No queries executed during prefetched data access');
-  $schema->storage->debugcb (undef);
-  $schema->storage->debug ($sdebug);
+  $schema->is_executed_querycount( sub {
+    is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly');
+    is ($top_cd->tracks->count, 4, 'Count of prefetched tracks rs still correct');
+    is ($top_cd->tracks->all, 4, 'Number of prefetched track objects still correct');
+    is (
+      $top_cd->liner_notes->notes,
+      'Buy Whiskey!',
+      'Correct liner pre-fetched with top cd',
+    );
+  }, 0, 'No queries executed during prefetched data access');
 }
 
 {
@@ -256,20 +242,14 @@ for ($cd_rs->all) {
   my ($top_cd) = $most_tracks_rs->all;
   is ($top_cd->id, 2, 'Correct cd fetched on top'); # 2 because of the slice(1,1) earlier
 
-  my $query_cnt = 0;
-  $schema->storage->debugcb ( sub { $query_cnt++ } );
-  $schema->storage->debug (1);
-
-  is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly');
-  is (
-    $top_cd->liner_notes->notes,
-    'Buy Whiskey!',
-    'Correct liner pre-fetched with top cd',
-  );
-
-  is ($query_cnt, 0, 'No queries executed during prefetched data access');
-  $schema->storage->debugcb (undef);
-  $schema->storage->debug ($sdebug);
+  $schema->is_executed_querycount( sub {
+    is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly');
+    is (
+      $top_cd->liner_notes->notes,
+      'Buy Whiskey!',
+      'Correct liner pre-fetched with top cd',
+    );
+  }, 0, 'No queries executed during prefetched data access');
 }
 
 
@@ -305,7 +285,7 @@ for ($cd_rs->all) {
 # RT 47779, test group_by as a scalar ref
 {
   my $track_rs = $schema->resultset ('Track')->search (
-    { 'me.cd' => { -in => [ $cd_rs->get_column ('cdid')->all ] } },
+    { 'me.cd' => { -in => \@cdids } },
     {
       select => [
         'me.cd',
@@ -334,7 +314,7 @@ for ($cd_rs->all) {
       me
     )',
     [ map { [ { sqlt_datatype => 'integer', dbic_colname => 'me.cd' }
-      => $_ ] } ($cd_rs->get_column ('cdid')->all) ],
+      => $_ ] } (@cdids) ],
     'count() query generated expected SQL',
   );
 }
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Deep;
 use Test::Exception;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -3,8 +3,7 @@ use strict;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -257,24 +257,20 @@ if ($ENV{TEST_VERBOSE}) {
     for @lines;
 }
 
-{
-  my $queries = 0;
-  $schema->storage->debugcb(sub { $queries++ });
-  my $orig_debug = $schema->storage->debug;
-  $schema->storage->debug (1);
-
+$schema->is_executed_querycount( sub {
   for my $use_next (0, 1) {
     my @random_cds;
+    my $rs_r = $rs_random;
     if ($use_next) {
       warnings_exist {
-        while (my $o = $rs_random->next) {
+        while (my $o = $rs_r->next) {
           push @random_cds, $o;
         }
       } qr/performed an eager cursor slurp underneath/,
       'Warned on auto-eager cursor';
     }
     else {
-      @random_cds = $rs_random->all;
+      @random_cds = $rs_r->all;
     }
 
     is (@random_cds, 6, 'object count matches');
@@ -306,11 +302,7 @@ if ($ENV{TEST_VERBOSE}) {
       }
     }
   }
-
-  $schema->storage->debugcb(undef);
-  $schema->storage->debug($orig_debug);
-  is ($queries, 2, "Only two queries for two prefetch calls total");
-}
+}, 2, "Only two queries for two prefetch calls total");
 
 # can't cmp_deeply a random set - need *some* order
 my $ord_rs = $rs->search({}, {
@@ -2,11 +2,11 @@ use strict;
 use warnings;
 
 use Test::More;
+use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
 
 my $schema = DBICTest->init_schema();
-my $sdebug = $schema->storage->debug;
 
 #( 1 -> M + M )
 my $cd_rs = $schema->resultset('CD')->search( { 'me.title' => 'Forkful of bees' } );
@@ -15,33 +15,24 @@ my $pr_cd_rs = $cd_rs->search( {}, { prefetch => [qw/tracks tags/], } );
 my $tracks_rs    = $cd_rs->first->tracks;
 my $tracks_count = $tracks_rs->count;
 
-my ( $pr_tracks_rs, $pr_tracks_count );
+$schema->is_executed_querycount( sub {
+  my $pcr = $pr_cd_rs;
+  my $pr_tracks_rs;
 
-my $queries = 0;
-$schema->storage->debugcb( sub { $queries++ } );
-$schema->storage->debug(1);
+  warnings_exist {
+    $pr_tracks_rs = $pcr->first->tracks;
+  } [], 'no warning on attempt to prefetch several same level has_many\'s (1 -> M + M)';
 
-my $o_mm_warn;
-{
-    local $SIG{__WARN__} = sub { $o_mm_warn = shift };
-    $pr_tracks_rs = $pr_cd_rs->first->tracks;
-};
-$pr_tracks_count = $pr_tracks_rs->count;
+  is( $pr_tracks_rs->count, $tracks_count,
+    'equal count of prefetched relations over several same level has_many\'s (1 -> M + M)'
+  );
 
-ok( !$o_mm_warn,
-'no warning on attempt to prefetch several same level has_many\'s (1 -> M + M)'
-);
+  is( $pr_tracks_rs->all, $tracks_count,
+    'equal amount of objects returned with and without prefetch over several same level has_many\'s (1 -> M + M)'
+  );
 
-is( $queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' );
-$schema->storage->debugcb(undef);
-$schema->storage->debug($sdebug);
+}, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' );
 
-is( $pr_tracks_count, $tracks_count,
-'equal count of prefetched relations over several same level has_many\'s (1 -> M + M)'
-);
-is( $pr_tracks_rs->all, $tracks_rs->all,
-'equal amount of objects returned with and without prefetch over several same level has_many\'s (1 -> M + M)'
-);
 
 #( M -> 1 -> M + M )
 my $note_rs =
@@ -52,32 +43,22 @@ my $pr_note_rs =
 my $tags_rs    = $note_rs->first->cd->tags;
 my $tags_count = $tags_rs->count;
 
-my ( $pr_tags_rs, $pr_tags_count );
-
-$queries = 0;
-$schema->storage->debugcb( sub { $queries++ } );
-$schema->storage->debug(1);
-
-my $m_o_mm_warn;
-{
-    local $SIG{__WARN__} = sub { $m_o_mm_warn = shift };
-    $pr_tags_rs = $pr_note_rs->first->cd->tags;
-};
-$pr_tags_count = $pr_tags_rs->count;
-
-ok( !$m_o_mm_warn,
-'no warning on attempt to prefetch several same level has_many\'s (M -> 1 -> M + M)'
-);
-
-is( $queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' );
-$schema->storage->debugcb(undef);
-$schema->storage->debug($sdebug);
-
-is( $pr_tags_count, $tags_count,
-'equal count of prefetched relations over several same level has_many\'s (M -> 1 -> M + M)'
-);
-is( $pr_tags_rs->all, $tags_rs->all,
-'equal amount of objects with and without prefetch over several same level has_many\'s (M -> 1 -> M + M)'
-);
+$schema->is_executed_querycount( sub {
+  my $pnr = $pr_note_rs;
+  my $pr_tags_rs;
+
+  warnings_exist {
+    $pr_tags_rs = $pnr->first->cd->tags;
+  } [], 'no warning on attempt to prefetch several same level has_many\'s (M -> 1 -> M + M)';
+
+  is( $pr_tags_rs->count, $tags_count,
+    'equal count of prefetched relations over several same level has_many\'s (M -> 1 -> M + M)'
+  );
+  is( $pr_tags_rs->all, $tags_count,
+    'equal amount of objects with and without prefetch over several same level has_many\'s (M -> 1 -> M + M)'
+  );
+
+}, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query' );
+
 
 done_testing;
@@ -6,28 +6,10 @@ use Test::Deep;
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
+use DBIx::Class::_Util 'sigwarn_silencer';
 
 my $schema = DBICTest->init_schema();
 
-my $mo_rs = $schema->resultset('Artist')->search(
-  { 'me.artistid' => 4 },
-  {
-    prefetch   => [
-      {
-        cds => [
-          { tracks     => { cd_single => 'tracks' } },
-          { cd_to_producer => 'producer' }
-        ]
-      },
-      { artwork_to_artist => 'artwork' }
-    ],
-
-    result_class => 'DBIx::Class::ResultClass::HashRefInflator',
-
-    order_by => [qw/tracks.position tracks.trackid producer.producerid tracks_2.trackid artwork.cd_id/],
-  }
-);
-
 $schema->resultset('Artist')->create(
   {
     name => 'mo',
@@ -78,11 +60,7 @@ $schema->resultset('Artist')->create(
   }
 );
 
-my $mo = $mo_rs->next;
-
-is( @{$mo->{cds}}, 2, 'two CDs' );
-
-cmp_deeply( $mo, {
+my $artist_with_extras = {
   artistid => 4, charfield => undef, name => 'mo', rank => 1337,
   artwork_to_artist => [
     { artist_id => 4, artwork_cd_id => 1, artwork => { cd_id => 1 } },
@@ -125,6 +103,53 @@ cmp_deeply( $mo, {
       ],
     }
   ],
+};
+
+my $art_rs = $schema->resultset('Artist')->search({ 'me.artistid' => 4 });
+
+
+my $art_rs_prefetch = $art_rs->search({}, {
+  order_by => [qw/tracks.position tracks.trackid producer.producerid tracks_2.trackid artwork.cd_id/],
+  result_class => 'DBIx::Class::ResultClass::HashRefInflator',
+  prefetch => [
+    {
+      cds => [
+        { tracks => { cd_single => 'tracks' } },
+        { cd_to_producer => 'producer' }
+      ]
+    },
+    { artwork_to_artist => 'artwork' }
+  ],
 });
 
+cmp_deeply( $art_rs_prefetch->next, $artist_with_extras );
+
+
+for my $order (
+  [ [qw( cds.cdid tracks.position )] ],
+
+  [ [qw( artistid tracks.cd tracks.position )],
+    'we need to proxy the knowledge from the collapser that tracks.cd is a stable sorter for CDs' ],
+) {
+
+  my $cds_rs_prefetch = $art_rs->related_resultset('cds')->search({}, {
+    order_by => [ $order->[0], qw(producer.name tracks_2.position) ],
+    result_class => 'DBIx::Class::ResultClass::HashRefInflator',
+    prefetch => [
+      { tracks => { cd_single => 'tracks' } },
+      { cd_to_producer => 'producer' },
+    ],
+  });
+
+  local $SIG{__WARN__} = sigwarn_silencer(qr/Unable to properly collapse has_many results/) if $order->[1];
+
+  cmp_deeply( $cds_rs_prefetch->next, $artist_with_extras->{cds}[0], '1st cd structure matches' );
+  cmp_deeply( $cds_rs_prefetch->next, $artist_with_extras->{cds}[1], '2nd cd structure matches' );
+
+  # INTERNALS! (a.k.a boars, gore and whores) DO NOT CARGOCULT!!!
+  local $TODO = $order->[1] if $order->[1];
+  ok( $cds_rs_prefetch->_resolved_attrs->{_ordered_for_collapse}, 'ordered_for_collapse detected properly' );
+}
+
+
 done_testing;
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($ROWS, $OFFSET) = (
@@ -12,20 +12,14 @@ my $artist = $schema->resultset ('Artist')->find ({artistid => 1});
 is ($artist->cds->count, 3, 'Correct number of CDs');
 is ($artist->cds->search_related ('genre')->count, 1, 'Only one of the cds has a genre');
 
-my $queries = 0;
-my $orig_cb = $schema->storage->debugcb;
-$schema->storage->debugcb(sub { $queries++ });
-$schema->storage->debug(1);
-
-my $pref = $schema->resultset ('Artist')
+$schema->is_executed_querycount( sub {
+  my $pref = $schema->resultset ('Artist')
                      ->search ({ 'me.artistid' => $artist->id }, { prefetch => { cds => 'genre' } })
                       ->next;
 
-is ($pref->cds->count, 3, 'Correct number of CDs prefetched');
-is ($pref->cds->search_related ('genre')->count, 1, 'Only one of the prefetched cds has a prefetched genre');
+  is ($pref->cds->count, 3, 'Correct number of CDs prefetched');
+  is ($pref->cds->search_related ('genre')->count, 1, 'Only one of the prefetched cds has a prefetched genre');
 
-is ($queries, 1, 'All happened within one query only');
-$schema->storage->debugcb($orig_cb);
-$schema->storage->debug(0);
+}, 1, 'All happened within one query only');
 
 done_testing;
@@ -25,11 +25,7 @@ is (
 );
 
 # this still should emit no queries:
-{
-  my $queries = 0;
-  my $orig_debug = $schema->storage->debug;
-  $schema->storage->debugcb(sub { $queries++; });
-  $schema->storage->debug(1);
+$schema->is_executed_querycount( sub {
 
   my $cds = $art->cds;
   is (
@@ -47,10 +43,6 @@ is (
     );
   }
 
-  $schema->storage->debug($orig_debug);
-  $schema->storage->debugcb(undef);
-
-  is ($queries, 0, 'No queries on prefetched operations');
-}
+}, 0, 'No queries on prefetched operations');
 
 done_testing;
@@ -7,33 +7,26 @@ use lib qw(t/lib);
 use DBICTest;
 
 my $schema = DBICTest->init_schema();
-my $orig_debug = $schema->storage->debug;
 
-my $queries = 0;
-$schema->storage->debugcb(sub { $queries++; });
-$schema->storage->debug(1);
-
-my $search = { 'artist.name' => 'Caterwauler McCrae' };
-my $attr = { prefetch => [ qw/artist liner_notes/ ],
+my $rs;
+$schema->is_executed_querycount( sub {
+  my $search = { 'artist.name' => 'Caterwauler McCrae' };
+  my $attr = { prefetch => [ qw/artist liner_notes/ ],
              order_by => 'me.cdid' };
 
-my $rs = $schema->resultset("CD")->search($search, $attr);
-my @cd = $rs->all;
-
-is($cd[0]->title, 'Spoonful of bees', 'First record returned ok');
+  $rs = $schema->resultset("CD")->search($search, $attr);
+  my @cd = $rs->all;
 
-ok(!defined $cd[0]->liner_notes, 'No prefetch for NULL LEFT join');
+  is($cd[0]->title, 'Spoonful of bees', 'First record returned ok');
 
-is($cd[1]->{_relationship_data}{liner_notes}->notes, 'Buy Whiskey!', 'Prefetch for present LEFT JOIN');
+  ok(!defined $cd[0]->liner_notes, 'No prefetch for NULL LEFT join');
 
-is(ref $cd[1]->liner_notes, 'DBICTest::LinerNotes', 'Prefetch returns correct class');
+  is($cd[1]->{_relationship_data}{liner_notes}->notes, 'Buy Whiskey!', 'Prefetch for present LEFT JOIN');
 
-is($cd[2]->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'Prefetch on parent object ok');
+  is(ref $cd[1]->liner_notes, 'DBICTest::LinerNotes', 'Prefetch returns correct class');
 
-is($queries, 1, 'prefetch ran only 1 select statement');
-
-$schema->storage->debug($orig_debug);
-$schema->storage->debugobj->callback(undef);
+  is($cd[2]->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'Prefetch on parent object ok');
+}, 1, 'prefetch ran only 1 select statement');
 
 # test for partial prefetch via columns attr
 my $cd = $schema->resultset('CD')->find(1,
@@ -42,66 +35,50 @@ my $cd = $schema->resultset('CD')->find(1,
       join => { 'artist' => {} }
     }
 );
-ok(eval { $cd->artist->name eq 'Caterwauler McCrae' }, 'single related column prefetched');
+is( $cd->artist->name, 'Caterwauler McCrae', 'single related column prefetched');
 
 # start test for nested prefetch SELECT count
-$queries = 0;
-$schema->storage->debugcb(sub { $queries++ });
-$schema->storage->debug(1);
-
-$rs = $schema->resultset('Tag')->search(
-  { 'me.tagid' => 1 },
-  {
-    prefetch => { cd => 'artist' }
-  }
-);
-
-my $tag = $rs->first;
+my $tag;
+$schema->is_executed_querycount( sub {
+  $rs = $schema->resultset('Tag')->search(
+    { 'me.tagid' => 1 },
+    {
+      prefetch => { cd => 'artist' }
+    }
+  );
 
-is( $tag->cd->title, 'Spoonful of bees', 'step 1 ok for nested prefetch' );
+  $tag = $rs->first;
 
-is( $tag->cd->artist->name, 'Caterwauler McCrae', 'step 2 ok for nested prefetch');
+  is( $tag->cd->title, 'Spoonful of bees', 'step 1 ok for nested prefetch' );
 
-# count the SELECTs
-#$selects++ if /SELECT(?!.*WHERE 1=0.*)/;
-is($queries, 1, 'nested prefetch ran exactly 1 select statement (excluding column_info)');
+  is( $tag->cd->artist->name, 'Caterwauler McCrae', 'step 2 ok for nested prefetch');
+}, 1, 'nested prefetch ran exactly 1 select statement');
 
-$queries = 0;
 
-is($tag->search_related('cd')->search_related('artist')->first->name,
+$schema->is_executed_querycount( sub {
+  is($tag->search_related('cd')->search_related('artist')->first->name,
    'Caterwauler McCrae',
    'chained belongs_to->belongs_to search_related ok');
+}, 0, 'chained search_related after belongs_to->belongs_to prefetch ran no queries');
 
-is($queries, 0, 'chained search_related after belontgs_to->belongs_to prefetch ran no queries');
-
-$queries = 0;
-
-$cd = $schema->resultset('CD')->find(1, { prefetch => 'artist' });
 
-is($cd->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'artist prefetched correctly on find');
+$schema->is_executed_querycount( sub {
+  $cd = $schema->resultset('CD')->find(1, { prefetch => 'artist' });
 
-is($queries, 1, 'find with prefetch ran exactly 1 select statement (excluding column_info)');
+  is($cd->{_inflated_column}{artist}->name, 'Caterwauler McCrae', 'artist prefetched correctly on find');
+}, 1, 'find with prefetch ran exactly 1 select statement (excluding column_info)');
 
-$queries = 0;
+$schema->is_executed_querycount( sub {
+  $cd = $schema->resultset('CD')->find(1, { prefetch => { cd_to_producer => 'producer' }, order_by => 'producer.producerid' });
 
-$schema->storage->debugcb(sub { $queries++; });
+  is($cd->producers->first->name, 'Matt S Trout', 'many_to_many accessor ok');
+}, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query');
 
-$cd = $schema->resultset('CD')->find(1, { prefetch => { cd_to_producer => 'producer' }, order_by => 'producer.producerid' });
+$schema->is_executed_querycount( sub {
+  my $producers = $cd->search_related('cd_to_producer')->search_related('producer');
 
-is($cd->producers->first->name, 'Matt S Trout', 'many_to_many accessor ok');
-
-is($queries, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query');
-
-$queries = 0;
-
-my $producers = $cd->search_related('cd_to_producer')->search_related('producer');
-
-is($producers->first->name, 'Matt S Trout', 'chained many_to_many search_related ok');
-
-is($queries, 0, 'chained search_related after many_to_many prefetch ran no queries');
-
-$schema->storage->debug($orig_debug);
-$schema->storage->debugobj->callback(undef);
+  is($producers->first->name, 'Matt S Trout', 'chained many_to_many search_related ok');
+}, 0, 'chained search_related after many_to_many prefetch ran no queries');
 
 $rs = $schema->resultset('Tag')->search(
   {},
@@ -180,27 +157,22 @@ my $left_join = $schema->resultset('CD')->search(
 
 cmp_ok($left_join, '==', 1, 'prefetch with no join record present');
 
-$queries = 0;
-$schema->storage->debugcb(sub { $queries++ });
-$schema->storage->debug(1);
-
-my $tree_like =
-     $schema->resultset('TreeLike')->find(5,
-       { join     => { parent => { parent => 'parent' } },
+my $tree_like;
+$schema->is_executed_querycount( sub {
+  $tree_like =
+    $schema->resultset('TreeLike')->find(5,
+      { join     => { parent => { parent => 'parent' } },
          prefetch => { parent => { parent => 'parent' } } });
 
-is($tree_like->name, 'quux', 'Bottom of tree ok');
-$tree_like = $tree_like->parent;
-is($tree_like->name, 'baz', 'First level up ok');
-$tree_like = $tree_like->parent;
-is($tree_like->name, 'bar', 'Second level up ok');
-$tree_like = $tree_like->parent;
-is($tree_like->name, 'foo', 'Third level up ok');
+  is($tree_like->name, 'quux', 'Bottom of tree ok');
+  $tree_like = $tree_like->parent;
+  is($tree_like->name, 'baz', 'First level up ok');
+  $tree_like = $tree_like->parent;
+  is($tree_like->name, 'bar', 'Second level up ok');
+  $tree_like = $tree_like->parent;
+  is($tree_like->name, 'foo', 'Third level up ok');
 
-$schema->storage->debug($orig_debug);
-$schema->storage->debugobj->callback(undef);
-
-cmp_ok($queries, '==', 1, 'Only one query run');
+}, 1, 'Only one query run');
 
 $tree_like = $schema->resultset('TreeLike')->search({'me.id' => 2});
 $tree_like = $tree_like->search_related('children')->search_related('children')->search_related('children')->first;
@@ -210,15 +182,15 @@ $tree_like = $schema->resultset('TreeLike')->search_related('children',
     { 'children.id' => 3, 'children_2.id' => 4 },
     { prefetch => { children => 'children' } }
   )->first;
-is(eval { $tree_like->children->first->children->first->name }, 'quux',
+is( $tree_like->children->first->children->first->name, 'quux',
    'Tree search_related with prefetch ok');
 
-$tree_like = eval { $schema->resultset('TreeLike')->search(
+$tree_like = $schema->resultset('TreeLike')->search(
     { 'children.id' => 3, 'children_2.id' => 6 },
     { join => [qw/children children children/] }
   )->search_related('children', { 'children_4.id' => 7 }, { prefetch => 'children' }
-  )->first->children->first; };
-is(eval { $tree_like->name }, 'fong', 'Tree with multiple has_many joins ok');
+  )->first->children->first;
+is( $tree_like->name, 'fong', 'Tree with multiple has_many joins ok');
 
 $rs = $schema->resultset('Artist');
 $rs->create({ artistid => 4, name => 'Unknown singer-songwriter' });
@@ -274,32 +246,24 @@ sub make_hash_struc {
     return $struc;
 }
 
-$queries = 0;
-$schema->storage->debugcb(sub { $queries++ });
-$schema->storage->debug(1);
-
-my $prefetch_result = make_hash_struc($art_rs_pr);
 
-is($queries, 1, 'nested prefetch across has_many->has_many ran exactly 1 query');
-
-my $nonpre_result   = make_hash_struc($art_rs);
+my $prefetch_result;
+$schema->is_executed_querycount( sub {
+  $prefetch_result = make_hash_struc($art_rs_pr);
+}, 1, 'nested prefetch across has_many->has_many ran exactly 1 query');
 
+my $nonpre_result = make_hash_struc($art_rs);
 is_deeply( $prefetch_result, $nonpre_result,
     'Compare 2 level prefetch result to non-prefetch result' );
 
-$queries = 0;
-
-is_deeply(
-  [ sort map { $_->title } $art_rs_pr->search_related('cds')->search_related('tracks')->all ],
-  [ 'Apiary', 'Beehind You', 'Boring Name', 'Boring Song', 'Fowlin', 'Howlin',
-    'No More Ideas', 'Sad', 'Sticky Honey', 'Stripy', 'Stung with Success',
-    'Suicidal', 'The Bees Knees', 'Under The Weather', 'Yowlin' ],
-  'chained has_many->has_many search_related ok'
-);
-
-is($queries, 0, 'chained search_related after has_many->has_many prefetch ran no queries');
-
-$schema->storage->debug($orig_debug);
-$schema->storage->debugobj->callback(undef);
+$schema->is_executed_querycount( sub {
+  is_deeply(
+    [ sort map { $_->title } $art_rs_pr->search_related('cds')->search_related('tracks')->all ],
+    [ 'Apiary', 'Beehind You', 'Boring Name', 'Boring Song', 'Fowlin', 'Howlin',
+      'No More Ideas', 'Sad', 'Sticky Honey', 'Stripy', 'Stung with Success',
+      'Suicidal', 'The Bees Knees', 'Under The Weather', 'Yowlin' ],
+    'chained has_many->has_many search_related ok'
+  );
+}, 0, 'chained search_related after has_many->has_many prefetch ran no queries');
 
 done_testing;
@@ -9,10 +9,6 @@ use DBICTest;
 
 my $schema = DBICTest->init_schema();
 
-my $queries;
-my $debugcb = sub { $queries++; };
-my $orig_debug = $schema->storage->debug;
-
 lives_ok ( sub {
   my $no_prefetch = $schema->resultset('Track')->search_related(cd =>
     {
@@ -73,16 +69,12 @@ lives_ok ( sub {
 {
   my $cd = $schema->resultset('CD')->search({}, { prefetch => 'cd_to_producer' })->find(1);
 
-  $queries = 0;
-  $schema->storage->debugcb ($debugcb);
-  $schema->storage->debug (1);
-
-  is( $cd->cd_to_producer->count, 3 ,'Count of prefetched m2m links via accessor' );
-  is( scalar $cd->cd_to_producer->all, 3, 'Amount of prefetched m2m link objects via accessor' );
-  is( $cd->search_related('cd_to_producer')->count, 3, 'Count of prefetched m2m links via search_related' );
-  is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Amount of prefetched m2m links via search_related' );
-
-  is($queries, 0, 'No queries ran so far');
+  $schema->is_executed_querycount( sub {
+    is( $cd->cd_to_producer->count, 3, 'Count of prefetched m2m links via accessor' );
+    is( scalar $cd->cd_to_producer->all, 3, 'Amount of prefetched m2m link objects via accessor' );
+    is( $cd->search_related('cd_to_producer')->count, 3, 'Count of prefetched m2m links via search_related' );
+    is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Amount of prefetched m2m links via search_related' );
+  }, 0, 'No queries ran so far');
 
   is( scalar $cd->cd_to_producer->search_related('producer')->all, 3,
       'Amount of objects via search_related off prefetched linker' );
@@ -97,16 +89,12 @@ lives_ok ( sub {
   is( $cd->producers->count, 3,
       'Count via m2m accessor' );
 
-  $queries = 0;
-
-  is( $cd->cd_to_producer->count, 3 ,'Review count of prefetched m2m links via accessor' );
-  is( scalar $cd->cd_to_producer->all, 3, 'Review amount of prefetched m2m link objects via accessor' );
-  is( $cd->search_related('cd_to_producer')->count, 3, 'Review count of prefetched m2m links via search_related' );
-  is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Rreview amount of prefetched m2m links via search_related' );
-
-  is($queries, 0, 'Still no queries on prefetched linker');
-  $schema->storage->debugcb (undef);
-  $schema->storage->debug ($orig_debug);
+  $schema->is_executed_querycount( sub {
+    is( $cd->cd_to_producer->count, 3, 'Review count of prefetched m2m links via accessor' );
+    is( scalar $cd->cd_to_producer->all, 3, 'Review amount of prefetched m2m link objects via accessor' );
+    is( $cd->search_related('cd_to_producer')->count, 3, 'Review count of prefetched m2m links via search_related' );
+    is( scalar $cd->search_related('cd_to_producer')->all, 3, 'Review amount of prefetched m2m links via search_related' );
+  }, 0, 'Still no queries on prefetched linker');
 }
 
 # tests with distinct => 1
@@ -169,21 +157,18 @@ lives_ok (sub {
     is($rs->all, 1, 'distinct with prefetch (objects)');
     is($rs->count, 1, 'distinct with prefetch (count)');
 
-    $queries = 0;
-    $schema->storage->debugcb ($debugcb);
-    $schema->storage->debug (1);
+    local $TODO = "This makes another 2 trips to the database, it can't be right";
+    $schema->is_executed_querycount( sub {
 
-    # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2
-    is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)');
-    is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)');
+      # the is() calls are not todoified
+      local $TODO;
 
-    {
-      local $TODO = "This makes another 2 trips to the database, it can't be right";
-      is ($queries, 0, 'No extra queries fired (prefetch survives search_related)');
-    }
+      # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2
+      is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)');
+      is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)');
+
+    }, 0, 'No extra queries fired (prefetch survives search_related)');
 
-    $schema->storage->debugcb (undef);
-    $schema->storage->debug ($orig_debug);
 }, 'distinct generally works with prefetch on deep search_related chains');
 
 # pathological "user knows what they're doing" case
@@ -200,7 +185,7 @@ lives_ok (sub {
   });
 
   is_deeply(
-    $rs->all_hri,
+    $rs->search({}, { order_by => 'me.title' })->all_hri,
     [
       { title => "Caterwaulin' Blues", max_trk => 3 },
       { title => "Come Be Depressed With Us", max_trk => 3 },
@@ -6,8 +6,7 @@ use warnings;
 use Test::More;
 use Test::Exception;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype;
@@ -152,8 +151,8 @@ throws_ok (
       {'tracks.title' => { '!=' => 'foo' }},
       { order_by => \ 'some oddball literal sql', join => { cds => 'tracks' } }
     )->next
-  }, qr/A required group_by clause could not be constructed automatically/,
-) || exit;
+  }, qr/Unable to programatically derive a required group_by from the supplied order_by criteria/,
+);
 
 my $artist = $use_prefetch->search({'cds.title' => $artist_many_cds->cds->first->title })->next;
 is($artist->cds->count, 1, "count on search limiting prefetched has_many");
@@ -4,11 +4,9 @@ use warnings;
 use Test::More;
 use Test::Exception;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
-my $sdebug = $schema->storage->debug;
 
 # has_a test
 my $cd = $schema->resultset("CD")->find(4);
@@ -33,17 +31,14 @@ $artist->create_related( 'cds', {
 my $big_flop_cd = ($artist->search_related('cds'))[3];
 is( $big_flop_cd->title, 'Big Flop', 'create_related ok' );
 
-{ # make sure we are not making pointless select queries when a FK IS NULL
-  my $queries = 0;
-  $schema->storage->debugcb(sub { $queries++; });
-  $schema->storage->debug(1);
+# make sure we are not making pointless select queries when a FK IS NULL
+$schema->is_executed_querycount( sub {
   $big_flop_cd->genre; #should not trigger a select query
-  is($queries, 0, 'No SELECT made for belongs_to if key IS NULL');
+}, 0, 'No SELECT made for belongs_to if key IS NULL');
+
+$schema->is_executed_querycount( sub {
   $big_flop_cd->genre_inefficient; #should trigger a select query
-  is($queries, 1, 'SELECT made for belongs_to if key IS NULL when undef_on_null_fk disabled');
-  $schema->storage->debug($sdebug);
-  $schema->storage->debugcb(undef);
-}
+}, 1, 'SELECT made for belongs_to if key IS NULL when undef_on_null_fk disabled');
 
 my( $rs_from_list ) = $artist->search_related_rs('cds');
 isa_ok( $rs_from_list, 'DBIx::Class::ResultSet', 'search_related_rs in list context returns rs' );
@@ -232,7 +227,7 @@ is( $twokey->fourkeys_to_twokeys->count, 0,
 
 
 my $undef_artist_cd = $schema->resultset("CD")->new_result({ 'title' => 'badgers', 'year' => 2007 });
-is($undef_artist_cd->has_column_loaded('artist'), '', 'FK not loaded');
+ok(! $undef_artist_cd->has_column_loaded('artist'), 'FK not loaded');
 is($undef_artist_cd->search_related('artist')->count, 0, '0=1 search when FK does not exist and object not yet in db');
 lives_ok {
      $undef_artist_cd->related_resultset('artist')->new({name => 'foo'});
@@ -3,9 +3,9 @@ use warnings;
 
 use Test::More;
 use Test::Exception;
+use Test::Warn;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -44,7 +44,7 @@ is_same_sql_bind(
   )',
   [
     [
-      { sqlt_datatype => 'integer', dbic_colname => 'me.artist' }
+      {}
         => 21
     ],
     [
@@ -152,15 +152,20 @@ is_deeply(
 
 } 'prefetchy-fetchy-fetch';
 
+# create_related a plain cd via the equivalent coderef cond, with no extra conditions
+lives_ok {
+  $artist->create_related('cds_cref_cond', { title => 'related creation via coderef cond', year => '2010' } );
+} 'create_related with simple condition works';
 
 # try to create_related a 80s cd
 throws_ok {
   $artist->create_related('cds_80s', { title => 'related creation 1' });
-} qr/\QCustom relationship 'cds_80s' not definitive - returns conditions instead of values for column(s): 'year'/,
+} qr/\QUnable to complete value inferrence - custom relationship 'cds_80s' on source 'Artist' returns conditions instead of values for column(s): 'year'/,
 'Create failed - complex cond';
 
 # now supply an explicit arg overwriting the ambiguous cond
-my $id_2020 = $artist->create_related('cds_80s', { title => 'related creation 2', year => '2020' })->id;
+my $cd_2020 = $artist->create_related('cds_80s', { title => 'related creation 2', year => '2020' });
+my $id_2020 = $cd_2020->id;
 is(
   $schema->resultset('CD')->find($id_2020)->title,
   'related creation 2',
@@ -178,7 +183,7 @@ is(
 # try a specific everything via a non-simplified rel
 throws_ok {
   $artist->create_related('cds_90s', { title => 'related_creation 4', year => '2038' });
-} qr/\QCustom relationship 'cds_90s' does not resolve to a join-free condition fragment/,
+} qr/\QRelationship 'cds_90s' on source 'Artist' does not resolve to a join-free condition fragment/,
 'Create failed - non-simplified rel';
 
 # Do a self-join last-entry search
@@ -269,4 +274,71 @@ is_deeply (
   'Prefetched singles in proper order'
 );
 
+# test set_from_related/find_related with a belongs_to custom condition
+my $preexisting_cd = $schema->resultset('CD')->find(1);
+
+my $cd_single_track = $schema->resultset('CD')->create({
+  artist => $artist,
+  title => 'one one one',
+  year => 2001,
+  tracks => [{ title => 'uno uno uno' }]
+});
+
+my $single_track = $cd_single_track->tracks->next;
+
+is(
+  $single_track->cd_cref_cond->title,
+  $cd_single_track->title,
+  'Got back the expected single-track cd title',
+);
+
+is_deeply
+  { $schema->resultset('Track')->find({ cd_cref_cond => { cdid => $cd_single_track->id } })->get_columns },
+  { $single_track->get_columns },
+  'Proper find with related via coderef cond',
+;
+
+warnings_exist {
+  is_same_sql_bind(
+    $single_track->deliberately_broken_all_cd_tracks->as_query,
+    '(
+      SELECT me.trackid, me.cd, me.position, me.title, me.last_updated_on, me.last_updated_at
+        FROM track track__row
+        JOIN track me
+          ON me.cd = ?
+      WHERE track__row.trackid = ?
+    )',
+    [
+      [{ dbic_colname => "me.cd", sqlt_datatype => "integer" }
+        => "track__row.cd" ],
+      [{ dbic_colname => "track__row.trackid", sqlt_datatype => "integer" }
+        => 19 ],
+    ],
+    'Expected nonsensical JOIN cond',
+  ),
+} qr/\Qrelationship 'deliberately_broken_all_cd_tracks' on source 'Track' specifies equality of column 'cd' and the *VALUE* 'cd' (you did not use the { -ident => ... } operator)/,
+  'Warning on 99.9999% malformed custom cond'
+;
+
+$single_track->set_from_related( cd_cref_cond => undef );
+ok $single_track->is_column_changed('cd');
+is $single_track->get_column('cd'), undef, 'UNset from related via coderef cond';
+is $single_track->cd, undef, 'UNset related object via coderef cond';
+
+$single_track->discard_changes;
+
+$single_track->set_from_related( cd_cref_cond => $preexisting_cd );
+ok $single_track->is_column_changed('cd');
+is $single_track->get_column('cd'), 1, 'set from related via coderef cond';
+is_deeply
+  { $single_track->cd->get_columns },
+  { $preexisting_cd->get_columns },
+  'set from related via coderef cond inflates properly',
+;
+
+throws_ok {
+  local $schema->source('Track')->relationship_info('cd_cref_cond')->{cond} = sub { 1,2,3 };
+  $schema->resultset('Track')->find({ cd_cref_cond => {} });
+} qr/\QA custom condition coderef can return at most 2 conditions, but relationship 'cd_cref_cond' on source 'Track' returned extra values: 3/;
+
 done_testing;
@@ -0,0 +1,51 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib 't/lib';
+use DBICTest;
+
+my $schema = DBICTest->init_schema( no_populate => 1, quote_names => 1 );
+
+$schema->resultset('CD')->create({
+  title => 'Equinoxe',
+  year => 1978,
+  artist => { name => 'JMJ' },
+  genre => { name => 'electro' },
+  tracks => [
+    { title => 'e1' },
+    { title => 'e2' },
+    { title => 'e3' },
+  ],
+  single_track => {
+    title => 'o1',
+    cd => {
+      title => 'Oxygene',
+      year => 1976,
+      artist => { name => 'JMJ' },
+    },
+  },
+});
+
+my $cd = $schema->resultset('CD')->search({ single_track => { '!=', undef } })->first;
+
+$schema->is_executed_sql_bind(
+  sub { is( eval{$cd->single_track_opaque->title}, 'o1', 'Found correct single track' ) },
+  [
+    [
+      'SELECT "me"."trackid", "me"."cd", "me"."position", "me"."title", "me"."last_updated_on", "me"."last_updated_at"
+          FROM cd "cd__row"
+          JOIN "track" "me"
+            ON me.trackid = cd__row.single_track
+        WHERE "cd__row"."cdid" = ?
+      ',
+      [
+        { dbic_colname => "cd__row.cdid", sqlt_datatype => "integer" }
+          => 2
+      ]
+    ],
+  ],
+);
+
+done_testing;
@@ -0,0 +1,28 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest::Schema::Artist;
+
+my $pkg = 'DBICTest::Schema::Artist';
+
+for my $call (qw(has_many might_have has_one belongs_to)) {
+  {
+    local $TODO = 'stupid stupid heuristic - needs to die'
+      if $call eq 'belongs_to';
+
+    throws_ok {
+      $pkg->$call( foos => 'nonexistent bars', { foo => 'self.artistid' } );
+    } qr/Malformed relationship condition key 'foo': must be prefixed with 'foreign.'/,
+    "Correct exception on $call with malformed foreign.";
+  }
+
+  throws_ok {
+    $pkg->has_many( foos => 'nonexistent bars', { 'foreign.foo' => 'name' } );
+  } qr/\QMalformed relationship condition value 'name': must be prefixed with 'self.'/,
+  "Correct exception on $call with malformed self.";
+}
+
+done_testing;
@@ -6,10 +6,8 @@ use Test::Exception;
 use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
-my $sdebug = $schema->storage->debug;
 
 my $artist = $schema->resultset ('Artist')->find(1);
 
@@ -79,28 +77,30 @@ throws_ok {
 # expect a create, after a failed search using *only* the
 # *current* relationship and the unique column constraints
 # (so no year)
-my @sql;
-$schema->storage->debugcb(sub { push @sql, $_[1] });
-$schema->storage->debug (1);
-
-$genre->update_or_create_related ('cds', {
-  title => 'the best thing since vertical toasters',
-  artist => $artist,
-  year => 2012,
-});
-
-$schema->storage->debugcb(undef);
-$schema->storage->debug ($sdebug);
-
-my ($search_sql) = $sql[0] =~ /^(SELECT .+?)\:/;
-is_same_sql (
-  $search_sql,
-  'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
-    FROM cd me
-    WHERE ( me.artist = ? AND me.title = ? AND me.genreid = ? )
-  ',
-  'expected select issued',
-);
+$schema->is_executed_sql_bind( sub {
+  $genre->update_or_create_related ('cds', {
+    title => 'the best thing since vertical toasters',
+    artist => $artist,
+    year => 2012,
+  });
+}, [
+  [
+    'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+        FROM cd me
+      WHERE ( me.artist = ? AND me.genreid = ? AND me.title = ? )
+    ',
+    1,
+    2,
+    "the best thing since vertical toasters",
+  ],
+  [
+    'INSERT INTO cd ( artist, genreid, title, year ) VALUES ( ?, ?, ?, ? )',
+    1,
+    2,
+    "the best thing since vertical toasters",
+    2012,
+  ],
+], 'expected select issued' );
 
 # a has_many search without a unique constraint makes no sense
 # but I am not sure what to test for - leaving open
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 my $art_rs = $schema->resultset('Artist');
@@ -43,8 +42,8 @@ my $rank_resolved_bind = [
 {
   is_same_sql_bind(
     $art_rs->as_query,
-    "(SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE ( ( ( rank = ? ) AND ( name = ? ) ) ) )",
-    [ $rank_resolved_bind, $name_resolved_bind ],
+    "(SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE name = ? AND rank = ? )",
+    [ $name_resolved_bind, $rank_resolved_bind ],
   );
 }
 
@@ -53,8 +52,8 @@ my $rscol = $art_rs->get_column( 'charfield' );
 {
   is_same_sql_bind(
     $rscol->as_query,
-    "(SELECT me.charfield FROM artist me WHERE ( ( ( rank = ? ) AND ( name = ? ) ) ) )",
-    [ $rank_resolved_bind, $name_resolved_bind ],
+    "(SELECT me.charfield FROM artist me WHERE name = ? AND rank = ? )",
+    [ $name_resolved_bind, $rank_resolved_bind ],
   );
 }
 
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -40,8 +39,6 @@ is_same_sql_bind (
   'Resultset-class attributes do not seep outside of the subselect',
 );
 
-$schema->storage->debug(1);
-
 is_same_sql_bind(
   $schema->resultset('CD')->search ({}, {
     rows => 2,
@@ -70,5 +67,4 @@ is_same_sql_bind(
   [ [{ sqlt_datatype => 'integer' } => 2 ] ],
 );
 
-
 done_testing;
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema;
 
@@ -16,8 +15,6 @@ my $where_bind = {
 my $rs;
 
 {
-    local $TODO = 'bind args order needs fixing (semifor)';
-
     # First, the simple cases...
     $rs = $schema->resultset('Artist')->search(
             { artistid => 1 },
@@ -37,7 +34,6 @@ my $rs;
     is ( $rs->count, 1, 'where/bind last' );
 
     # and the complex case
-    local $TODO = 'bind args order needs fixing (semifor)';
     $rs = $schema->resultset('CustomSql')->search({}, { bind => [ 1999 ] })
         ->search({ 'artistid' => 1 }, {
             where => \'title like ?',
@@ -0,0 +1,34 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+my $rs = $schema->resultset('Artist');
+
+for my $id (
+  2,
+  \' = 2 ',
+  \[ '= ?', 2 ],
+) {
+  lives_ok {
+    is( $rs->find({ artistid => $id })->id, 2 )
+  } "Correctly found artist with id of @{[ explain $id ]}";
+}
+
+for my $id (
+  2,
+  \'2',
+  \[ '?', 2 ],
+) {
+  my $cond = { artistid => { '=', $id } };
+  lives_ok {
+    is( $rs->find($cond)->id, 2 )
+  } "Correctly found artist with id of @{[ explain $cond ]}";
+}
+
+done_testing;
@@ -502,4 +502,31 @@ sub cmp_structures {
   cmp_deeply($left, $right, $msg||()) or next INFTYPE;
 }
 
+{
+  package DBICTest::_DoubleResult;
+
+  sub inflate_result {
+    my $class = shift;
+    return map { DBIx::Class::ResultClass::HashRefInflator->inflate_result(@_) } (1,2);
+  }
+}
+
+my $oxygene_rs = $schema->resultset('CD')->search({ 'me.title' => 'Oxygene' });
+
+is_deeply(
+  [ $oxygene_rs->search({}, { result_class => 'DBICTest::_DoubleResult' })->all ],
+  [ ({ $oxygene_rs->single->get_columns }) x 2 ],
+);
+
+is_deeply(
+  [ $oxygene_rs->search({}, {
+    result_class => 'DBICTest::_DoubleResult', prefetch => [qw(artist tracks)],
+    order_by => [qw(me.cdid tracks.title)],
+  })->all ],
+  [ (@{$oxygene_rs->search({}, {
+    prefetch=> [qw(artist tracks)],
+    order_by => [qw(me.cdid tracks.title)],
+  })->all_hri}) x 2 ],
+);
+
 done_testing;
@@ -5,6 +5,7 @@ use Test::More;
 use lib qw(t/lib);
 use DBICTest;
 use B::Deparse;
+use DBIx::Class::_Util 'perlstring';
 
 # globally set for the rest of test
 # the rowparser maker does not order its hashes by default for the miniscule
@@ -758,7 +759,7 @@ sub is_same_src { SKIP: {
   skip "Not testing equality of source containing defined-or operator on this perl $]", 1
     if ($] < 5.010 and$expect =~ m!\Q//=!);
 
-  $expect =~ s/__NBC__/B::perlstring($DBIx::Class::ResultSource::RowParser::Util::null_branch_class)/ge;
+  $expect =~ s/__NBC__/perlstring($DBIx::Class::ResultSource::RowParser::Util::null_branch_class)/ge;
 
   $expect = "  { use strict; use warnings FATAL => 'all';\n$expect\n  }";
 
@@ -5,6 +5,11 @@ use lib qw(t/lib);
 use Test::More;
 use Test::Exception;
 
+# MASSIVE FIXME - there is a hole in ::RSC / as_subselect_rs
+# losing the order. Needs a rework/extract of the realiaser,
+# and that's a whole other bag of dicks
+BEGIN { $ENV{DBIC_SHUFFLE_UNORDERED_RESULTSETS} = 0 }
+
 use DBICTest::Schema::CD;
 BEGIN {
   # the default scalarref table name will not work well for this test
@@ -12,16 +17,9 @@ BEGIN {
 }
 
 use DBICTest;
-use DBIC::DebugObj;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema;
 
-my ($sql, @bind);
-my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
-my $orig_debugobj = $schema->storage->debugobj;
-my $orig_debug = $schema->storage->debug;
-
 my $tkfks = $schema->resultset('FourKeys_to_TwoKeys');
 
 my ($fa, $fb, $fc) = $tkfks->related_resultset ('fourkeys')->populate ([
@@ -38,7 +36,7 @@ my ($fa, $fb, $fc) = $tkfks->related_resultset ('fourkeys')->populate ([
 #  [qw/2       2  /],
 #]);
 my ($ta, $tb) = $schema->resultset ('TwoKeys')
-                  ->search ( [ { artist => 1, cd => 1 }, { artist => 2, cd => 2 } ])
+                  ->search ( [ { artist => 1, cd => 1 }, { artist => 2, cd => 2 } ], { order_by => 'artist' })
                     ->all;
 
 my $tkfk_cnt = $tkfks->count;
@@ -64,64 +62,89 @@ my $fks = $schema->resultset ('FourKeys')->search (
 );
 
 is ($fks->count, 4, 'Joined FourKey count correct (2x2)');
-
-$schema->storage->debugobj ($debugobj);
-$schema->storage->debug (1);
-$fks->update ({ read_count => \ 'read_count + 1' });
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
+$schema->is_executed_sql_bind( sub {
+  $fks->update ({ read_count => \ 'read_count + 1' })
+}, [[
   'UPDATE fourkeys
    SET read_count = read_count + 1
    WHERE ( ( ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? ) )
   ',
-  [ ("'1'", "'2'") x 4, "'c'" ],
-  'Correct update-SQL with multijoin with pruning',
-);
+  (1, 2) x 4,
+  'c',
+]], 'Correct update-SQL with multijoin with pruning' );
 
 is ($fa->discard_changes->read_count, 11, 'Update ran only once on discard-join resultset');
 is ($fb->discard_changes->read_count, 21, 'Update ran only once on discard-join resultset');
 is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier');
 
 # make the multi-join stick
-my $fks_multi = $fks->search({ 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } });
-
-$schema->storage->debugobj ($debugobj);
-$schema->storage->debug (1);
-$fks_multi->update ({ read_count => \ 'read_count + 1' });
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
-  'UPDATE fourkeys
-   SET read_count = read_count + 1
-   WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )',
-  [ map { "'$_'" } ( (1) x 4, (2) x 4 ) ],
-  'Correct update-SQL with multijoin without pruning',
+my $fks_multi = $fks->search(
+  { 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } },
+  { order_by => [ $fks->result_source->primary_columns ] },
 );
+$schema->is_executed_sql_bind( sub {
+  $fks_multi->update ({ read_count => \ 'read_count + 1' })
+}, [
+  [ 'BEGIN' ],
+  [
+    'SELECT me.foo, me.bar, me.hello, me.goodbye
+      FROM fourkeys me
+      LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys
+        ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello
+      WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ?
+      GROUP BY me.foo, me.bar, me.hello, me.goodbye
+      ORDER BY foo, bar, hello, goodbye
+    ',
+    (1, 2) x 2,
+    666,
+    (1, 2) x 2,
+    'c',
+  ],
+  [
+    'UPDATE fourkeys
+     SET read_count = read_count + 1
+     WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )
+    ',
+    ( (1) x 4, (2) x 4 ),
+  ],
+  [ 'COMMIT' ],
+], 'Correct update-SQL with multijoin without pruning' );
 
 is ($fa->discard_changes->read_count, 12, 'Update ran only once on joined resultset');
 is ($fb->discard_changes->read_count, 22, 'Update ran only once on joined resultset');
 is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier');
 
+$schema->is_executed_sql_bind( sub {
+  my $res = $fks_multi->search (\' "blah" = "bleh" ')->delete;
+  ok ($res, 'operation is true');
+  cmp_ok ($res, '==', 0, 'zero rows affected');
+}, [
+  [ 'BEGIN' ],
+  [
+    'SELECT me.foo, me.bar, me.hello, me.goodbye
+      FROM fourkeys me
+      LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys
+        ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello
+      WHERE "blah" = "bleh" AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ?
+      GROUP BY me.foo, me.bar, me.hello, me.goodbye
+      ORDER BY foo, bar, hello, goodbye
+    ',
+    (1, 2) x 2,
+    666,
+    (1, 2) x 2,
+    'c',
+  ],
+  [ 'COMMIT' ],
+], 'Correct null-delete-SQL with multijoin without pruning' );
+
+
 # try the same sql with forced multicolumn in
-$schema->storage->_use_multicolumn_in (1);
-$schema->storage->debugobj ($debugobj);
-$schema->storage->debug (1);
-throws_ok { $fks_multi->update ({ read_count => \ 'read_count + 1' }) } # this can't actually execute, we just need the "as_query"
-  qr/\QDBI Exception:/ or do { $sql = ''; @bind = () };
-$schema->storage->_use_multicolumn_in (undef);
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
+$schema->is_executed_sql_bind( sub {
+  local $schema->storage->{_use_multicolumn_in} = 1;
+
+  # this can't actually execute on sqlite
+  eval { $fks_multi->update ({ read_count => \ 'read_count + 1' }) };
+}, [[
   'UPDATE fourkeys
     SET read_count = read_count + 1
     WHERE (
@@ -133,42 +156,49 @@ is_same_sql_bind (
             AND fourkeys_to_twokeys.f_foo = me.foo
             AND fourkeys_to_twokeys.f_goodbye = me.goodbye
             AND fourkeys_to_twokeys.f_hello = me.hello
-        WHERE fourkeys_to_twokeys.pilot_sequence != ? AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ?
+        WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ?
+        ORDER BY foo, bar, hello, goodbye
       )
     )
   ',
+  (1, 2) x 2,
+  666,
+  (1, 2) x 2,
+  'c',
+]], 'Correct update-SQL with multicolumn in support' );
+
+$schema->is_executed_sql_bind( sub {
+  $fks->search({ 'twokeys.artist' => { '!=' => 666 } })->update({ read_count => \ 'read_count + 1' });
+}, [
+  [ 'BEGIN' ],
   [
-    "'666'",
-    ("'1'", "'2'") x 4,
-    "'c'",
+    'SELECT me.foo, me.bar, me.hello, me.goodbye
+      FROM fourkeys me
+      LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys
+        ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello
+      LEFT JOIN twokeys twokeys
+        ON twokeys.artist = fourkeys_to_twokeys.t_artist AND twokeys.cd = fourkeys_to_twokeys.t_cd
+      WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? AND twokeys.artist != ?
+      GROUP BY me.foo, me.bar, me.hello, me.goodbye
+    ',
+    (1, 2) x 4,
+    'c',
+    666,
   ],
-  'Correct update-SQL with multicolumn in support',
-);
-
-# make a *premultiplied* join stick
-my $fks_premulti = $fks->search({ 'twokeys.artist' => { '!=' => 666 } });
-
-$schema->storage->debugobj ($debugobj);
-$schema->storage->debug (1);
-$fks_premulti->update ({ read_count => \ 'read_count + 1' });
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
-  'UPDATE fourkeys
-   SET read_count = read_count + 1
-   WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )',
-  [ map { "'$_'" } ( (1) x 4, (2) x 4 ) ],
-  'Correct update-SQL with premultiplied restricting join without pruning',
-);
+  [
+    'UPDATE fourkeys
+     SET read_count = read_count + 1
+     WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? )
+    ',
+    ( (1) x 4, (2) x 4 ),
+  ],
+  [ 'COMMIT' ],
+], 'Correct update-SQL with premultiplied restricting join without pruning' );
 
 is ($fa->discard_changes->read_count, 13, 'Update ran only once on joined resultset');
 is ($fb->discard_changes->read_count, 23, 'Update ran only once on joined resultset');
 is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier');
 
-
 #
 # Make sure multicolumn in or the equivalent functions correctly
 #
@@ -252,43 +282,34 @@ is ($tkfks->count, $tkfk_cnt -= 1, 'Only one row deleted');
 
 
 # check with sql-equality, as sqlite will accept most bad sql just fine
-$schema->storage->debugobj ($debugobj);
-$schema->storage->debug (1);
-
 {
   my $rs = $schema->resultset('CD')->search(
     { 'me.year' => { '!=' => 2010 } },
   );
 
-  $rs->search({}, { join => 'liner_notes' })->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $rs->search({}, { join => 'liner_notes' })->delete;
+  }, [[
     'DELETE FROM cd WHERE ( year != ? )',
-    ["'2010'"],
-    'Non-restricting multijoins properly thrown out'
-  );
+    2010,
+  ]], 'Non-restricting multijoins properly thrown out' );
 
-  $rs->search({}, { prefetch => 'liner_notes' })->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $rs->search({}, { prefetch => 'liner_notes' })->delete;
+  }, [[
     'DELETE FROM cd WHERE ( year != ? )',
-    ["'2010'"],
-    'Non-restricting multiprefetch thrown out'
-  );
+    2010,
+  ]], 'Non-restricting multiprefetch thrown out' );
 
-  $rs->search({}, { prefetch => 'artist' })->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $rs->search({}, { prefetch => 'artist' })->delete;
+  }, [[
     'DELETE FROM cd WHERE ( cdid IN ( SELECT me.cdid FROM cd me JOIN artist artist ON artist.artistid = me.artist WHERE ( me.year != ? ) ) )',
-    ["'2010'"],
-    'Restricting prefetch left in, selector thrown out'
-  );
+    2010,
+  ]], 'Restricting prefetch left in, selector thrown out');
 
-  # switch artist and cd to fully qualified table names
-  # make sure nothing is stripped out
+### switch artist and cd to fully qualified table names
+### make sure nothing is stripped out
   my $cd_rsrc = $schema->source('CD');
   $cd_rsrc->name('main.cd');
   $cd_rsrc->relationship_info($_)->{attrs}{cascade_delete} = 0
@@ -299,85 +320,80 @@ $schema->storage->debug (1);
   $art_rsrc->relationship_info($_)->{attrs}{cascade_delete} = 0
     for $art_rsrc->relationships;
 
-  $rs->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
-    'DELETE FROM main.cd WHERE ( year != ? )',
-    ["'2010'"],
-    'delete with fully qualified table name'
-  );
+  $schema->is_executed_sql_bind( sub {
+    $rs->delete
+  }, [[
+    'DELETE FROM main.cd WHERE year != ?',
+    2010,
+  ]], 'delete with fully qualified table name' );
 
   $rs->create({ title => 'foo', artist => 1, year => 2000 });
-  $rs->delete_all;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
-    'DELETE FROM main.cd WHERE ( cdid = ? )',
-    ["'1'"],
-    'delete_all with fully qualified table name'
-  );
-
-  $rs->create({ cdid => 42, title => 'foo', artist => 2, year => 2000 });
-  $rs->find(42)->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
-    'DELETE FROM main.cd WHERE ( cdid = ? )',
-    ["'42'"],
-    'delete of object from table with fully qualified name'
-  );
+  $schema->is_executed_sql_bind( sub {
+    $rs->delete_all
+  }, [
+    [ 'BEGIN' ],
+    [
+      'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM main.cd me WHERE me.year != ?',
+      2010,
+    ],
+    [
+      'DELETE FROM main.cd WHERE ( cdid = ? )',
+      1,
+    ],
+    [ 'COMMIT' ],
+  ], 'delete_all with fully qualified table name' );
 
   $rs->create({ cdid => 42, title => 'foo', artist => 2, year => 2000 });
-  $rs->find(42)->related_resultset('artist')->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  my $cd42 = $rs->find(42);
+
+  $schema->is_executed_sql_bind( sub {
+    $cd42->delete
+  }, [[
+    'DELETE FROM main.cd WHERE cdid = ?',
+    42,
+  ]], 'delete of object from table with fully qualified name' );
+
+  $schema->is_executed_sql_bind( sub {
+    $cd42->related_resultset('artist')->delete
+  }, [[
     'DELETE FROM main.artist WHERE ( artistid IN ( SELECT me.artistid FROM main.artist me WHERE ( me.artistid = ? ) ) )',
-    ["'2'"],
-    'delete of related object from scalarref fully qualified named table',
-  );
+    2,
+  ]], 'delete of related object from scalarref fully qualified named table' );
 
-  $schema->resultset('Artist')->find(3)->related_resultset('cds')->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  my $art3 = $schema->resultset('Artist')->find(3);
+
+  $schema->is_executed_sql_bind( sub {
+    $art3->related_resultset('cds')->delete;
+  }, [[
     'DELETE FROM main.cd WHERE ( artist = ? )',
-    ["'3'"],
-    'delete of related object from fully qualified named table',
-  );
+    3,
+  ]], 'delete of related object from fully qualified named table' );
 
-  $schema->resultset('Artist')->find(3)->cds_unordered->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $art3->cds_unordered->delete;
+  }, [[
     'DELETE FROM main.cd WHERE ( artist = ? )',
-    ["'3'"],
-    'delete of related object from fully qualified named table via relaccessor',
-  );
+    3,
+  ]], 'delete of related object from fully qualified named table via relaccessor' );
 
-  $rs->search({}, { prefetch => 'artist' })->delete;
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $rs->search({}, { prefetch => 'artist' })->delete;
+  }, [[
     'DELETE FROM main.cd WHERE ( cdid IN ( SELECT me.cdid FROM main.cd me JOIN main.artist artist ON artist.artistid = me.artist WHERE ( me.year != ? ) ) )',
-    ["'2010'"],
-    'delete with fully qualified table name and subquery correct'
-  );
+    2010,
+  ]], 'delete with fully qualified table name and subquery correct' );
 
   # check that as_subselect_rs works ok
   # inner query is untouched, then a selector
   # and an IN condition
-  $schema->resultset('CD')->search({
-    'me.cdid' => 1,
-    'artist.name' => 'partytimecity',
-  }, {
-    join => 'artist',
-  })->as_subselect_rs->delete;
-
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    $schema->resultset('CD')->search({
+      'me.cdid' => 1,
+      'artist.name' => 'partytimecity',
+    }, {
+      join => 'artist',
+    })->as_subselect_rs->delete;
+  }, [[
     '
       DELETE FROM main.cd
       WHERE (
@@ -392,12 +408,9 @@ $schema->storage->debug (1);
         )
       )
     ',
-    ["'partytimecity'", "'1'"],
-    'Delete from as_subselect_rs works correctly'
-  );
+    'partytimecity',
+    1,
+  ]], 'Delete from as_subselect_rs works correctly' );
 }
 
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
 done_testing;
@@ -0,0 +1,31 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+my $cd = $schema->resultset('CD')->search({}, {
+  '+columns' => { avg_year => $schema->resultset('CD')->get_column('year')->func_rs('avg')->as_query },
+  order_by => 'cdid',
+})->next;
+
+my $ccd = $cd->copy({ cdid => 5_000_000, artist => 2 });
+
+cmp_ok(
+  $ccd->id,
+  '!=',
+  $cd->id,
+  'IDs differ'
+);
+
+is(
+  $ccd->title,
+  $cd->title,
+  'Title same on copied object',
+);
+
+done_testing;
@@ -8,65 +8,65 @@ use DBICTest;
 
 my $from_storage_ran = 0;
 my $to_storage_ran = 0;
-my $schema = DBICTest->init_schema();
+my $schema = DBICTest->init_schema( no_populate => 1 );
 DBICTest::Schema::Artist->load_components(qw(FilterColumn InflateColumn));
-DBICTest::Schema::Artist->filter_column(rank => {
-  filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 },
-  filter_to_storage   => sub { $to_storage_ran++; $_[1] / 2 },
+DBICTest::Schema::Artist->filter_column(charfield => {
+  filter_from_storage => sub { $from_storage_ran++; defined $_[1] ? $_[1] * 2 : undef },
+  filter_to_storage   => sub { $to_storage_ran++; defined $_[1] ? $_[1] / 2 : undef },
 });
-Class::C3->reinitialize();
+Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
 
-my $artist = $schema->resultset('Artist')->create( { rank => 20 } );
+my $artist = $schema->resultset('Artist')->create( { charfield => 20 } );
 
 # this should be using the cursor directly, no inflation/processing of any sort
-my ($raw_db_rank) = $schema->resultset('Artist')
+my ($raw_db_charfield) = $schema->resultset('Artist')
                              ->search ($artist->ident_condition)
-                               ->get_column('rank')
+                               ->get_column('charfield')
                                 ->_resultset
                                  ->cursor
                                   ->next;
 
-is ($raw_db_rank, 10, 'INSERT: correctly unfiltered on insertion');
+is ($raw_db_charfield, 10, 'INSERT: correctly unfiltered on insertion');
 
 for my $reloaded (0, 1) {
   my $test = $reloaded ? 'reloaded' : 'stored';
   $artist->discard_changes if $reloaded;
 
-  is( $artist->rank , 20, "got $test filtered rank" );
+  is( $artist->charfield , 20, "got $test filtered charfield" );
 }
 
 $artist->update;
 $artist->discard_changes;
-is( $artist->rank , 20, "got filtered rank" );
+is( $artist->charfield , 20, "got filtered charfield" );
 
-$artist->update ({ rank => 40 });
-($raw_db_rank) = $schema->resultset('Artist')
+$artist->update ({ charfield => 40 });
+($raw_db_charfield) = $schema->resultset('Artist')
                              ->search ($artist->ident_condition)
-                               ->get_column('rank')
+                               ->get_column('charfield')
                                 ->_resultset
                                  ->cursor
                                   ->next;
-is ($raw_db_rank, 20, 'UPDATE: correctly unflitered on update');
+is ($raw_db_charfield, 20, 'UPDATE: correctly unfiltered on update');
 
 $artist->discard_changes;
-$artist->rank(40);
-ok( !$artist->is_column_changed('rank'), 'column is not dirty after setting the same value' );
+$artist->charfield(40);
+ok( !$artist->is_column_changed('charfield'), 'column is not dirty after setting the same value' );
 
 MC: {
    my $cd = $schema->resultset('CD')->create({
-      artist => { rank => 20 },
+      artist => { charfield => 20 },
       title => 'fun time city!',
       year => 'forevertime',
    });
-   ($raw_db_rank) = $schema->resultset('Artist')
+   ($raw_db_charfield) = $schema->resultset('Artist')
                                 ->search ($cd->artist->ident_condition)
-                                  ->get_column('rank')
+                                  ->get_column('charfield')
                                    ->_resultset
                                     ->cursor
                                      ->next;
 
-   is $raw_db_rank, 10, 'artist rank gets correctly unfiltered w/ MC';
-   is $cd->artist->rank, 20, 'artist rank gets correctly filtered w/ MC';
+   is $raw_db_charfield, 10, 'artist charfield gets correctly unfiltered w/ MC';
+   is $cd->artist->charfield, 20, 'artist charfield gets correctly filtered w/ MC';
 }
 
 CACHE_TEST: {
@@ -79,122 +79,210 @@ CACHE_TEST: {
   is $from_storage_ran, $expected_from, 'from has not run yet';
   is $to_storage_ran, $expected_to, 'to has not run yet';
 
-  $artist->rank;
+  $artist->charfield;
   cmp_ok (
-    $artist->get_filtered_column('rank'),
+    $artist->get_filtered_column('charfield'),
       '!=',
-    $artist->get_column('rank'),
+    $artist->get_column('charfield'),
     'filter/unfilter differ'
   );
   is $from_storage_ran, ++$expected_from, 'from ran once, therefore caches';
   is $to_storage_ran, $expected_to,  'to did not run';
 
-  $artist->rank(6);
+  $artist->charfield(6);
   is $from_storage_ran, $expected_from, 'from did not run';
   is $to_storage_ran, ++$expected_to,  'to ran once';
 
-  ok ($artist->is_column_changed ('rank'), 'Column marked as dirty');
+  ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty');
 
-  $artist->rank;
+  $artist->charfield;
   is $from_storage_ran, $expected_from, 'from did not run';
   is $to_storage_ran, $expected_to,  'to did not run';
 
   $artist->update;
 
-  $artist->set_column(rank => 3);
-  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same set_column value');
-  is ($artist->rank, '6', 'Column set properly (cache blown)');
+  $artist->set_column(charfield => 3);
+  ok (! $artist->is_column_changed ('charfield'), 'Column not marked as dirty on same set_column value');
+  is ($artist->charfield, '6', 'Column set properly (cache blown)');
   is $from_storage_ran, ++$expected_from, 'from ran once (set_column blew cache)';
   is $to_storage_ran, $expected_to,  'to did not run';
 
-  $artist->rank(6);
-  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same accessor-set value');
-  is ($artist->rank, '6', 'Column set properly');
+  $artist->charfield(6);
+  ok (! $artist->is_column_changed ('charfield'), 'Column not marked as dirty on same accessor-set value');
+  is ($artist->charfield, '6', 'Column set properly');
   is $from_storage_ran, $expected_from, 'from did not run';
-  is $to_storage_ran, $expected_to,  'to did not run';
+  is $to_storage_ran, ++$expected_to,  'to did run once (call in to set_column)';
 
-  $artist->store_column(rank => 4);
-  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on differing store_column value');
-  is ($artist->rank, '8', 'Cache properly blown');
+  $artist->store_column(charfield => 4);
+  ok (! $artist->is_column_changed ('charfield'), 'Column not marked as dirty on differing store_column value');
+  is ($artist->charfield, '8', 'Cache properly blown');
   is $from_storage_ran, ++$expected_from, 'from did not run';
   is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->update({ charfield => undef });
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, ++$expected_to,  'to did run';
+
+  $artist->discard_changes;
+  is ( $artist->get_column('charfield'), undef, 'Got back null' );
+  is ( $artist->charfield, undef, 'Got back null through filter' );
+
+  is $from_storage_ran, ++$expected_from, 'from did run';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+}
+
+# test in-memory operations
+for my $artist_maker (
+  sub { $schema->resultset('Artist')->new({ charfield => 42 }) },
+  sub { my $art = $schema->resultset('Artist')->new({}); $art->charfield(42); $art },
+) {
+
+  my $expected_from = $from_storage_ran;
+  my $expected_to   = $to_storage_ran;
+
+  my $artist = $artist_maker->();
+
+  is $from_storage_ran, $expected_from, 'from has not run yet';
+  is $to_storage_ran, $expected_to, 'to has not run yet';
+
+  ok( ! $artist->has_column_loaded('artistid'), 'pk not loaded' );
+  ok( $artist->has_column_loaded('charfield'), 'Filtered column marked as loaded under new' );
+  is( $artist->charfield, 42, 'Proper unfiltered value' );
+  is( $artist->get_column('charfield'), 21, 'Proper filtered value' );
+}
+
+# test literals
+for my $v ( \ '16', \[ '?', '16' ] ) {
+  my $rs = $schema->resultset('Artist');
+  $rs->delete;
+
+  my $art = $rs->new({ charfield => 10 });
+  $art->charfield($v);
+
+  is_deeply( $art->charfield, $v);
+  is_deeply( $art->get_filtered_column("charfield"), $v);
+  is_deeply( $art->get_column("charfield"), $v);
+
+  $art->insert;
+  $art->discard_changes;
+
+  is ($art->get_column("charfield"), 16, "Literal inserted into database properly");
+  is ($art->charfield, 32, "filtering still works");
+
+  $art->update({ charfield => $v });
+
+  is_deeply( $art->charfield, $v);
+  is_deeply( $art->get_filtered_column("charfield"), $v);
+  is_deeply( $art->get_column("charfield"), $v);
+
+  $art->discard_changes;
+
+  is ($art->get_column("charfield"), 16, "Literal inserted into database properly");
+  is ($art->charfield, 32, "filtering still works");
 }
 
 IC_DIE: {
-  dies_ok {
-     DBICTest::Schema::Artist->inflate_column(rank =>
+  throws_ok {
+     DBICTest::Schema::Artist->inflate_column(charfield =>
         { inflate => sub {}, deflate => sub {} }
      );
-  } q(Can't inflate column after filter column);
+  } qr/InflateColumn can not be used on a column with a declared FilterColumn filter/, q(Can't inflate column after filter column);
 
   DBICTest::Schema::Artist->inflate_column(name =>
      { inflate => sub {}, deflate => sub {} }
   );
 
-  dies_ok {
+  throws_ok {
      DBICTest::Schema::Artist->filter_column(name => {
         filter_to_storage => sub {},
         filter_from_storage => sub {}
      });
-  } q(Can't filter column after inflate column);
+  } qr/FilterColumn can not be used on a column with a declared InflateColumn inflator/, q(Can't filter column after inflate column);
 }
 
 # test when we do not set both filter_from_storage/filter_to_storage
-DBICTest::Schema::Artist->filter_column(rank => {
+DBICTest::Schema::Artist->filter_column(charfield => {
   filter_to_storage => sub { $to_storage_ran++; $_[1] },
 });
-Class::C3->reinitialize();
+Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
 
 ASYMMETRIC_TO_TEST: {
   # initialise value
-  $artist->rank(20);
+  $artist->charfield(20);
   $artist->update;
 
   my $expected_from = $from_storage_ran;
   my $expected_to   = $to_storage_ran;
 
-  $artist->rank(10);
-  ok ($artist->is_column_changed ('rank'), 'Column marked as dirty on accessor-set value');
-  is ($artist->rank, '10', 'Column set properly');
+  $artist->charfield(10);
+  ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty on accessor-set value');
+  is ($artist->charfield, '10', 'Column set properly');
   is $from_storage_ran, $expected_from, 'from did not run';
   is $to_storage_ran, ++$expected_to,  'to did run';
 
   $artist->discard_changes;
 
-  is ($artist->rank, '20', 'Column set properly');
+  is ($artist->charfield, '20', 'Column set properly');
   is $from_storage_ran, $expected_from, 'from did not run';
   is $to_storage_ran, $expected_to,  'to did not run';
 }
 
-DBICTest::Schema::Artist->filter_column(rank => {
+DBICTest::Schema::Artist->filter_column(charfield => {
   filter_from_storage => sub { $from_storage_ran++; $_[1] },
 });
-Class::C3->reinitialize();
+Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
 
 ASYMMETRIC_FROM_TEST: {
   # initialise value
-  $artist->rank(23);
+  $artist->charfield(23);
   $artist->update;
 
   my $expected_from = $from_storage_ran;
   my $expected_to   = $to_storage_ran;
 
-  $artist->rank(13);
-  ok ($artist->is_column_changed ('rank'), 'Column marked as dirty on accessor-set value');
-  is ($artist->rank, '13', 'Column set properly');
+  $artist->charfield(13);
+  ok ($artist->is_column_changed ('charfield'), 'Column marked as dirty on accessor-set value');
+  is ($artist->charfield, '13', 'Column set properly');
   is $from_storage_ran, $expected_from, 'from did not run';
   is $to_storage_ran, $expected_to,  'to did not run';
 
   $artist->discard_changes;
 
-  is ($artist->rank, '23', 'Column set properly');
+  is ($artist->charfield, '23', 'Column set properly');
   is $from_storage_ran, ++$expected_from, 'from did run';
   is $to_storage_ran, $expected_to,  'to did not run';
 }
 
-throws_ok { DBICTest::Schema::Artist->filter_column( rank => {} ) }
+throws_ok { DBICTest::Schema::Artist->filter_column( charfield => {} ) }
   qr/\QAn invocation of filter_column() must specify either a filter_from_storage or filter_to_storage/,
   'Correctly throws exception for empty attributes'
 ;
 
+FC_ON_PK_TEST: {
+  # there are cases in the wild that autovivify stuff deep in the
+  # colinfo guts. While this is insane, there is no alternative
+  # so at least make sure it keeps working...
+
+  $schema->source('Artist')->column_info('artistid')->{_filter_info} ||= {};
+
+  for my $key ('', 'primary') {
+    lives_ok {
+      $schema->resultset('Artist')->find_or_create({ artistid => 42 }, { $key ? ( key => $key ) : () });
+    };
+  }
+
+
+  DBICTest::Schema::Artist->filter_column(artistid => {
+    filter_to_storage => sub { $_[1] * 100 },
+    filter_from_storage => sub { $_[1] - 100 },
+  });
+
+  for my $key ('', 'primary') {
+    throws_ok {
+      $schema->resultset('Artist')->find_or_create({ artistid => 42 }, { $key ? ( key => $key ) : () });
+    } qr/\QUnable to satisfy requested constraint 'primary', FilterColumn values not usable for column(s): 'artistid'/;
+  }
+}
+
 done_testing;
@@ -4,8 +4,6 @@ use warnings;
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
 
@@ -15,20 +13,20 @@ $schema->resultset('CD')->delete;
 my $artist  = $schema->resultset("Artist")->create({ artistid => 21, name => 'Michael Jackson', rank => 20 });
 my $cd = $artist->create_related('cds', { year => 1975, title => 'Compilation from 1975' });
 
-my ($sql, @bind);
-local $schema->storage->{debug} = 1;
-local $schema->storage->{debugobj} = DBIC::DebugObj->new(\$sql, \@bind);
-
-my $find_cd = $artist->find_related('cds',{title => 'Compilation from 1975'});
-
-s/^'//, s/'\z// for @bind; # why does DBIC::DebugObj not do this?
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
-  'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( me.artist = ? AND me.title = ? ) ) ORDER BY year ASC',
-  [21, 'Compilation from 1975'],
-  'find_related only uses foreign key condition once',
-);
+$schema->is_executed_sql_bind(sub {
+  my $find_cd = $artist->find_related('cds',{title => 'Compilation from 1975'});
+}, [
+  [
+    ' SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+        FROM cd me
+      WHERE me.artist = ? AND me.title = ?
+      ORDER BY year ASC
+    ',
+    [ { dbic_colname => "me.artist", sqlt_datatype => "integer" }
+      => 21 ],
+    [ { dbic_colname => "me.title",  sqlt_datatype => "varchar", sqlt_size => 100 }
+      => "Compilation from 1975" ],
+  ]
+], 'find_related only uses foreign key condition once' );
 
 done_testing;
@@ -0,0 +1,32 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+my $rs_with_avg = $schema->resultset('CD')->search({}, {
+  '+columns' => { avg_year => $schema->resultset('CD')->get_column('year')->func_rs('avg')->as_query },
+  order_by => 'cdid',
+});
+
+for my $in_storage (1, 0) {
+  my $cd = $rs_with_avg->first;
+
+  ok ! $cd->is_column_changed('avg_year'), 'no changes';
+
+  $cd->in_storage($in_storage);
+
+  ok ! $cd->is_column_changed('avg_year'), 'still no changes';
+
+  $cd->set_column( avg_year => 42 );
+  $cd->set_column( avg_year => 69 );
+
+  ok $cd->is_column_changed('avg_year'), 'changed';
+  is $cd->get_column('avg_year'), 69, 'correct value'
+}
+
+done_testing;
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -0,0 +1,51 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest ':DiffSQL';
+
+my $schema = DBICTest->init_schema();
+
+my $rs = $schema->resultset('Artist')->search(
+  [ -and => [ {}, [] ], -or => [ {}, [] ] ],
+  {
+    select => [],
+    columns => {},
+    '+columns' => 'artistid',
+    join => [ {}, [ [ {}, {} ] ], {} ],
+    prefetch => [ [ [ {}, [] ], {} ], {}, [ {} ] ],
+    order_by => [],
+    group_by => [],
+    offset => 0,
+  }
+);
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(SELECT me.artistid FROM artist me)',
+  [],
+);
+
+is_same_sql_bind(
+  $rs->count_rs->as_query,
+  '(SELECT COUNT(*) FROM artist me)',
+  [],
+);
+
+is_same_sql_bind(
+  $rs->as_subselect_rs->search({}, { columns => 'artistid' })->as_query,
+  '(SELECT me.artistid FROM (SELECT me.artistid FROM artist me) me)',
+  [],
+);
+
+{
+  local $TODO = 'Stupid misdesigned as_subselect_rs';
+  is_same_sql_bind(
+    $rs->as_subselect_rs->as_query,
+    $rs->as_subselect_rs->search({}, { columns => 'artistid' })->as_query,
+  );
+}
+
+done_testing;
@@ -5,11 +5,9 @@ use Test::More;
 use Test::Exception;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
+use DBICTest ':DiffSQL';
 
-use Storable qw/dclone/;
+use Storable 'dclone';
 
 my $schema = DBICTest->init_schema();
 
@@ -5,7 +5,6 @@ use Test::More;
 
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
 
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype;
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -0,0 +1,92 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest ':DiffSQL';
+use SQL::Abstract qw(is_plain_value is_literal_value);
+use List::Util 'shuffle';
+use Data::Dumper;
+$Data::Dumper::Terse = 1;
+$Data::Dumper::Useqq = 1;
+$Data::Dumper::Indent = 0;
+
+my $schema = DBICTest->init_schema();
+
+for my $c (
+  { cond => undef, sql => 'IS NULL' },
+  { cond => { -value => undef }, sql => 'IS NULL' },
+  { cond => \'foo', sql => '= foo' },
+  { cond => 'foo', sql => '= ?', bind => [
+    [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ],
+    [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ],
+  ]},
+  { cond => { -value => 'foo' }, sql => '= ?', bind => [
+    [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ],
+    [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'foo' ],
+  ]},
+  { cond => \[ '?', "foo" ], sql => '= ?', bind => [
+    [ {} => 'foo' ],
+    [ {} => 'foo' ],
+  ]},
+) {
+  my $rs = $schema->resultset('CD')->search({}, { columns => 'title' });
+
+  my $bare_cond = is_literal_value($c->{cond}) ? { '=', $c->{cond} } : $c->{cond};
+
+  my @query_steps = (
+    # these are monkey-wrenches, always there
+    { title => { '!=', [ -and => \'bar' ] }, year => { '!=', [ -and => 'bar' ] } },
+    { -or => [ genreid => undef, genreid => { '!=' => \42 } ] },
+    { -or => [ genreid => undef, genreid => { '!=' => \42 } ] },
+
+    { title => $bare_cond, year => { '=', $c->{cond} } },
+    { -and => [ year => $bare_cond, { title => { '=', $c->{cond} } } ] },
+    [ year => $bare_cond ],
+    [ title => $bare_cond ],
+    { -and => [ { year => { '=', $c->{cond} } }, { title => { '=', $c->{cond} } } ] },
+    { -and => { -or => { year => { '=', $c->{cond} } } }, -or => { title => $bare_cond } },
+  );
+
+  if (my $v = is_plain_value($c->{cond})) {
+    push @query_steps,
+      { year => $$v },
+      { title => $$v },
+      { -and => [ year => $$v, title => $$v ] },
+    ;
+  }
+
+  @query_steps = shuffle @query_steps;
+
+  $rs = $rs->search($_) for @query_steps;
+
+  my @bind = @{$c->{bind} || []};
+  {
+    no warnings 'misc';
+    splice @bind, 1, 0, [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => 'bar' ];
+  }
+
+  is_same_sql_bind (
+    $rs->as_query,
+    "(
+      SELECT me.title
+        FROM cd me
+      WHERE
+        ( genreid != 42 OR genreid IS NULL )
+          AND
+        ( genreid != 42 OR genreid IS NULL )
+          AND
+        title != bar
+          AND
+        title $c->{sql}
+          AND
+        year != ?
+          AND
+        year $c->{sql}
+    )",
+    \@bind,
+    'Double condition correctly collapsed for steps' . Dumper \@query_steps,
+  );
+}
+
+done_testing;
@@ -4,9 +4,9 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
+use DBIx::Class::_Util 'sigwarn_silencer';
 
 my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype;
 
@@ -165,6 +165,8 @@ my @tests = (
 for my $i (0 .. $#tests) {
   my $t = $tests[$i];
   for my $p (1, 2) {  # repeat everything twice, make sure we do not clobber search arguments
+    local $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ );
+
     is_same_sql_bind (
       $t->{rs}->search ($t->{search}, $t->{attrs})->as_query,
       $t->{sqlbind},
@@ -6,8 +6,7 @@ use Test::Exception;
 use Math::BigInt;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($ROWS, $OFFSET) = (
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema(no_deploy => 1);
 
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -25,7 +24,7 @@ my ($sql, @bind) = $sql_maker->select(
                 '-join_type' => ''
               },
               {
-                'artist.artistid' => 'me.artist'
+                'artist.artistid' => { -ident => 'me.artist' },
               }
             ],
             [
@@ -34,7 +33,7 @@ my ($sql, @bind) = $sql_maker->select(
                 '-join_type' => 'left'
               },
               {
-                'tracks.cd' => 'me.cdid'
+                'tracks.cd' => { -ident => 'me.cdid' },
               }
             ],
           ],
@@ -308,7 +307,7 @@ $sql_maker->quote_char([qw/[ ]/]);
                 '-join_type' => ''
               },
               {
-                'artist.artistid' => 'me.artist'
+                'artist.artistid' => { -ident => 'me.artist' }
               }
             ]
           ],
@@ -0,0 +1,497 @@
+use strict;
+use warnings;
+use Test::More;
+use Test::Warn;
+
+use lib qw(t/lib);
+use DBICTest ':DiffSQL';
+use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION';
+
+use Data::Dumper;
+BEGIN {
+  if ( eval { require Test::Differences } ) {
+    no warnings 'redefine';
+    *is_deeply = \&Test::Differences::eq_or_diff;
+  }
+}
+
+my $schema = DBICTest->init_schema( no_deploy => 1);
+my $sm = $schema->storage->sql_maker;
+
+{
+  package # hideee
+    DBICTest::SillyInt;
+
+  use overload
+    fallback => 1,
+    '0+' => sub { ${$_[0]} },
+  ;
+}
+my $num = bless( \do { my $foo = 69 }, 'DBICTest::SillyInt' );
+
+is($num, 69, 'test overloaded object is "sane"');
+is("$num", 69, 'test overloaded object is "sane"');
+
+for my $t (
+  {
+    where => { artistid => 1, charfield => undef },
+    cc_result => { artistid => 1, charfield => undef },
+    sql => 'WHERE artistid = ? AND charfield IS NULL',
+    efcc_result => { artistid => 1 },
+    efcc_n_result => { artistid => 1, charfield => undef },
+  },
+  {
+    where => { -and => [ artistid => 1, charfield => undef, { rank => 13 } ] },
+    cc_result => { artistid => 1, charfield => undef, rank => 13 },
+    sql => 'WHERE artistid = ?  AND charfield IS NULL AND rank = ?',
+    efcc_result => { artistid => 1, rank => 13 },
+    efcc_n_result => { artistid => 1, charfield => undef, rank => 13 },
+  },
+  {
+    where => { -and => [ { artistid => 1, charfield => undef}, { rank => 13 } ] },
+    cc_result => { artistid => 1, charfield => undef, rank => 13 },
+    sql => 'WHERE artistid = ?  AND charfield IS NULL AND rank = ?',
+    efcc_result => { artistid => 1, rank => 13 },
+    efcc_n_result => { artistid => 1, charfield => undef, rank => 13 },
+  },
+  {
+    where => { -and => [ -or => { name => 'Caterwauler McCrae' }, 'rank' ] },
+    cc_result => { name => 'Caterwauler McCrae', rank => undef },
+    sql => 'WHERE name = ? AND rank IS NULL',
+    efcc_result => { name => 'Caterwauler McCrae' },
+    efcc_n_result => { name => 'Caterwauler McCrae', rank => undef },
+  },
+  {
+    where => { -and => [ [ [ artist => {'=' => \'foo' } ] ], { name => \[ '= ?', 'bar' ] } ] },
+    cc_result => { artist => {'=' => \'foo' }, name => \[ '= ?', 'bar' ] },
+    sql => 'WHERE artist = foo AND name = ?',
+    efcc_result => { artist => \'foo' },
+  },
+  {
+    where => { -and => [ -or => { name => 'Caterwauler McCrae', artistid => 2 } ] },
+    cc_result => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] },
+    sql => 'WHERE artistid = ? OR name = ?',
+    efcc_result => {},
+  },
+  {
+    where => { -or => { name => 'Caterwauler McCrae', artistid => 2 } },
+    cc_result => { -or => [ artistid => 2, name => 'Caterwauler McCrae' ] },
+    sql => 'WHERE artistid = ? OR name = ?',
+    efcc_result => {},
+  },
+  {
+    where => { -and => [ \'foo=bar',  [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'} ] },
+    cc_result => { -and => [ \'foo=bar' ], name => 'Caterwauler McCrae', artistid => $num },
+    sql => 'WHERE foo=bar AND artistid = ? AND name = ?',
+    efcc_result => { name => 'Caterwauler McCrae', artistid => $num },
+  },
+  {
+    where => { -and => [ \'foo=bar',  [ { artistid => { '=', $num } } ], { name => 'Caterwauler McCrae'}, \'buzz=bozz' ] },
+    cc_result => { -and => [ \'foo=bar', \'buzz=bozz' ], name => 'Caterwauler McCrae', artistid => $num },
+    sql => 'WHERE foo=bar AND artistid = ? AND name = ? AND buzz=bozz',
+    collapsed_sql => 'WHERE foo=bar AND buzz=bozz AND artistid = ? AND name = ?',
+    efcc_result => { name => 'Caterwauler McCrae', artistid => $num },
+  },
+  {
+    where => { artistid => [ $num ], rank => [ 13, 2, 3 ], charfield => [ undef ] },
+    cc_result => { artistid => $num, charfield => undef, rank => [13, 2, 3] },
+    sql => 'WHERE artistid = ? AND charfield IS NULL AND ( rank = ? OR rank = ? OR rank = ? )',
+    efcc_result => { artistid => $num },
+    efcc_n_result => { artistid => $num, charfield => undef },
+  },
+  {
+    where => { artistid => { '=' => 1 }, rank => { '>' => 12 }, charfield => { '=' => undef } },
+    cc_result => { artistid => 1, charfield => undef, rank => { '>' => 12 } },
+    sql => 'WHERE artistid = ? AND charfield IS NULL AND rank > ?',
+    efcc_result => { artistid => 1 },
+    efcc_n_result => { artistid => 1, charfield => undef },
+  },
+  {
+    where => { artistid => { '=' => [ 1 ], }, charfield => { '=' => [ -AND => \'1', \['?',2] ] }, rank => { '=' => [ -OR => $num, $num ] } },
+    cc_result => { artistid => 1, charfield => [-and => { '=' => \['?',2] }, { '=' => \'1' } ], rank => { '=' => [$num, $num] } },
+    sql => 'WHERE artistid = ? AND charfield = 1 AND charfield = ? AND ( rank = ? OR rank = ? )',
+    collapsed_sql => 'WHERE artistid = ? AND charfield = ? AND charfield = 1 AND ( rank = ? OR rank = ? )',
+    efcc_result => { artistid => 1, charfield => UNRESOLVABLE_CONDITION },
+  },
+  {
+    where => { -and => [ artistid => 1, artistid => 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => [ -or => { '=', 2 } ], rank => [-and => undef, { '=', undef }, { '!=', 2 } ] },
+    cc_result => { artistid => [ -and => 1, 2 ], name => [ -and => { '!=', 1 }, 2 ], charfield => 2, rank => [ -and => { '!=', 2 }, undef ] },
+    sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank IS NULL AND rank IS NULL AND rank != ?',
+    collapsed_sql => 'WHERE artistid = ? AND artistid = ? AND charfield = ? AND name != ? AND name = ? AND rank != ? AND rank IS NULL',
+    efcc_result => {
+      artistid => UNRESOLVABLE_CONDITION,
+      name => 2,
+      charfield => 2,
+    },
+    efcc_n_result => {
+      artistid => UNRESOLVABLE_CONDITION,
+      name => 2,
+      charfield => 2,
+      rank => undef,
+    },
+  },
+  (map { {
+    where => $_,
+    sql => 'WHERE (rank = 13 OR charfield IS NULL OR artistid = ?) AND (artistid = ? OR charfield IS NULL OR rank != 42)',
+    collapsed_sql => 'WHERE (artistid = ? OR charfield IS NULL OR rank = 13) AND (artistid = ? OR charfield IS NULL OR rank != 42)',
+    cc_result => { -and => [
+      { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] },
+      { -or => [ artistid => 1, charfield => undef, rank => { '!=' => \42 } ] },
+    ] },
+    efcc_result => {},
+    efcc_n_result => {},
+  } } (
+
+    { -and => [
+      -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ],
+      -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } },
+    ] },
+
+    {
+      -OR => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ],
+      -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } },
+    },
+
+  ) ),
+  {
+    where => { -or => [
+      -and => [ foo => { '!=', { -value => undef } }, bar => { -in => [ 69, 42 ] } ],
+      foo => { '=', { -value => undef } },
+      baz => { '!=' => { -ident => 'bozz' } },
+      baz => { -ident => 'buzz' },
+    ] },
+    sql => 'WHERE ( foo IS NOT NULL AND bar IN ( ?, ? ) ) OR foo IS NULL OR baz != bozz OR baz = buzz',
+    collapsed_sql => 'WHERE baz != bozz OR baz = buzz OR foo IS NULL OR ( bar IN ( ?, ? ) AND foo IS NOT NULL )',
+    cc_result => { -or => [
+      baz => { '!=' => { -ident => 'bozz' } },
+      baz => { '=' => { -ident => 'buzz' } },
+      foo => undef,
+      { bar => { -in => [ 69, 42 ] }, foo => { '!=', undef } }
+    ] },
+    efcc_result => {},
+  },
+  {
+    where => { -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => { '=' => 1 }, genreid => { '=' => \['?', 2] } ] },
+    sql => 'WHERE rank = 13 OR charfield IS NULL OR artistid = ? OR genreid = ?',
+    collapsed_sql => 'WHERE artistid = ? OR charfield IS NULL OR genreid = ? OR rank = 13',
+    cc_result => { -or => [ artistid => 1, charfield => undef, genreid => { '=' => \['?', 2] }, rank => { '=' => \13 } ] },
+    efcc_result => {},
+    efcc_n_result => {},
+  },
+  {
+    where => { -and => [
+      -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ],
+      -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '=' => \13 } },
+    ] },
+    cc_result => { -and => [
+      { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] },
+      { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] },
+    ] },
+    sql => 'WHERE (rank = 13 OR charfield IS NULL OR artistid = ?) AND (artistid = ? OR charfield IS NULL OR rank = 13)',
+    collapsed_sql => 'WHERE (artistid = ? OR charfield IS NULL OR rank = 13) AND (artistid = ? OR charfield IS NULL OR rank = 13)',
+    efcc_result => {},
+    efcc_n_result => {},
+  },
+  {
+    where => { -and => [
+      -or => [ rank => { '=' => \13 }, charfield => { '=' => undef }, artistid => 1 ],
+      -or => { artistid => { '=' => 1 }, charfield => undef, rank => { '!=' => \42 } },
+      -and => [ foo => { '=' => \1 }, bar => 2 ],
+      -and => [ foo => 3, bar => { '=' => \4 } ],
+      -exists => \'(SELECT 1)',
+      -exists => \'(SELECT 2)',
+      -not => { foo => 69 },
+      -not => { foo => 42 },
+    ]},
+    sql => 'WHERE
+          ( rank = 13 OR charfield IS NULL OR artistid = ? )
+      AND ( artistid = ? OR charfield IS NULL OR rank != 42 )
+      AND foo = 1
+      AND bar = ?
+      AND foo = ?
+      AND bar = 4
+      AND (EXISTS (SELECT 1))
+      AND (EXISTS (SELECT 2))
+      AND NOT foo = ?
+      AND NOT foo = ?
+    ',
+    collapsed_sql => 'WHERE
+          ( artistid = ? OR charfield IS NULL OR rank = 13 )
+      AND ( artistid = ? OR charfield IS NULL OR rank != 42 )
+      AND (EXISTS (SELECT 1))
+      AND (EXISTS (SELECT 2))
+      AND NOT foo = ?
+      AND NOT foo = ?
+      AND bar = 4
+      AND bar = ?
+      AND foo = 1
+      AND foo = ?
+    ',
+    cc_result => {
+      -and => [
+        { -or => [ artistid => 1, charfield => undef, rank => { '=' => \13 } ] },
+        { -or => [ artistid => 1, charfield => undef, rank => { '!=' => \42 } ] },
+        { -exists => \'(SELECT 1)' },
+        { -exists => \'(SELECT 2)' },
+        { -not => { foo => 69 } },
+        { -not => { foo => 42 } },
+      ],
+      foo => [ -and => { '=' => \1 }, 3 ],
+      bar => [ -and => { '=' => \4 }, 2 ],
+    },
+    efcc_result => {
+      foo => UNRESOLVABLE_CONDITION,
+      bar => UNRESOLVABLE_CONDITION,
+    },
+    efcc_n_result => {
+      foo => UNRESOLVABLE_CONDITION,
+      bar => UNRESOLVABLE_CONDITION,
+    },
+  },
+  {
+    where => { -and => [
+      [ '_macro.to' => { -like => '%correct%' }, '_wc_macros.to' => { -like => '%correct%' } ],
+      { -and => [ { 'group.is_active' => 1 }, { 'me.is_active' => 1 } ] }
+    ] },
+    cc_result => {
+      'group.is_active' => 1,
+      'me.is_active' => 1,
+      -or => [
+        '_macro.to' => { -like => '%correct%' },
+        '_wc_macros.to' => { -like => '%correct%' },
+      ],
+    },
+    sql => 'WHERE ( _macro.to LIKE ? OR _wc_macros.to LIKE ? ) AND group.is_active = ? AND me.is_active = ?',
+    efcc_result => { 'group.is_active' => 1, 'me.is_active' => 1 },
+  },
+
+  {
+    where => { -and => [
+      artistid => { -value => [1] },
+      charfield => { -ident => 'foo' },
+      name => { '=' => { -value => undef } },
+      rank => { '=' => { -ident => 'bar' } },
+    ] },
+    sql => 'WHERE artistid = ? AND charfield = foo AND name IS NULL AND rank = bar',
+    cc_result => {
+      artistid => { -value => [1] },
+      name => undef,
+      charfield => { '=', { -ident => 'foo' } },
+      rank => { '=' => { -ident => 'bar' } },
+    },
+    efcc_result => {
+      artistid => [1],
+      charfield => { -ident => 'foo' },
+      rank => { -ident => 'bar' },
+    },
+    efcc_n_result => {
+      artistid => [1],
+      name => undef,
+      charfield => { -ident => 'foo' },
+      rank => { -ident => 'bar' },
+    },
+  },
+
+  {
+    where => { artistid => [] },
+    cc_result => { artistid => [] },
+    efcc_result => {},
+  },
+  (map {
+    {
+      where => { -and => $_ },
+      cc_result => undef,
+      efcc_result => {},
+      sql => '',
+    },
+    {
+      where => { -or => $_ },
+      cc_result => undef,
+      efcc_result => {},
+      sql => '',
+    },
+    {
+      where => { -or => [ foo => 1, $_ ] },
+      cc_result => { foo => 1 },
+      efcc_result => { foo => 1 },
+      sql => 'WHERE foo = ?',
+    },
+    {
+      where => { -or => [ $_, foo => 1 ] },
+      cc_result => { foo => 1 },
+      efcc_result => { foo => 1 },
+      sql => 'WHERE foo = ?',
+    },
+    {
+      where => { -and => [ fuu => 2, $_, foo => 1 ] },
+      sql => 'WHERE fuu = ? AND foo = ?',
+      collapsed_sql => 'WHERE foo = ? AND fuu = ?',
+      cc_result => { foo => 1, fuu => 2 },
+      efcc_result => { foo => 1, fuu => 2 },
+    },
+  } (
+    # bare
+    [], {},
+    # singles
+    [ {} ], [ [] ],
+    # doubles
+    [ [], [] ], [ {}, {} ], [ [], {} ], [ {}, [] ],
+    # triples
+    [ {}, [], {} ], [ [], {}, [] ]
+  )),
+
+  # FIXME legacy compat crap, possibly worth undef/dieing in SQLMaker
+  { where => { artistid => {} }, sql => '', cc_result => undef, efcc_result => {}, efcc_n_result => {} },
+
+  # batshit insanity, just to be thorough
+  {
+    where => { -and => [ [ 'artistid' ], [ -and => [ artistid => { '!=', 69 }, artistid => undef, artistid => { '=' => 200 } ]], artistid => [], { -or => [] }, { -and => [] }, [ 'charfield' ], { name => [] }, 'rank' ] },
+    cc_result => { artistid => [ -and => [], { '!=', 69 }, undef, 200  ], charfield => undef, name => [], rank => undef },
+    sql => 'WHERE artistid IS NULL AND artistid != ? AND artistid IS NULL AND artistid = ? AND 0=1 AND charfield IS NULL AND 0=1 AND rank IS NULL',
+    collapsed_sql => 'WHERE 0=1 AND artistid != ? AND artistid IS NULL AND artistid = ? AND charfield IS NULL AND 0=1 AND rank IS NULL',
+    efcc_result => { artistid => UNRESOLVABLE_CONDITION },
+    efcc_n_result => { artistid => UNRESOLVABLE_CONDITION, charfield => undef, rank => undef },
+  },
+
+  # original test from RT#93244
+  {
+    where => {
+      -and => [
+        \[
+          "LOWER(me.title) LIKE ?",
+          '%spoon%',
+        ],
+        [ { 'me.title' => 'Spoonful of bees' } ],
+    ]},
+    cc_result => {
+      -and => [ \[
+        "LOWER(me.title) LIKE ?",
+        '%spoon%',
+      ]],
+      'me.title' => 'Spoonful of bees',
+    },
+    sql => 'WHERE LOWER(me.title) LIKE ? AND me.title = ?',
+    efcc_result => { 'me.title' => 'Spoonful of bees' },
+  },
+
+  # crazy literals
+  {
+    where => {
+      -or => [
+        \'foo = bar',
+      ],
+    },
+    sql => 'WHERE foo = bar',
+    cc_result => {
+      -and => [
+        \'foo = bar',
+      ],
+    },
+    efcc_result => {},
+  },
+  {
+    where => {
+      -or => [
+        \'foo = bar',
+        \'baz = ber',
+      ],
+    },
+    sql => 'WHERE foo = bar OR baz = ber',
+    collapsed_sql => 'WHERE baz = ber OR foo = bar',
+    cc_result => {
+      -or => [
+        \'baz = ber',
+        \'foo = bar',
+      ],
+    },
+    efcc_result => {},
+  },
+  {
+    where => {
+      -and => [
+        \'foo = bar',
+        \'baz = ber',
+      ],
+    },
+    sql => 'WHERE foo = bar AND baz = ber',
+    cc_result => {
+      -and => [
+        \'foo = bar',
+        \'baz = ber',
+      ],
+    },
+    efcc_result => {},
+  },
+  {
+    where => {
+      -and => [
+        \'foo = bar',
+        \'baz = ber',
+        x => { -ident => 'y' },
+      ],
+    },
+    sql => 'WHERE foo = bar AND baz = ber AND x = y',
+    cc_result => {
+      -and => [
+        \'foo = bar',
+        \'baz = ber',
+      ],
+      x => { '=' => { -ident => 'y' } }
+    },
+    efcc_result => { x => { -ident => 'y' } },
+  },
+) {
+
+  for my $w (
+    $t->{where},
+    $t->{where},  # do it twice, make sure we didn't destroy the condition
+    [ -and => $t->{where} ],
+    [ -AND => $t->{where} ],
+    { -OR => [ -AND => $t->{where} ] },
+    ( keys %{$t->{where}} <= 1 ? [ %{$t->{where}} ] : () ),
+    ( (keys %{$t->{where}} == 1 and $t->{where}{-or})
+      ? ( ref $t->{where}{-or} eq 'HASH'
+        ? [ map { $_ => $t->{where}{-or}{$_} } sort keys %{$t->{where}{-or}} ]
+        : $t->{where}{-or}
+      )
+      : ()
+    ),
+  ) {
+    my $name = do { local ($Data::Dumper::Indent, $Data::Dumper::Terse, $Data::Dumper::Sortkeys) = (0, 1, 1); Dumper $w };
+
+    my ($generated_sql) = $sm->where($w);
+
+    is_same_sql ( $generated_sql, $t->{sql}, "Expected SQL from $name" )
+      if exists $t->{sql};
+
+    is_same_sql(
+      ($sm->where($t->{cc_result}))[0],
+      ( $t->{collapsed_sql} || $t->{sql} || $generated_sql ),
+      "Collapse did not alter *the semantics* of the final SQL based on $name",
+    );
+
+    my $collapsed_cond = $schema->storage->_collapse_cond($w);
+
+    is_deeply(
+      $collapsed_cond,
+      $t->{cc_result},
+      "Expected collapsed condition produced on $name",
+    );
+
+    is_deeply(
+      $schema->storage->_extract_fixed_condition_columns($w),
+      $t->{efcc_result},
+      "Expected fixed_condition produced on $name",
+    );
+
+    is_deeply(
+      $schema->storage->_extract_fixed_condition_columns($w, 'consider_nulls'),
+      $t->{efcc_n_result},
+      "Expected fixed_condition including NULLs produced on $name",
+    ) if $t->{efcc_n_result};
+
+    die unless Test::Builder->new->is_passing;
+  }
+}
+
+done_testing;
@@ -23,9 +23,7 @@ BEGIN {
   );
 }
 
-use DBICTest;
-use DBICTest::Schema;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 use DBIx::Class::SQLMaker::LimitDialects;
 my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype;
@@ -0,0 +1,100 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest ':DiffSQL';
+use DBIx::Class::_Util 'sigwarn_silencer';
+
+use DBIx::Class::SQLMaker;
+my $sa = DBIx::Class::SQLMaker->new;
+
+$SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ );
+
+my @j = (
+    { child => 'person' },
+    [ { father => 'person' }, { 'father.person_id' => 'child.father_id' }, ],
+    [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
+);
+my $match = 'person child JOIN person father ON ( father.person_id = '
+          . 'child.father_id ) JOIN person mother ON ( mother.person_id '
+          . '= child.mother_id )'
+          ;
+is_same_sql(
+  $sa->_recurse_from(@j),
+  $match,
+  'join 1 ok'
+);
+
+my @j2 = (
+    { mother => 'person' },
+    [   [   { child => 'person' },
+            [   { father             => 'person' },
+                { 'father.person_id' => 'child.father_id' }
+            ]
+        ],
+        { 'mother.person_id' => 'child.mother_id' }
+    ],
+);
+$match = 'person mother JOIN (person child JOIN person father ON ('
+       . ' father.person_id = child.father_id )) ON ( mother.person_id = '
+       . 'child.mother_id )'
+       ;
+is_same_sql(
+  $sa->_recurse_from(@j2),
+  $match,
+  'join 2 ok'
+);
+
+my @j3 = (
+    { child => 'person' },
+    [ { father => 'person', -join_type => 'inner' }, { 'father.person_id' => 'child.father_id' }, ],
+    [ { mother => 'person', -join_type => 'inner'  }, { 'mother.person_id' => 'child.mother_id' } ],
+);
+$match = 'person child INNER JOIN person father ON ( father.person_id = '
+          . 'child.father_id ) INNER JOIN person mother ON ( mother.person_id '
+          . '= child.mother_id )'
+          ;
+
+is_same_sql(
+  $sa->_recurse_from(@j3),
+  $match,
+  'join 3 (inner join) ok'
+);
+
+my @j4 = (
+    { mother => 'person' },
+    [   [   { child => 'person', -join_type => 'left' },
+            [   { father             => 'person', -join_type => 'right' },
+                { 'father.person_id' => 'child.father_id' }
+            ]
+        ],
+        { 'mother.person_id' => 'child.mother_id' }
+    ],
+);
+$match = 'person mother LEFT JOIN (person child RIGHT JOIN person father ON ('
+       . ' father.person_id = child.father_id )) ON ( mother.person_id = '
+       . 'child.mother_id )'
+       ;
+is_same_sql(
+  $sa->_recurse_from(@j4),
+  $match,
+  'join 4 (nested joins + join types) ok'
+);
+
+my @j5 = (
+    { child => 'person' },
+    [ { father => 'person' }, { 'father.person_id' => \'!= child.father_id' }, ],
+    [ { mother => 'person' }, { 'mother.person_id' => 'child.mother_id' } ],
+);
+$match = 'person child JOIN person father ON ( father.person_id != '
+          . 'child.father_id ) JOIN person mother ON ( mother.person_id '
+          . '= child.mother_id )'
+          ;
+is_same_sql(
+  $sa->_recurse_from(@j5),
+  $match,
+  'join 5 (SCALAR reference for ON statement) ok'
+);
+
+done_testing;
@@ -5,9 +5,7 @@ use Test::More;
 use Test::Warn;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBICTest::Schema;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 # This is legacy stuff from SQL::Abstract::Limit
 # Keep it around just in case someone is using it
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema;
 
@@ -114,6 +113,15 @@ for my $ord_set (
     exselect_outer => 'ORDER__BY__001, ORDER__BY__002, ORDER__BY__003',
     exselect_inner => 'title AS ORDER__BY__001, bar AS ORDER__BY__002, sensors AS ORDER__BY__003',
   },
+
+  {
+    order_by => [
+      'name',
+    ],
+    order_inner => 'name',
+    order_outer => 'name DESC',
+    order_req => 'name',
+  },
 ) {
   my $o_sel = $ord_set->{exselect_outer}
     ? ', ' . $ord_set->{exselect_outer}
@@ -124,8 +132,13 @@ for my $ord_set (
     : ''
   ;
 
+  my $rs = $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}});
+
+  # query actually works
+  ok( defined $rs->count, 'Query actually works' );
+
   is_same_sql_bind(
-    $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query,
+    $rs->as_query,
     "(SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name
         FROM (
           SELECT me.id, me.source, me.owner, me.price, owner__id, owner__name$o_sel
@@ -145,6 +158,7 @@ for my $ord_set (
     [ [ { sqlt_datatype => 'varchar', sqlt_size => 100, dbic_colname => 'source' }
         => 'Library' ] ],
   );
+
 }
 
 # with groupby
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($LIMIT, $OFFSET) = (
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 use lib qw(t/lib);
 use List::Util 'min';
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 my ($ROWS, $TOTAL, $OFFSET) = (
    DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype,
@@ -2,8 +2,7 @@ use strict;
 use warnings;
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype;
 my $TOTAL  = DBIx::Class::SQLMaker::LimitDialects->__total_bindtype;
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($TOTAL, $OFFSET) = (
@@ -4,8 +4,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($TOTAL, $OFFSET, $ROWS) = (
@@ -42,7 +41,7 @@ for my $test_set (
     sql => '(
       SELECT id, artist__id, bleh
       FROM (
-        SELECT id, artist__id, bleh, ROWNUM rownum__index
+        SELECT id, artist__id, bleh, ROWNUM AS rownum__index
         FROM (
           SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR (foo.womble, "blah") AS bleh
             FROM cd me
@@ -70,7 +69,7 @@ for my $test_set (
     sql => '(
       SELECT id, artist__id, bleh
       FROM (
-        SELECT id, artist__id, bleh, ROWNUM rownum__index
+        SELECT id, artist__id, bleh, ROWNUM AS rownum__index
         FROM (
           SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR(foo.womble, "blah") AS bleh
             FROM cd me
@@ -102,7 +101,7 @@ for my $test_set (
     sql => '(
       SELECT id, artist__id, bleh
       FROM (
-        SELECT id, artist__id, bleh, ROWNUM rownum__index
+        SELECT id, artist__id, bleh, ROWNUM AS rownum__index
         FROM (
           SELECT foo.id AS id, bar.id AS artist__id, TO_CHAR(foo.womble, "blah") AS bleh
             FROM cd me
@@ -130,7 +129,7 @@ for my $test_set (
     sql => '(
       SELECT id, ends_with_me__id
       FROM (
-        SELECT id, ends_with_me__id, ROWNUM rownum__index
+        SELECT id, ends_with_me__id, ROWNUM AS rownum__index
         FROM (
           SELECT foo.id AS id, ends_with_me.id AS ends_with_me__id
             FROM cd me
@@ -157,7 +156,7 @@ for my $test_set (
     sql => '(
       SELECT id, ends_with_me__id
       FROM (
-        SELECT id, ends_with_me__id, ROWNUM rownum__index
+        SELECT id, ends_with_me__id, ROWNUM AS rownum__index
         FROM (
           SELECT foo.id AS id, ends_with_me.id AS ends_with_me__id
             FROM cd me
@@ -202,7 +201,7 @@ is_same_sql_bind(
   '(
     SELECT owner_name, owner_books
       FROM (
-        SELECT owner_name, owner_books, ROWNUM rownum__index
+        SELECT owner_name, owner_books, ROWNUM AS rownum__index
           FROM (
             SELECT  owner.name AS owner_name,
               ( SELECT COUNT( * ) FROM owners owner WHERE (count.id = owner.id)) AS owner_books
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::LimitDialects;
 
 my ($LIMIT, $OFFSET) = (
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema;
 
@@ -5,15 +5,16 @@ use Test::More;
 use Test::Exception;
 use Storable 'dclone';
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema;
 my $native_limit_dialect = $schema->storage->sql_maker->{limit_dialect};
 
+my $where_string = 'me.title = ? AND source != ? AND source = ?';
+
 my @where_bind = (
-  [ {} => 'Study' ],
   [ {} => 'kama sutra' ],
+  [ {} => 'Study' ],
   [ { sqlt_datatype => 'varchar', sqlt_size => 100, dbic_colname => 'source' } => 'Library' ],
 );
 my @select_bind = (
@@ -36,17 +37,23 @@ my @order_bind = (
 my $tests = {
 
   LimitOffset => {
+    limit_plain => [
+      "( SELECT me.artistid FROM artist me LIMIT ? )",
+      [
+        [ { sqlt_datatype => 'integer' } => 5 ]
+      ],
+    ],
     limit => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         LIMIT ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -56,17 +63,17 @@ my $tests = {
       ],
     ],
     limit_offset => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         LIMIT ?
         OFFSET ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -77,17 +84,17 @@ my $tests = {
       ],
     ],
     ordered_limit => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
         LIMIT ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -98,18 +105,18 @@ my $tests = {
       ]
     ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
         LIMIT ?
         OFFSET ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -121,7 +128,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT me.name, me.id
@@ -130,7 +137,7 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 3 ],
         [ { sqlt_datatype => 'integer' } => 1 ],
@@ -139,18 +146,24 @@ my $tests = {
   },
 
   LimitXY => {
+    limit_plain => [
+      "( SELECT me.artistid FROM artist me LIMIT ? )",
+      [
+        [ { sqlt_datatype => 'integer' } => 5 ]
+      ],
+    ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
         LIMIT ?, ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -162,7 +175,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT me.name, me.id
@@ -171,7 +184,7 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 1 ],
         [ { sqlt_datatype => 'integer' } => 3 ],
@@ -180,17 +193,23 @@ my $tests = {
   },
 
   SkipFirst => {
+    limit_plain => [
+      "( SELECT FIRST ? me.artistid FROM artist me )",
+      [
+        [ { sqlt_datatype => 'integer' } => 5 ]
+      ],
+    ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT SKIP ? FIRST ? me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 3 ],
         [ { sqlt_datatype => 'integer' } => 4 ],
@@ -202,7 +221,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT SKIP ? FIRST ? me.name, me.id
@@ -210,7 +229,7 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 1 ],
         [ { sqlt_datatype => 'integer' } => 3 ],
@@ -219,17 +238,23 @@ my $tests = {
   },
 
   FirstSkip => {
+    limit_plain => [
+      "( SELECT FIRST ? me.artistid FROM artist me )",
+      [
+        [ { sqlt_datatype => 'integer' } => 5 ]
+      ],
+    ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT FIRST ? SKIP ? me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 4 ],
         [ { sqlt_datatype => 'integer' } => 3 ],
@@ -241,7 +266,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT FIRST ? SKIP ? me.name, me.id
@@ -249,7 +274,7 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 3 ],
         [ { sqlt_datatype => 'integer' } => 1 ],
@@ -258,7 +283,7 @@ my $tests = {
   },
 
   RowNumberOver => do {
-    my $unordered_sql = '(
+    my $unordered_sql = "(
       SELECT me.id, owner__id, owner__name, bar, baz
         FROM (
           SELECT me.id, owner__id, owner__name, bar, baz, ROW_NUMBER() OVER() AS rno__row__index
@@ -267,15 +292,15 @@ my $tests = {
                 FROM books me
                 JOIN owners owner
                   ON owner.id = me.owner
-              WHERE source != ? AND me.title = ? AND source = ?
+              WHERE $where_string
               GROUP BY (me.id / ?), owner.id
               HAVING ?
             ) me
       ) me
       WHERE rno__row__index >= ? AND rno__row__index <= ?
-    )';
+    )";
 
-    my $ordered_sql = '(
+    my $ordered_sql = "(
       SELECT me.id, owner__id, owner__name, bar, baz
         FROM (
           SELECT me.id, owner__id, owner__name, bar, baz, ROW_NUMBER() OVER( ORDER BY ORDER__BY__001, ORDER__BY__002 ) AS rno__row__index
@@ -285,15 +310,32 @@ my $tests = {
                 FROM books me
                 JOIN owners owner
                   ON owner.id = me.owner
-              WHERE source != ? AND me.title = ? AND source = ?
+              WHERE $where_string
               GROUP BY (me.id / ?), owner.id
               HAVING ?
             ) me
       ) me
       WHERE rno__row__index >= ? AND rno__row__index <= ?
-    )';
+    )";
 
     {
+      limit_plain => [
+        "(
+          SELECT me.artistid
+            FROM (
+              SELECT me.artistid, ROW_NUMBER() OVER(  ) AS rno__row__index
+                FROM (
+                  SELECT me.artistid
+                    FROM artist me
+                ) me
+            ) me
+          WHERE rno__row__index >= ? AND rno__row__index <= ?
+        )",
+        [
+          [ { sqlt_datatype => 'integer' } => 1 ],
+          [ { sqlt_datatype => 'integer' } => 5 ],
+        ],
+      ],
       limit => [$unordered_sql,
         [
           @select_bind,
@@ -337,7 +379,7 @@ my $tests = {
         ],
       ],
       limit_offset_prefetch => [
-        '(
+        "(
           SELECT me.name, books.id, books.source, books.owner, books.title, books.price
             FROM (
               SELECT me.name, me.id
@@ -351,7 +393,7 @@ my $tests = {
             ) me
             LEFT JOIN books books
               ON books.owner = me.id
-        )',
+        )",
         [
           [ { sqlt_datatype => 'integer' } => 2 ],
           [ { sqlt_datatype => 'integer' } => 4 ],
@@ -362,23 +404,36 @@ my $tests = {
 
   RowNum => do {
     my $limit_sql = sub {
-      sprintf '(
+      sprintf "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
               FROM books me
               JOIN owners owner
                 ON owner.id = me.owner
-            WHERE source != ? AND me.title = ? AND source = ?
+            WHERE $where_string
             GROUP BY (me.id / ?), owner.id
             HAVING ?
             %s
           ) me
         WHERE ROWNUM <= ?
-      )', $_[0] || '';
+      )", $_[0] || '';
     };
 
     {
+      limit_plain => [
+        "(
+          SELECT me.artistid
+            FROM (
+              SELECT me.artistid
+                FROM artist me
+            ) me
+          WHERE ROWNUM <= ?
+        )",
+        [
+          [ { sqlt_datatype => 'integer' } => 5 ],
+        ],
+      ],
       limit => [ $limit_sql->(),
         [
           @select_bind,
@@ -389,22 +444,22 @@ my $tests = {
         ],
       ],
       limit_offset => [
-        '(
+        "(
           SELECT me.id, owner__id, owner__name, bar, baz
             FROM (
-              SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index
+              SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index
                 FROM (
                   SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
                     FROM books me
                     JOIN owners owner
                       ON owner.id = me.owner
-                  WHERE source != ? AND me.title = ? AND source = ?
+                  WHERE $where_string
                   GROUP BY (me.id / ?), owner.id
                   HAVING ?
                 ) me
             ) me
           WHERE rownum__index BETWEEN ? AND ?
-        )',
+        )",
         [
           @select_bind,
           @where_bind,
@@ -425,16 +480,16 @@ my $tests = {
         ],
       ],
       ordered_limit_offset => [
-        '(
+        "(
           SELECT me.id, owner__id, owner__name, bar, baz
             FROM (
-              SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM rownum__index
+              SELECT me.id, owner__id, owner__name, bar, baz, ROWNUM AS rownum__index
                 FROM (
                   SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
                     FROM books me
                     JOIN owners owner
                       ON owner.id = me.owner
-                  WHERE source != ? AND me.title = ? AND source = ?
+                  WHERE $where_string
                   GROUP BY (me.id / ?), owner.id
                   HAVING ?
                   ORDER BY ? / ?, ?
@@ -442,7 +497,7 @@ my $tests = {
               WHERE ROWNUM <= ?
             ) me
           WHERE rownum__index >= ?
-        )',
+        )",
         [
           @select_bind,
           @where_bind,
@@ -454,12 +509,12 @@ my $tests = {
         ],
       ],
       limit_offset_prefetch => [
-        '(
+        "(
           SELECT me.name, books.id, books.source, books.owner, books.title, books.price
             FROM (
               SELECT me.name, me.id
                 FROM (
-                  SELECT me.name, me.id, ROWNUM rownum__index
+                  SELECT me.name, me.id, ROWNUM AS rownum__index
                     FROM (
                       SELECT me.name, me.id
                         FROM owners me
@@ -468,7 +523,7 @@ my $tests = {
             ) me
             LEFT JOIN books books
               ON books.owner = me.id
-        )',
+        )",
         [
           [ { sqlt_datatype => 'integer' } => 2 ],
           [ { sqlt_datatype => 'integer' } => 4 ],
@@ -478,17 +533,21 @@ my $tests = {
   },
 
   FetchFirst => {
+    limit_plain => [
+      "( SELECT me.artistid FROM artist me FETCH FIRST 5 ROWS ONLY )",
+      [],
+    ],
     limit => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         FETCH FIRST 4 ROWS ONLY
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -497,14 +556,14 @@ my $tests = {
       ],
     ],
     limit_offset => [
-      '(
+      "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
               FROM books me
               JOIN owners owner
                 ON owner.id = me.owner
-            WHERE source != ? AND me.title = ? AND source = ?
+            WHERE $where_string
             GROUP BY (me.id / ?), owner.id
             HAVING ?
             ORDER BY me.id
@@ -512,7 +571,7 @@ my $tests = {
           ) me
         ORDER BY me.id DESC
         FETCH FIRST 4 ROWS ONLY
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -521,17 +580,17 @@ my $tests = {
       ],
     ],
     ordered_limit => [
-      '(
+      "(
         SELECT me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
         FETCH FIRST 4 ROWS ONLY
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -541,7 +600,7 @@ my $tests = {
       ],
     ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT me.id, owner__id, owner__name, bar, baz, ORDER__BY__001, ORDER__BY__002
@@ -550,7 +609,7 @@ my $tests = {
                   FROM books me
                   JOIN owners owner
                     ON owner.id = me.owner
-                WHERE source != ? AND me.title = ? AND source = ?
+                WHERE $where_string
                 GROUP BY (me.id / ?), owner.id
                 HAVING ?
                 ORDER BY ? / ?, ?
@@ -560,7 +619,7 @@ my $tests = {
             FETCH FIRST 4 ROWS ONLY
           ) me
         ORDER BY ORDER__BY__001, ORDER__BY__002
-      )',
+      )",
       [
         @select_bind,
         @order_bind,
@@ -571,7 +630,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT me.name, me.id
@@ -586,22 +645,26 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [],
     ],
   },
 
   Top => {
+    limit_plain => [
+      "( SELECT TOP 5 me.artistid FROM artist me )",
+      [],
+    ],
     limit => [
-      '(
+      "(
         SELECT TOP 4 me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -610,20 +673,20 @@ my $tests = {
       ],
     ],
     limit_offset => [
-      '(
+      "(
         SELECT TOP 4 me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT TOP 7 me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz
               FROM books me
               JOIN owners owner
                 ON owner.id = me.owner
-            WHERE source != ? AND me.title = ? AND source = ?
+            WHERE $where_string
             GROUP BY (me.id / ?), owner.id
             HAVING ?
             ORDER BY me.id
           ) me
         ORDER BY me.id DESC
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -632,16 +695,16 @@ my $tests = {
       ],
     ],
     ordered_limit => [
-      '(
+      "(
         SELECT TOP 4 me.id, owner.id, owner.name, ? * ?, ?
           FROM books me
           JOIN owners owner
             ON owner.id = me.owner
-        WHERE source != ? AND me.title = ? AND source = ?
+        WHERE $where_string
         GROUP BY (me.id / ?), owner.id
         HAVING ?
         ORDER BY ? / ?, ?
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -651,7 +714,7 @@ my $tests = {
       ],
     ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT TOP 4 me.id, owner__id, owner__name, bar, baz, ORDER__BY__001, ORDER__BY__002
@@ -660,7 +723,7 @@ my $tests = {
                   FROM books me
                   JOIN owners owner
                     ON owner.id = me.owner
-                WHERE source != ? AND me.title = ? AND source = ?
+                WHERE $where_string
                 GROUP BY (me.id / ?), owner.id
                 HAVING ?
                 ORDER BY ? / ?, ?
@@ -668,7 +731,7 @@ my $tests = {
             ORDER BY ORDER__BY__001 DESC, ORDER__BY__002 DESC
           ) me
         ORDER BY ORDER__BY__001, ORDER__BY__002
-      )',
+      )",
       [
         @select_bind,
         @order_bind,
@@ -679,7 +742,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT TOP 3 me.name, me.id
@@ -692,21 +755,40 @@ my $tests = {
           ) me
           LEFT JOIN books books
             ON books.owner = me.id
-      )',
+      )",
       [],
     ],
   },
 
   GenericSubQ => {
+    limit_plain => [
+      "(
+        SELECT me.artistid
+          FROM (
+            SELECT me.artistid
+              FROM artist me
+          ) me
+        WHERE
+          (
+            SELECT COUNT(*)
+              FROM artist rownum__emulation
+            WHERE rownum__emulation.artistid < me.artistid
+          ) < ?
+        ORDER BY me.artistid ASC
+      )",
+      [
+        [ { sqlt_datatype => 'integer' } => 5 ]
+      ],
+    ],
     ordered_limit => [
-      '(
+      "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz, me.price
               FROM books me
               JOIN owners owner
                 ON owner.id = me.owner
-            WHERE source != ? AND me.title = ? AND source = ?
+            WHERE $where_string
             GROUP BY (me.id / ?), owner.id
             HAVING ?
           ) me
@@ -735,7 +817,7 @@ my $tests = {
             )
           ) < ?
         ORDER BY me.price DESC, me.id ASC
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -745,14 +827,14 @@ my $tests = {
       ],
     ],
     ordered_limit_offset => [
-      '(
+      "(
         SELECT me.id, owner__id, owner__name, bar, baz
           FROM (
             SELECT me.id, owner.id AS owner__id, owner.name AS owner__name, ? * ? AS bar, ? AS baz, me.price
               FROM books me
               JOIN owners owner
                 ON owner.id = me.owner
-            WHERE source != ? AND me.title = ? AND source = ?
+            WHERE $where_string
             GROUP BY (me.id / ?), owner.id
             HAVING ?
           ) me
@@ -781,7 +863,7 @@ my $tests = {
             )
           ) BETWEEN ? AND ?
         ORDER BY me.price DESC, me.id ASC
-      )',
+      )",
       [
         @select_bind,
         @where_bind,
@@ -792,7 +874,7 @@ my $tests = {
       ],
     ],
     limit_offset_prefetch => [
-      '(
+      "(
         SELECT me.name, books.id, books.source, books.owner, books.title, books.price
           FROM (
             SELECT me.name, me.id
@@ -819,7 +901,7 @@ my $tests = {
           LEFT JOIN books books
             ON books.owner = me.id
         ORDER BY me.name ASC, me.id DESC
-      )',
+      )",
       [
         [ { sqlt_datatype => 'integer' } => 1 ],
         [ { sqlt_datatype => 'integer' } => 3 ],
@@ -835,7 +917,25 @@ for my $limtype (sort keys %$tests) {
   delete $schema->storage->_sql_maker->{_cached_syntax};
   $schema->storage->_sql_maker->limit_dialect ($limtype);
 
-  my $can_run = ($limtype eq $native_limit_dialect or $limtype eq 'GenericSubQ');
+  # do the simplest thing possible first
+  if ($tests->{$limtype}{limit_plain}) {
+    is_same_sql_bind(
+      $schema->resultset('Artist')->search(
+        [ -and => [ {}, [] ], -or => [ {}, [] ] ],
+        {
+          columns => 'artistid',
+          join => [ {}, [ [ {}, {} ] ], {} ],
+          prefetch => [ [ [ {}, [] ], {} ], {}, [ {} ] ],
+          order_by => ( $limtype eq 'GenericSubQ' ? 'artistid' : [] ),
+          group_by => [],
+          rows => 5,
+          offset => 0,
+        }
+      )->as_query,
+      @{$tests->{$limtype}{limit_plain}},
+      "$limtype: Plain unordered ungrouped select with limit and no offset",
+    )
+  }
 
   # chained search is necessary to exercise the recursive {where} parser
   my $rs = $schema->resultset('BooksInLibrary')->search(
@@ -855,6 +955,7 @@ for my $limtype (sort keys %$tests) {
   #
   # not all tests run on all dialects (somewhere impossible, somewhere makes no sense)
   #
+  my $can_run = ($limtype eq $native_limit_dialect or $limtype eq 'GenericSubQ');
 
   # only limit, no offset, no order
   if ($tests->{$limtype}{limit}) {
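
The new limit_plain cases above are all exercised by the same recipe: pin the storage's limit_dialect, render a plain rows-limited resultset via as_query, and compare the SQL plus bind metadata with is_same_sql_bind. A trimmed-down standalone version of that recipe for a single dialect — a sketch only, assuming a distribution checkout so t/lib and the bundled DBICTest schema are available:

  use strict;
  use warnings;
  use Test::More;

  use lib qw(t/lib);
  use DBICTest ':DiffSQL';   # exports is_same_sql_bind, as in the tests above

  my $schema = DBICTest->init_schema;

  # force the dialect under test; the full loop above also resets the maker's
  # cached limit syntax, which is only needed when switching between dialects
  $schema->storage->sql_maker->limit_dialect('LimitXY');

  is_same_sql_bind(
    $schema->resultset('Artist')
           ->search({}, { columns => 'artistid', rows => 5 })
           ->as_query,
    '( SELECT me.artistid FROM artist me LIMIT ? )',
    [ [ { sqlt_datatype => 'integer' } => 5 ] ],
    'plain unordered select renders the LimitXY dialect',
  );

  done_testing;
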
@@ -2,8 +2,7 @@ use strict;
 use warnings;
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 # the entire point of the subclass is that parenthesis have to be
 # just right for ACCESS to be happy
@@ -87,7 +86,7 @@ my ($sql, @bind) = $sa->select(
         { me => "cd" },
         [
             { "-join_type" => "LEFT", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
+            { "artist.artistid" => { -ident => "me.artist" } },
         ],
     ],
     [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
@@ -105,11 +104,11 @@ is_same_sql_bind(
         { me => "cd" },
         [
             { "-join_type" => "LEFT", track => "track" },
-            { "track.cd" => "me.cdid" },
+            { "track.cd" => { -ident => "me.cdid" } },
         ],
         [
             { artist => "artist" },
-            { "artist.artistid" => "me.artist" },
+            { "artist.artistid" => { -ident => "me.artist" } },
         ],
     ],
     [ 'track.title', 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
@@ -4,66 +4,49 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBICTest::Schema;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest::Schema->connect (DBICTest->_database, { quote_char => '`' });
 # cheat
 require DBIx::Class::Storage::DBI::mysql;
+*DBIx::Class::Storage::DBI::mysql::_get_server_version = sub { 5 };
 bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' );
 
 # check that double-subqueries are properly wrapped
 {
-  my ($sql, @bind);
-  my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
-  my $orig_debugobj = $schema->storage->debugobj;
-  my $orig_debug = $schema->storage->debug;
-
-  $schema->storage->debugobj ($debugobj);
-  $schema->storage->debug (1);
-
   # the expected SQL may seem wastefully nonsensical - this is due to
   # CD's tablename being \'cd', which triggers the "this can be anything"
   # mode, and forces a subquery. This in turn forces *another* subquery
   # because mysql is being mysql
   # Also we know it will fail - never deployed. All we care about is the
-  # SQL to compare
-  eval { $schema->resultset ('CD')->update({ genreid => undef }) };
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  # SQL to compare, hence the eval
+  $schema->is_executed_sql_bind( sub {
+    eval { $schema->resultset ('CD')->update({ genreid => undef }) }
+  },[[
     'UPDATE cd SET `genreid` = ? WHERE `cdid` IN ( SELECT * FROM ( SELECT `me`.`cdid` FROM cd `me` ) `_forced_double_subquery` )',
-    [ 'NULL' ],
-    'Correct update-SQL with double-wrapped subquery',
-  );
+    [ { dbic_colname => "genreid", sqlt_datatype => "integer" }  => undef ],
+  ]], 'Correct update-SQL with double-wrapped subquery' );
 
   # same comment as above
-  eval { $schema->resultset ('CD')->delete };
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    eval { $schema->resultset ('CD')->delete }
+  }, [[
     'DELETE FROM cd WHERE `cdid` IN ( SELECT * FROM ( SELECT `me`.`cdid` FROM cd `me` ) `_forced_double_subquery` )',
-    [],
-    'Correct delete-SQL with double-wrapped subquery',
-  );
+  ]], 'Correct delete-SQL with double-wrapped subquery' );
 
   # and a couple of really contrived examples (we test them live in t/71mysql.t)
   my $rs = $schema->resultset('Artist')->search({ name => { -like => 'baby_%' } });
   my ($count_sql, @count_bind) = @${$rs->count_rs->as_query};
-  eval {
-    $schema->resultset('Artist')->search(
-      { artistid => {
-        -in => $rs->get_column('artistid')
-                    ->as_query
-      } },
-    )->update({ name => \[ "CONCAT( `name`, '_bell_out_of_', $count_sql )", @count_bind ] });
-  };
-
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    eval {
+      $schema->resultset('Artist')->search(
+        { artistid => {
+          -in => $rs->get_column('artistid')
+                      ->as_query
+        } },
+      )->update({ name => \[ "CONCAT( `name`, '_bell_out_of_', $count_sql )", @count_bind ] });
+    }
+  }, [[
     q(
       UPDATE `artist`
         SET `name` = CONCAT(`name`, '_bell_out_of_', (
@@ -83,18 +66,18 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' );
               WHERE `name` LIKE ?
             ) `_forced_double_subquery` )
     ),
-    [ ("'baby_%'") x 2 ],
-  );
+    ( [ { dbic_colname => "name", sqlt_datatype => "varchar", sqlt_size => 100 }
+        => 'baby_%' ]
+    ) x 2
+  ]]);
 
-  eval {
-    $schema->resultset('CD')->search_related('artist',
-      { 'artist.name' => { -like => 'baby_with_%' } }
-    )->delete
-  };
-
-  is_same_sql_bind (
-    $sql,
-    \@bind,
+  $schema->is_executed_sql_bind( sub {
+    eval {
+      $schema->resultset('CD')->search_related('artist',
+        { 'artist.name' => { -like => 'baby_with_%' } }
+      )->delete
+    }
+  }, [[
     q(
       DELETE FROM `artist`
       WHERE `artistid` IN (
@@ -102,17 +85,15 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' );
           FROM (
             SELECT `artist`.`artistid`
               FROM cd `me`
-              INNER JOIN `artist` `artist`
+              JOIN `artist` `artist`
                 ON `artist`.`artistid` = `me`.`artist`
             WHERE `artist`.`name` LIKE ?
           ) `_forced_double_subquery`
       )
     ),
-    [ "'baby_with_%'" ],
-  );
-
-  $schema->storage->debugobj ($orig_debugobj);
-  $schema->storage->debug ($orig_debug);
+    [ { dbic_colname => "artist.name", sqlt_datatype => "varchar", sqlt_size => 100 }
+        => 'baby_with_%' ],
+  ]] );
 }
 
 # Test support for straight joins
@@ -138,4 +119,37 @@ bless ( $schema->storage, 'DBIx::Class::Storage::DBI::mysql' );
   );
 }
 
+# Test support for inner joins on mysql v3
+for (
+  [ 3 => 'INNER JOIN' ],
+  [ 4 => 'JOIN' ],
+) {
+  my ($ver, $join_op) = @$_;
+
+  # we do not care at this point whether data is available; just do a reconnect
+  # cycle to clear the server version cache and then get a new maker
+  {
+    $schema->storage->disconnect;
+    $schema->storage->_sql_maker(undef);
+
+    no warnings 'redefine';
+    local *DBIx::Class::Storage::DBI::mysql::_get_server_version = sub { $ver };
+
+    $schema->storage->ensure_connected;
+    $schema->storage->sql_maker;
+  }
+
+  is_same_sql_bind (
+    $schema->resultset('CD')->search ({}, { prefetch => 'artist' })->as_query,
+    "(
+      SELECT `me`.`cdid`, `me`.`artist`, `me`.`title`, `me`.`year`, `me`.`genreid`, `me`.`single_track`,
+             `artist`.`artistid`, `artist`.`name`, `artist`.`rank`, `artist`.`charfield`
+        FROM cd `me`
+        $join_op `artist` `artist` ON `artist`.`artistid` = `me`.`artist`
+    )",
+    [],
+    "default join type works for version $ver",
+  );
+}
+
 done_testing;
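
The conversions above all follow the same calling convention: is_executed_sql_bind takes a closure that issues the statements, an arrayref with one [ $sql, @bind_pairs ] group per statement expected to reach the driver, and a test name. A minimal sketch of that shape against the stock DBICTest SQLite schema (the exact bind attributes come from the column definitions in that bundled test schema):

  use strict;
  use warnings;
  use Test::More;

  use lib qw(t/lib);
  use DBICTest;

  my $schema = DBICTest->init_schema;

  # one inner arrayref per executed statement: the SQL first,
  # then a [ \%attrs => $value ] pair for every placeholder
  $schema->is_executed_sql_bind( sub {
    $schema->resultset('Artist')->search({ name => 'foo' })->count;
  }, [[
    'SELECT COUNT( * ) FROM artist me WHERE name = ?',
    [ { dbic_colname => 'name', sqlt_datatype => 'varchar', sqlt_size => 100 }
        => 'foo' ],
  ]], 'count issues a single parameterized SELECT' );

  done_testing;
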
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Warn;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -11,8 +11,7 @@ BEGIN {
 use Test::Exception;
 use Data::Dumper::Concise;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::Oracle;
 
 #
@@ -10,9 +10,8 @@ BEGIN {
 }
 
 use lib qw(t/lib);
-use DBICTest;
+use DBICTest ':DiffSQL';
 use DBIx::Class::SQLMaker::OracleJoins;
-use DBIC::SqlMakerTest;
 
 my $sa = DBIx::Class::SQLMaker::OracleJoins->new;
 
@@ -5,8 +5,7 @@ use Test::More;
 use Test::Exception;
 use Data::Dumper::Concise;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 sub test_order {
     my $rs = shift;
@@ -3,8 +3,7 @@ use warnings;
 use Test::More;
 
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema();
 
@@ -1,66 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-
-use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
-
-my $schema = DBICTest->init_schema();
-
-$schema->storage->sql_maker->quote_char('`');
-$schema->storage->sql_maker->name_sep('.');
-
-my ($sql, @bind);
-$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind));
-$schema->storage->debug(1);
-
-my $rs;
-
-$rs = $schema->resultset('CD')->search(
-           { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
-           { join => 'artist' });
-eval { $rs->count };
-is_same_sql_bind(
-  $sql, \@bind,
-  "SELECT COUNT( * ) FROM cd `me`  JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )", ["'Caterwauler McCrae'", "'2001'"],
-  'got correct SQL for count query with quoting'
-);
-
-my $order = 'year DESC';
-$rs = $schema->resultset('CD')->search({},
-            { 'order_by' => $order });
-eval { $rs->first };
-like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)');
-
-$rs = $schema->resultset('CD')->search({},
-            { 'order_by' => \$order });
-eval { $rs->first };
-like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref');
-
-$schema->storage->sql_maker->quote_char([qw/[ ]/]);
-$schema->storage->sql_maker->name_sep('.');
-
-$rs = $schema->resultset('CD')->search(
-           { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
-           { join => 'artist' });
-eval { $rs->count };
-is_same_sql_bind(
-  $sql, \@bind,
-  "SELECT COUNT( * ) FROM cd [me]  JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? )", ["'Caterwauler McCrae'", "'2001'"],
-  'got correct SQL for count query with bracket quoting'
-);
-
-my %data = (
-       name => 'Bill',
-       order => '12'
-);
-
-$schema->storage->sql_maker->quote_char('`');
-$schema->storage->sql_maker->name_sep('.');
-
-is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
-
-done_testing;
@@ -1,83 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-
-use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
-use DBIC::DebugObj;
-
-my $schema = DBICTest->init_schema();
-
-my $dsn = $schema->storage->_dbi_connect_info->[0];
-$schema->connection(
-  $dsn,
-  undef,
-  undef,
-  { AutoCommit => 1 },
-  { quote_char => '`', name_sep => '.' },
-);
-
-my ($sql, @bind);
-$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)),
-$schema->storage->debug(1);
-
-my $rs;
-
-$rs = $schema->resultset('CD')->search(
-           { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
-           { join => 'artist' });
-eval { $rs->count };
-is_same_sql_bind(
-  $sql, \@bind,
-  "SELECT COUNT( * ) FROM cd `me`  JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )", ["'Caterwauler McCrae'", "'2001'"],
-  'got correct SQL for count query with quoting'
-);
-
-my $order = 'year DESC';
-$rs = $schema->resultset('CD')->search({},
-            { 'order_by' => $order });
-eval { $rs->first };
-like($sql, qr/ORDER BY `\Q${order}\E`/, 'quoted ORDER BY with DESC (should use a scalarref anyway)');
-
-$rs = $schema->resultset('CD')->search({},
-            { 'order_by' => \$order });
-eval { $rs->first };
-like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref');
-
-$schema->connection(
-  $dsn,
-  undef,
-  undef,
-  { AutoCommit => 1, quote_char => [qw/[ ]/], name_sep => '.' }
-);
-
-$schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)),
-$schema->storage->debug(1);
-
-$rs = $schema->resultset('CD')->search(
-           { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
-           { join => 'artist' });
-eval { $rs->count };
-is_same_sql_bind(
-  $sql, \@bind,
-  "SELECT COUNT( * ) FROM cd [me]  JOIN [artist] [artist] ON ( [artist].[artistid] = [me].[artist] ) WHERE ( [artist].[name] = ? AND [me].[year] = ? )", ["'Caterwauler McCrae'", "'2001'"],
-  'got correct SQL for count query with bracket quoting'
-);
-
-my %data = (
-       name => 'Bill',
-       order => '12'
-);
-
-$schema->connection(
-  $dsn,
-  undef,
-  undef,
-  { AutoCommit => 1, quote_char => '`', name_sep => '.' }
-);
-
-is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
-
-done_testing;
@@ -0,0 +1,67 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest ':DiffSQL';
+
+my $schema = DBICTest->init_schema( no_deploy => 1 );
+
+$schema->connection(
+  @{ $schema->storage->_dbi_connect_info },
+  { AutoCommit => 1, quote_char => [qw/[ ]/] }
+);
+
+my $rs =  $schema->resultset('CD')->search(
+  { 'me.year' => 2001, 'artist.name' => 'Caterwauler McCrae' },
+  { join => 'artist' }
+)->count_rs;
+
+my $expected_bind = [
+  [ { dbic_colname => "artist.name", sqlt_datatype => "varchar", sqlt_size => 100 }
+    => 'Caterwauler McCrae' ],
+  [ { dbic_colname => "me.year", sqlt_datatype => "varchar", sqlt_size => 100 }
+    => 2001 ],
+];
+
+is_same_sql_bind(
+  $rs->as_query,
+  "(SELECT COUNT( * ) FROM cd [me] JOIN [artist] [artist] ON [artist].[artistid] = [me].[artist] WHERE ( [artist].[name] = ? AND [me].[year] = ? ))",
+  $expected_bind,
+  'got correct SQL for count query with bracket quoting'
+);
+
+$schema->storage->sql_maker->quote_char('`');
+$schema->storage->sql_maker->name_sep('.');
+
+is_same_sql_bind (
+  $rs->as_query,
+  "(SELECT COUNT( * ) FROM cd `me`  JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? ))",
+  $expected_bind,
+  'got correct SQL for count query with mysql quoting'
+);
+
+# !!! talk to ribasushi *explicitly* before modifying these tests !!!
+{
+  is_same_sql_bind(
+    $schema->resultset('CD')->search({}, { order_by => 'year DESC', columns => 'cdid' })->as_query,
+    '(SELECT `me`.`cdid` FROM cd `me` ORDER BY `year DESC`)',
+    [],
+    'quoted ORDER BY with DESC (should use a scalarref anyway)'
+  );
+
+  is_same_sql_bind(
+    $schema->resultset('CD')->search({}, { order_by => \'year DESC', columns => 'cdid' })->as_query,
+    '(SELECT `me`.`cdid` FROM cd `me` ORDER BY year DESC)',
+    [],
+    'did not quote ORDER BY with scalarref',
+  );
+}
+
+is_same_sql(
+  scalar $schema->storage->sql_maker->update('group', { order => 12, name => 'Bill' }),
+  'UPDATE `group` SET `name` = ?, `order` = ?',
+  'quoted table names for UPDATE' );
+
+done_testing;
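
The quoting assertions above rely on quote_char and name_sep being flippable on the live sql_maker, with the very same resultset re-rendered through as_query afterwards; nothing has to touch the database. A minimal sketch of that round trip, assuming the DBICTest schema (whose CD source uses a literal table name, which is why 'cd' itself stays unquoted above):

  use strict;
  use warnings;
  use Test::More;

  use lib qw(t/lib);
  use DBICTest ':DiffSQL';

  my $schema = DBICTest->init_schema;
  my $rs = $schema->resultset('CD')->search({}, { columns => 'cdid' });

  # switch quoting on the already-built maker, then re-render the same rs
  $schema->storage->sql_maker->quote_char('`');
  $schema->storage->sql_maker->name_sep('.');

  is_same_sql_bind(
    $rs->as_query,
    '(SELECT `me`.`cdid` FROM cd `me`)',
    [],
    'identifiers quoted with the configured quote_char',
  );

  done_testing;
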
@@ -3,8 +3,7 @@ use warnings;
 
 use Test::More;
 use lib qw(t/lib);
-use DBICTest;
-use DBIC::SqlMakerTest;
+use DBICTest ':DiffSQL';
 
 my $schema = DBICTest->init_schema;
 
@@ -4,10 +4,10 @@ no warnings 'once';
 
 use Test::More;
 use Test::Exception;
+use Try::Tiny;
+use File::Spec;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj;
-use DBIC::SqlMakerTest;
 use Path::Class qw/file/;
 
 BEGIN { delete @ENV{qw(DBIC_TRACE DBIC_TRACE_PROFILE DBICTEST_SQLITE_USE_FILE)} }
@@ -19,6 +19,7 @@ unlink $lfn or die $!
   if -e $lfn;
 
 # make sure we are testing the vanilla debugger and not ::PrettyPrint
+require DBIx::Class::Storage::Statistics;
 $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new);
 
 ok ( $schema->storage->debug(1), 'debug' );
@@ -54,32 +55,121 @@ END {
 }
 
 open(STDERRCOPY, '>&STDERR');
-close(STDERR);
-dies_ok {
+
+my $exception_line_number;
+# STDERR will be closed, no T::B diag in blocks
+my $exception = try {
+  close(STDERR);
+  $exception_line_number = __LINE__ + 1;  # important for test, do not reformat
   $schema->resultset('CD')->search({})->count;
-} 'Died on closed FH';
+} catch {
+  $_
+} finally {
+  # restore STDERR
+  open(STDERR, '>&STDERRCOPY');
+};
+
+like $exception, qr/
+  \QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)\E
+    .+
+  \Qat @{[__FILE__]} line $exception_line_number\E$
+/xms;
+
+my @warnings;
+$exception = try {
+  local $SIG{__WARN__} = sub { push @warnings, @_ if $_[0] =~ /character/i };
+  close STDERR;
+  open(STDERR, '>', File::Spec->devnull) or die $!;
+  $schema->resultset('CD')->search({ title => "\x{1f4a9}" })->count;
+  '';
+} catch {
+  $_;
+} finally {
+  # restore STDERR
+  close STDERR;
+  open(STDERR, '>&STDERRCOPY');
+};
+
+die "How did that fail... $exception"
+  if $exception;
+
+is_deeply(\@warnings, [], 'No warnings with unicode on STDERR');
+
+# test debugcb and debugobj protocol
+{
+  my $rs = $schema->resultset('CD')->search( {
+    artist => 1,
+    cdid => { -between => [ 1, 3 ] },
+    title => { '!=' => \[ '?', undef ] }
+  });
+
+  my $sql_trace = 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( artist = ? AND ( cdid BETWEEN ? AND ? ) AND title != ? ) )';
+  my @bind_trace = qw( '1' '1' '3' NULL );  # quotes are in fact part of the trace </facepalm>
+
+
+  my @args;
+  $schema->storage->debugcb(sub { push @args, @_ } );
+
+  $rs->all;
+
+  is_deeply( \@args, [
+    "SELECT",
+    sprintf( "%s: %s\n", $sql_trace, join ', ', @bind_trace ),
+  ]);
 
-open(STDERR, '>&STDERRCOPY');
+  {
+    package DBICTest::DebugObj;
+    our @ISA = 'DBIx::Class::Storage::Statistics';
+
+    sub query_start {
+      my $self = shift;
+      ( $self->{_traced_sql}, @{$self->{_traced_bind}} ) = @_;
+    }
+  }
+
+  my $do = $schema->storage->debugobj(DBICTest::DebugObj->new);
+
+  $rs->all;
+
+  is( $do->{_traced_sql}, $sql_trace );
+
+  is_deeply ( $do->{_traced_bind}, \@bind_trace );
+}
 
-# test trace output correctness for bind params
+# recreate test as seen in DBIx::Class::QueryLog
+# the rationale is that if someone uses a non-IO::Handle object
+# on CPAN, many are *bound* to use one on darkpan. Thus this
+# test ensures there is no future silent breakage
 {
-    my ($sql, @bind);
-    $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind));
-
-    my @cds = $schema->resultset('CD')->search( { artist => 1, cdid => { -between => [ 1, 3 ] }, } );
-    is_same_sql_bind(
-        $sql, \@bind,
-        "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) )",
-        [qw/'1' '1' '3'/],
-        'got correct SQL with all bind parameters (debugcb)'
-    );
-
-    @cds = $schema->resultset('CD')->search( { artist => 1, cdid => { -between => [ 1, 3 ] }, } );
-    is_same_sql_bind(
-        $sql, \@bind,
-        "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) )", ["'1'", "'1'", "'3'"],
-        'got correct SQL with all bind parameters (debugobj)'
-    );
+  my $output = "";
+
+  {
+    package DBICTest::_Printable;
+
+    sub print {
+      my ($self, @args) = @_;
+      $output .= join('', @args);
+    }
+  }
+
+  $schema->storage->debugobj(undef);
+  $schema->storage->debug(1);
+  $schema->storage->debugfh( bless {}, "DBICTest::_Printable" );
+  $schema->storage->txn_do( sub { $schema->resultset('Artist')->count } );
+
+  like (
+    $output,
+    qr/
+      \A
+      ^ \QBEGIN WORK\E \s*?
+      ^ \QSELECT COUNT( * ) FROM artist me:\E \s*?
+      ^ \QCOMMIT\E \s*?
+      \z
+    /xm
+  );
+
+  $schema->storage->debug(0);
+  $schema->storage->debugfh(undef);
 }
 
 done_testing;
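
The last block above feeds debugfh a bare blessed hash to prove the trace layer can write to anything that responds to print; in everyday use the same hook takes any opened filehandle (or is driven externally through the DBIC_TRACE environment variable that this test clears at the top). A minimal sketch with a plain lexical filehandle, assuming the bundled DBICTest schema:

  use strict;
  use warnings;

  use lib qw(t/lib);
  use DBICTest;

  my $schema = DBICTest->init_schema;

  open my $trace_fh, '>', 'dbic-trace.log' or die $!;

  $schema->storage->debug(1);
  $schema->storage->debugfh($trace_fh);   # every executed statement is written here

  $schema->resultset('Artist')->count;

  $schema->storage->debug(0);
  $schema->storage->debugfh(undef);
  close $trace_fh;
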
@@ -22,7 +22,7 @@ local $ENV{DBI_DSN};
 # there ought to be more code like this in the wild
 like(
   DBICTest::Schema->connect->deployment_statements('SQLite'),
-  qr/\bCREATE TABLE\b/i
+  qr/\bCREATE TABLE artist\b/i  # ensure quoting *is* disabled
 );
 
 lives_ok( sub {
@@ -31,18 +31,42 @@ lives_ok( sub {
     $parse_schema->resultset("Artist")->all();
 }, 'artist table deployed correctly' );
 
-my $schema = DBICTest->init_schema();
+my $schema = DBICTest->init_schema( quote_names => 1 );
 
 my $var = dir ("t/var/ddl_dir-$$");
 $var->mkpath unless -d $var;
 
 my $test_dir_1 = $var->subdir ('test1', 'foo', 'bar' );
 $test_dir_1->rmtree if -d $test_dir_1;
-$schema->create_ddl_dir( undef, undef, $test_dir_1 );
+$schema->create_ddl_dir( [qw(SQLite MySQL)], 1, $test_dir_1 );
 
 ok( -d $test_dir_1, 'create_ddl_dir did a make_path on its target dir' );
 ok( scalar( glob $test_dir_1.'/*.sql' ), 'there are sql files in there' );
 
+my $less = $schema->clone;
+$less->unregister_source('BindType');
+$less->create_ddl_dir( [qw(SQLite MySQL)], 2, $test_dir_1, 1 );
+
+for (
+  [ SQLite => '"' ],
+  [ MySQL => '`' ],
+) {
+  my $type = $_->[0];
+  my $q = quotemeta($_->[1]);
+
+  for my $f (map { $test_dir_1->file("DBICTest-Schema-${_}-$type.sql") } qw(1 2) ) {
+    like scalar $f->slurp, qr/CREATE TABLE ${q}track${q}/, "Proper quoting in $f";
+  }
+
+  {
+    local $TODO = 'SQLT::Producer::MySQL has no knowledge of the mythical beast of quoting...'
+      if $type eq 'MySQL';
+
+    my $f = $test_dir_1->file("DBICTest-Schema-1-2-$type.sql");
+    like scalar $f->slurp, qr/DROP TABLE ${q}bindtype_test${q}/, "Proper quoting in diff $f";
+  }
+}
+
 {
   local $TODO = 'we should probably add some tests here for actual deployability of the DDL?';
   ok( 0 );
@@ -38,7 +38,7 @@ throws_ok (
 # exception fallback:
 
 SKIP: {
-  if (DBIx::Class::_ENV_::PEEPEENESS) {
+  if ( !!DBIx::Class::_ENV_::PEEPEENESS ) {
     skip "Your perl version $] appears to leak like a sieve - skipping garbage collected \$schema test", 1;
   }
 
@@ -4,17 +4,14 @@ use warnings;
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::DebugObj;
-use DBIC::SqlMakerTest;
-use DBI::Const::GetInfoType;
 
 { # Fake storage driver for SQLite + no bind variables
   package DBICTest::SQLite::NoBindVars;
-    use Class::C3;
-    use base qw/
-        DBIx::Class::Storage::DBI::NoBindVars
-        DBIx::Class::Storage::DBI::SQLite
-    /;
+  use base qw(
+    DBIx::Class::Storage::DBI::NoBindVars
+    DBIx::Class::Storage::DBI::SQLite
+  );
+  use mro 'c3';
 }
 
 my $schema = DBICTest->init_schema (storage_type => 'DBICTest::SQLite::NoBindVars', no_populate => 1);
@@ -35,26 +32,13 @@ my $it = $schema->resultset('Artist')->search( {},
 
 is( $it->count, 3, "LIMIT count ok" );  # ask for 3 rows out of 7 artists
 
-my ($sql, @bind);
-my $orig_debugobj = $schema->storage->debugobj;
-my $orig_debug = $schema->storage->debug;
-$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) );
-$schema->storage->debug (1);
-
-is( $it->next->name, "Artist 2", "iterator->next ok" );
-$it->next;
-$it->next;
-is( $it->next, undef, "next past end of resultset ok" );
-
-$schema->storage->debugobj ($orig_debugobj);
-$schema->storage->debug ($orig_debug);
-
-is_same_sql_bind (
-  $sql,
-  \@bind,
-  'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me ORDER BY artistid LIMIT 3 OFFSET 2',
-  [],
-  'Correctly interpolated SQL'
-);
+$schema->is_executed_sql_bind( sub {
+  is( $it->next->name, "Artist 2", "iterator->next ok" );
+  $it->next;
+  $it->next;
+  is( $it->next, undef, "next past end of resultset ok" );
+}, [
+  [ 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me ORDER BY artistid LIMIT 3 OFFSET 2' ],
+], 'Correctly interpolated SQL' );
 
 done_testing;
@@ -36,6 +36,11 @@ $schema->storage->disconnect;
 ok $schema->connection(
     sub { DBI->connect(DBICTest->_database, undef, undef, { AutoCommit => 0 }) },
     {
+        # DO NOT REMOVE - this seems like an unrelated piece of info,
+        # but is in fact a test for a bug where setting an accessor-via-option
+        # would trigger an early connect *bypassing* the on_connect_* pieces
+        cursor_class => 'DBIx::Class::Storage::Cursor',
+
         on_connect_do       => [
             'CREATE TABLE TEST_empty (id INTEGER)',
             [ 'INSERT INTO TEST_empty VALUES (?)', {}, 2 ],
@@ -4,7 +4,6 @@ use warnings;
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use DBIC::SqlMakerTest;
 
 my $ping_count = 0;
 
@@ -0,0 +1,33 @@
+use warnings;
+use strict;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+{
+  package # hideee
+    DBICTest::CrazyInt;
+
+  use overload
+    '0+' => sub { 666 },
+    '""' => sub { 999 },
+    fallback => 1,
+  ;
+}
+
+# check DBI behavior when fed a stringifiable/nummifiable value
+{
+  my $crazynum = bless {}, 'DBICTest::CrazyInt';
+  cmp_ok( $crazynum, '==', 666 );
+  cmp_ok( $crazynum, 'eq', 999 );
+
+  my $schema = DBICTest->init_schema( no_populate => 1 );
+  $schema->storage->dbh_do(sub {
+    $_[1]->do('INSERT INTO artist (name) VALUES (?)', {}, $crazynum );
+  });
+
+  is( $schema->resultset('Artist')->next->name, 999, 'DBI preferred stringified version' );
+}
+done_testing;
@@ -24,7 +24,6 @@ use Test::Exception;
 use List::Util 'first';
 use Scalar::Util 'reftype';
 use File::Spec;
-use IO::Handle;
 use Moose();
 use MooseX::Types();
 note "Using Moose version $Moose::VERSION and MooseX::Types version $MooseX::Types::VERSION";
@@ -0,0 +1,240 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+{
+  package # moar hide
+    DBICTest::SVPTracerObj;
+
+  use base 'DBIx::Class::Storage::Statistics';
+
+  sub query_start { 'do notning'}
+  sub callback { 'dummy '}
+
+  for my $svpcall (map { "svp_$_" } qw(begin rollback release)) {
+    no strict 'refs';
+    *$svpcall = sub { $_[0]{uc $svpcall}++ };
+  }
+}
+
+my $env2optdep = {
+  DBICTEST_PG => 'test_rdbms_pg',
+  DBICTEST_MYSQL => 'test_rdbms_mysql',
+};
+
+my $schema;
+
+for ('', keys %$env2optdep) { SKIP: {
+
+  my $prefix;
+
+  if ($prefix = $_) {
+    my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;
+
+    skip ("Skipping tests with $prefix: set \$ENV{${prefix}_DSN} _USER and _PASS", 1)
+      unless $dsn;
+
+    skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
+      unless  DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
+
+    $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 });
+
+    my $create_sql;
+    $schema->storage->ensure_connected;
+    if ($schema->storage->isa('DBIx::Class::Storage::DBI::Pg')) {
+      $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))";
+      $schema->storage->dbh->do('SET client_min_messages=WARNING');
+    }
+    elsif ($schema->storage->isa('DBIx::Class::Storage::DBI::mysql')) {
+      $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB";
+    }
+    else {
+      skip( 'Untested driver ' . $schema->storage, 1 );
+    }
+
+    $schema->storage->dbh_do (sub {
+      $_[1]->do('DROP TABLE IF EXISTS artist');
+      $_[1]->do($create_sql);
+    });
+  }
+  else {
+    $prefix = 'SQLite Internal DB';
+    $schema = DBICTest->init_schema( no_populate => 1, auto_savepoint => 1 );
+  }
+
+  note "Testing $prefix";
+
+  local $schema->storage->{debugobj} = my $stats = DBICTest::SVPTracerObj->new;
+  local $schema->storage->{debug} = 1;
+
+  $schema->resultset('Artist')->create({ name => 'foo' });
+
+  $schema->txn_begin;
+
+  my $arty = $schema->resultset('Artist')->find(1);
+
+  my $name = $arty->name;
+
+  # First off, test a generated savepoint name
+  $schema->svp_begin;
+
+  cmp_ok($stats->{'SVP_BEGIN'}, '==', 1, 'Statistics svp_begin tickled');
+
+  $arty->update({ name => 'Jheephizzy' });
+
+  $arty->discard_changes;
+
+  cmp_ok($arty->name, 'eq', 'Jheephizzy', 'Name changed');
+
+  # Rollback the generated name
+  # Active: 0
+  $schema->svp_rollback;
+
+  cmp_ok($stats->{'SVP_ROLLBACK'}, '==', 1, 'Statistics svp_rollback tickled');
+
+  $arty->discard_changes;
+
+  cmp_ok($arty->name, 'eq', $name, 'Name rolled back');
+
+  $arty->update({ name => 'Jheephizzy'});
+
+  # Active: 0 1
+  $schema->svp_begin('testing1');
+
+  $arty->update({ name => 'yourmom' });
+
+  # Active: 0 1 2
+  $schema->svp_begin('testing2');
+
+  $arty->update({ name => 'gphat' });
+  $arty->discard_changes;
+  cmp_ok($arty->name, 'eq', 'gphat', 'name changed');
+
+  # Active: 0 1 2
+  # Rollback doesn't DESTROY the savepoint, it just rolls back to the value
+  # at its conception
+  $schema->svp_rollback('testing2');
+  $arty->discard_changes;
+  cmp_ok($arty->name, 'eq', 'yourmom', 'testing2 reverted');
+
+  # Active: 0 1 2 3
+  $schema->svp_begin('testing3');
+  $arty->update({ name => 'coryg' });
+
+  # Active: 0 1 2 3 4
+  $schema->svp_begin('testing4');
+  $arty->update({ name => 'watson' });
+
+  # Release 3, which implicitly releases 4
+  # Active: 0 1 2
+  $schema->svp_release('testing3');
+
+  $arty->discard_changes;
+  cmp_ok($arty->name, 'eq', 'watson', 'release left data');
+
+  # This rolls back savepoint 2
+  # Active: 0 1 2
+  $schema->svp_rollback;
+
+  $arty->discard_changes;
+  cmp_ok($arty->name, 'eq', 'yourmom', 'rolled back to 2');
+
+  # Rollback the original savepoint, taking us back to the beginning, implicitly
+  # rolling back savepoint 1 and 2
+  $schema->svp_rollback('savepoint_0');
+  $arty->discard_changes;
+  cmp_ok($arty->name, 'eq', 'foo', 'rolled back to start');
+
+  $schema->txn_commit;
+
+  is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' );
+
+  # And now to see if txn_do will behave correctly
+  $schema->txn_do (sub {
+    my $artycp = $arty;
+
+    $schema->txn_do (sub {
+      $artycp->name ('Muff');
+      $artycp->update;
+    });
+
+    eval {
+      $schema->txn_do (sub {
+        $artycp->name ('Moff');
+        $artycp->update;
+        $artycp->discard_changes;
+        is($artycp->name,'Moff','Value updated in nested transaction');
+        $schema->storage->dbh->do ("GUARANTEED TO PHAIL");
+      });
+    };
+
+    ok ($@,'Nested transaction failed (good)');
+
+    $arty->discard_changes;
+
+    is($arty->name,'Muff','auto_savepoint rollback worked');
+
+    $arty->name ('Miff');
+
+    $arty->update;
+  });
+
+  is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' );
+
+  $arty->discard_changes;
+
+  is($arty->name,'Miff','auto_savepoint worked');
+
+  cmp_ok($stats->{'SVP_BEGIN'},'==',7,'Correct number of savepoints created');
+
+  cmp_ok($stats->{'SVP_RELEASE'},'==',3,'Correct number of savepoints released');
+
+  cmp_ok($stats->{'SVP_ROLLBACK'},'==',5,'Correct number of savepoint rollbacks');
+
+### test originally written for SQLite exclusively (git blame -w -C -M)
+  # test two-phase commit and inner transaction rollback from nested transactions
+  my $ars = $schema->resultset('Artist');
+
+  $schema->txn_do(sub {
+    $ars->create({ name => 'in_outer_transaction' });
+    $schema->txn_do(sub {
+      $ars->create({ name => 'in_inner_transaction' });
+    });
+    ok($ars->search({ name => 'in_inner_transaction' })->first,
+      'commit from inner transaction visible in outer transaction');
+    throws_ok {
+      $schema->txn_do(sub {
+        $ars->create({ name => 'in_inner_transaction_rolling_back' });
+        die 'rolling back inner transaction';
+      });
+    } qr/rolling back inner transaction/, 'inner transaction rollback executed';
+    $ars->create({ name => 'in_outer_transaction2' });
+  });
+
+  is_deeply( $schema->storage->savepoints, [], 'All savepoints forgotten' );
+
+  ok($ars->search({ name => 'in_outer_transaction' })->first,
+    'commit from outer transaction');
+  ok($ars->search({ name => 'in_outer_transaction2' })->first,
+    'second commit from outer transaction');
+  ok($ars->search({ name => 'in_inner_transaction' })->first,
+    'commit from inner transaction');
+  is $ars->search({ name => 'in_inner_transaction_rolling_back' })->first,
+    undef,
+    'rollback from inner transaction';
+
+### cleanupz
+  $schema->storage->dbh->do ("DROP TABLE artist");
+}}
+
+done_testing;
+
+END {
+  eval { $schema->storage->dbh->do ("DROP TABLE artist") } if defined $schema;
+  undef $schema;
+}
@@ -407,4 +407,41 @@ warnings_are {
 
 } [], 'No warnings on AutoCommit => 0 with txn_do';
 
+
+# make sure we are not fucking up the stacktrace on broken overloads
+{
+  package DBICTest::BrokenOverload;
+
+  use overload '""' => sub { $_[0] };
+}
+
+{
+  my @w;
+  local $SIG{__WARN__} = sub {
+    $_[0] =~ /\QExternal exception class DBICTest::BrokenOverload implements partial (broken) overloading preventing its instances from being used in simple (\E\$x eq \$y\Q) comparisons/
+      ? push @w, @_
+      : warn @_
+  };
+
+  my $s = DBICTest->init_schema(no_deploy => 1);
+  $s->stacktrace(0);
+  my $g = $s->storage->txn_scope_guard;
+  my $broken_exception = bless {}, 'DBICTest::BrokenOverload';
+
+  # FIXME - investigate what confuses the regex engine below
+
+  # do not reformat - line-num part of the test
+  my $ln = __LINE__ + 6;
+  throws_ok {
+    $s->txn_do( sub {
+      $s->txn_do( sub {
+        $s->storage->_dbh->disconnect;
+        die $broken_exception
+      });
+    })
+  } qr/\QTransaction aborted: $broken_exception. Rollback failed: lost connection to storage at @{[__FILE__]} line $ln\E\n/;  # FIXME wtf - ...\E$/m doesn't work here
+
+  is @w, 1, 'One matching warning only';
+}
+
 done_testing;
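
For contrast with txn_do above, the guard obtained from txn_scope_guard follows the usual scope-guard contract: call commit on success, or simply let it fall out of scope to roll the transaction back (DBIC warns when that happens without an exception in flight). A minimal sketch, assuming the same DBICTest schema:

  use strict;
  use warnings;

  use lib qw(t/lib);
  use DBICTest;

  my $schema = DBICTest->init_schema;

  {
    my $guard = $schema->txn_scope_guard;

    $schema->resultset('Artist')->create({ name => 'guarded artist' });

    $guard->commit;   # without this, leaving the block rolls the insert back
  }
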
@@ -199,7 +199,7 @@ for my $post_poison (0,1) {
 
   my @w;
   local $SIG{__WARN__} = sub {
-    $_[0] =~ /External exception object .+? \Qimplements partial (broken) overloading/
+    $_[0] =~ /External exception class .+? \Qimplements partial (broken) overloading/
       ? push @w, @_
       : warn @_
   };
@@ -4,10 +4,10 @@ use warnings;
 use Test::More;
 
 use lib 't/lib';
-use DBICTest::RunMode;
-
-if ( DBICTest::RunMode->is_plain ) {
-  plan( skip_all => "Skipping test on plain module install" );
+BEGIN {
+  require DBICTest::RunMode;
+  plan( skip_all => "Skipping test on plain module install" )
+    if DBICTest::RunMode->is_plain;
 }
 
 use Test::Exception;
@@ -0,0 +1,83 @@
+use warnings;
+use strict;
+
+use Test::More;
+use Config;
+use File::Spec;
+
+my @known_authors = do {
+  # according to #p5p this is how one safely reads random unicode
+  # this set of boilerplate is insane... wasn't perl unicode-king...?
+  no warnings 'once';
+  require Encode;
+  require PerlIO::encoding;
+  local $PerlIO::encoding::fallback = Encode::FB_CROAK();
+
+  open (my $fh, '<:encoding(UTF-8)', 'AUTHORS') or die "Unable to open AUTHORS - can't happen: $!\n";
+  map { chomp; ( ( ! $_ or $_ =~ /^\s*\#/ ) ? () : $_ ) } <$fh>;
+
+} or die "Known AUTHORS file seems empty... can't happen...";
+
+is_deeply (
+  [ grep { /^\s/ or /\s\s/ } @known_authors ],
+  [],
+  "No entries with leading or doubled space",
+);
+
+is_deeply (
+  [ grep { / \:[^\s\/] /x or /^ [^:]*? \s+ \: /x } @known_authors ],
+  [],
+  "No entries with malformed nicks",
+);
+
+is_deeply (
+  \@known_authors,
+  [ sort { lc $a cmp lc $b } @known_authors ],
+  'Author list is case-insensitively sorted'
+);
+
+my $email_re = qr/( \< [^\<\>]+ \> ) $/x;
+
+my (%known_authors, $count);
+for (@known_authors) {
+  my ($name_email) = m/ ^ (?: [^\:]+ \: \s )? (.+) /x;
+  my ($email) = $name_email =~ $email_re;
+
+  if (
+    $known_authors{$name_email}++
+      or
+    ( $email and $known_authors{$email}++ )
+  ) {
+    fail "Duplicate found: $name_email";
+  }
+  else {
+    $count++;
+  }
+}
+
+# do not announce anything under travis - we are watching for STDERR silence
+diag "\n\n$count contributors made this library what it is today\n\n"
+  unless ($ENV{TRAVIS}||'') eq 'true';
+
+# augh taint mode
+if (length $ENV{PATH}) {
+  ( $ENV{PATH} ) = join ( $Config{path_sep},
+    map { length($_) ? File::Spec->rel2abs($_) : () }
+      split /\Q$Config{path_sep}/, $ENV{PATH}
+  ) =~ /\A(.+)\z/;
+}
+
+# this may fail - not every system has git
+if (my @git_authors = map
+  { my ($gitname) = m/^ \s* \d+ \s* (.+?) \s* $/mx; utf8::decode($gitname); $gitname }
+  qx( git shortlog -e -s )
+) {
+  for (@git_authors) {
+    my ($eml) = $_ =~ $email_re;
+
+    fail "Commit author '$_' (from git) not reflected in AUTHORS, perhaps a missing .mailmap entry?"
+      unless $known_authors{$eml};
+  }
+}
+
+done_testing;
@@ -0,0 +1,51 @@
+use warnings;
+use strict;
+
+use Test::More;
+use File::Find;
+
+my $boilerplate_headings = q{
+=head1 FURTHER QUESTIONS?
+
+Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
+
+=head1 COPYRIGHT AND LICENSE
+
+This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
+by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
+redistribute it and/or modify it under the same terms as the
+L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
+};
+
+find({
+  wanted => sub {
+    my $fn = $_;
+
+    return unless -f $fn;
+    return unless $fn =~ / \. (?: pm | pod ) $ /ix;
+
+    my $data = do { local (@ARGV, $/) = $fn; <> };
+
+    if ($data !~ /^=head1 NAME/m) {
+
+      # the generator is full of false positives, .pod is where it's at
+      return if $fn =~ qr{\Qlib/DBIx/Class/Optional/Dependencies.pm};
+
+      ok ( $data !~ /\bcopyright\b/i, "No copyright notices in $fn without apparent POD" );
+    }
+    elsif ($fn =~ qr{\Qlib/DBIx/Class.}) {
+      # nothing to check there - a static set of words
+    }
+    else {
+      ok ( $data !~ / ^ =head1 \s $_ /xmi, "No standalone $_ headings in $fn" )
+        for qw(AUTHOR CONTRIBUTOR LICENSE LICENCE);
+
+      ok ( $data !~ / ^ =head1 \s COPYRIGHT \s (?! AND \s LICENSE )/xmi, "No standalone COPYRIGHT headings in $fn" );
+
+      ok ($data =~ / \Q$boilerplate_headings\E (?! .*? ^ =head )/xms, "Expected headings found at the end of $fn");
+    }
+  },
+  no_chdir => 1,
+}, (qw(lib examples)) );
+
+done_testing;
@@ -119,7 +119,6 @@ is_deeply(
 is_deeply(
   DBIx::Class::Optional::Dependencies->req_list_for('test_rdbms_pg'),
   {
-    $^O ne 'MSWin32' ? ('Sys::SigAction' => '0') : (),
     'DBD::Pg'        => '2.009002',
   }, 'optional dependencies for testing Postgres with ENV var ok');
 
@@ -57,6 +57,7 @@ my $exceptions = {
             store_column
             get_column
             get_columns
+            has_column_loaded
         /],
     },
     'DBIx::Class::ResultSource' => {
@@ -0,0 +1,48 @@
+use warnings;
+use strict;
+
+use Test::More;
+use Test::Warn;
+
+use DBIx::Class::_Util 'quote_sub';
+
+my $q = do {
+  no strict 'vars';
+  quote_sub '$x = $x . "buh"; $x += 42';
+};
+
+warnings_exist {
+  is $q->(), 42, 'Expected result after uninit and string/num conversion'
+} [
+  qr/Use of uninitialized value/i,
+  qr/isn't numeric in addition/,
+], 'Expected warnings, strict did not leak inside the qsub'
+  or do {
+    require B::Deparse;
+    diag( B::Deparse->new->coderef2text( Sub::Quote::unquote_sub($q) ) )
+  }
+;
+
+my $no_nothing_q = do {
+  no strict;
+  no warnings;
+  quote_sub <<'EOC';
+    BEGIN { warn "-->${^WARNING_BITS}<--\n" };
+    my $n = "Test::Warn::warnings_exist";
+    warn "-->@{[ *{$n}{CODE} ]}<--\n";
+EOC
+};
+
+my $we_cref = Test::Warn->can('warnings_exist');
+
+warnings_exist { $no_nothing_q->() } [
+  qr/^\-\-\>\0+\<\-\-$/m,
+  qr/^\Q-->$we_cref<--\E$/m,
+], 'Expected warnings, strict did not leak inside the qsub'
+  or do {
+    require B::Deparse;
+    diag( B::Deparse->new->coderef2text( Sub::Quote::unquote_sub($no_nothing_q) ) )
+  }
+;
+
+done_testing;
@@ -12,9 +12,18 @@ unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_strictures') ) {
     : plan skip_all => "Test needs: $missing"
 }
 
-
 use File::Find;
 
+# The rationale is - if we can load all our optdeps
+# that are related to lib/ - then we should be able to run
+# perl -c checks (via syntax_ok), and all should just work
+my $missing_groupdeps_present = grep
+  { DBIx::Class::Optional::Dependencies->req_ok_for($_) }
+  grep
+    { $_ !~ /^ (?: test | rdbms | dist ) _ /x }
+    keys %{DBIx::Class::Optional::Dependencies->req_group_list}
+;
+
 find({
   wanted => sub {
     -f $_ or return;
@@ -31,7 +40,8 @@ find({
     Test::Strict::strict_ok($f);
     Test::Strict::warnings_ok($f);
 
-    #Test::Strict::syntax_ok($f) if $f =~ /^ (?: lib  )/x;
+    Test::Strict::syntax_ok($f)
+      if ! $missing_groupdeps_present and $f =~ /^ (?: lib  )/x;
   },
   no_chdir => 1,
 }, (qw(lib t examples maint)) );