New upstream version 0.082842
gregor herrmann
3 years ago
10 | 10 | |
11 | 11 | abraxxa: Alexander Hartmaier <abraxxa@cpan.org> |
12 | 12 | acca: Alexander Kuznetsov <acca@cpan.org> |
13 | acme: Leon Brocard <acme@astray.com> | |
13 | 14 | aherzog: Adam Herzog <adam@herzogdesigns.com> |
14 | 15 | Alexander Keusch <cpan@keusch.at> |
15 | 16 | alexrj: Alessandro Ranellucci <aar@cpan.org> |
211 | 212 | willert: Sebastian Willert <willert@cpan.org> |
212 | 213 | wintermute: Toby Corkindale <tjc@cpan.org> |
213 | 214 | wreis: Wallace Reis <wreis@cpan.org> |
215 | x86-64 <x86mail@gmail.com> | |
214 | 216 | xenoterracide: Caleb Cushing <xenoterracide@gmail.com> |
215 | 217 | xmikew: Mike Wisener <xmikew@32ths.com> |
216 | 218 | yrlnry: Mark Jason Dominus <mjd@plover.com> |
0 | 0 | Revision history for DBIx::Class |
1 | 1 | |
2 | 0.082842 2020-06-16 20:10 (UTC) | |
3 | * New Features | |
4 | - An on_connect rebase_sqlmaker call allowing experimentation with | |
5 | non-core SQL generators on a per-$schema-instance basis | |
6 | https://is.gd/DBIC_rebase_sqlmaker | |
7 | - Automatically detect and use multi-column IN on recent versions of | |
8 | libsqlite: ... WHERE ( foo, bar ) IN ( SELECT foo, bar FROM ... ) | |
9 | ||
10 | * Fixes | |
11 | - Fix silent failure to retrieve a primary key (RT#80283) or worse: | |
12 | returning an incorrect value (RT#115381) in case a rdbms-side autoinc | |
13 | column is declared as PK with the is_auto_increment attribute unset | |
14 | - Fix overly-aggressive condition unrolling, corrupting custom ops with | |
15 | array arguments (RT#132390) | |
16 | - Fix docs on how to properly use Moo(se) in ResultSet's, and fix a | |
17 | corner case of ->count not functioning properly when the old recipe | |
18 | was being used (GH#105) | |
19 | - Fix incorrect ::Storage->_ping() behavior under Sybase (RT#114214) | |
20 | - Work around breakage in Hash::Merge by soft-requiring Clone as part | |
21 | of the replicated subsystem (RT#124321) | |
22 | ||
23 | * Misc | |
24 | - DBIC_TRACE_PROFILE=... now uses a ::Storage::Statistics subclass | |
25 | DBIx::Class::Storage::Debug::PrettyTrace which properly ships as a | |
26 | part of this distrinbution | |
27 | - Switch out SQL::Abstract dependency with a slower moving dist | |
28 | - Remove Data::Page dependency by inlining its entirety into the core | |
29 | DBIx::Class::ResultSet::Pager (RT#130686) | |
2 | 30 | |
3 | 31 | 0.082841 2018-01-29 08:10 (UTC) |
4 | 32 | * Test-suite fixup changes only - no reason to upgrade, wait for 0.082850 |
0 | DBIx::Class is Copyright (c) 2005-2018 by mst, castaway, ribasushi, and others. | |
0 | DBIx::Class is Copyright (c) 2005-2020 by mst, castaway, ribasushi, and others. | |
1 | 1 | See AUTHORS and LICENSE included with this distribution. All rights reserved. |
2 | 2 | |
3 | 3 | This is free software; you can redistribute it and/or modify it under the |
135 | 135 | lib/DBIx/Class/SQLAHacks/OracleJoins.pm |
136 | 136 | lib/DBIx/Class/SQLAHacks/SQLite.pm |
137 | 137 | lib/DBIx/Class/SQLMaker.pm |
138 | lib/DBIx/Class/SQLMaker.pod | |
139 | 138 | lib/DBIx/Class/SQLMaker/ACCESS.pm |
139 | lib/DBIx/Class/SQLMaker/ClassicExtensions.pm | |
140 | 140 | lib/DBIx/Class/SQLMaker/LimitDialects.pm |
141 | 141 | lib/DBIx/Class/SQLMaker/MSSQL.pm |
142 | 142 | lib/DBIx/Class/SQLMaker/MySQL.pm |
143 | 143 | lib/DBIx/Class/SQLMaker/Oracle.pm |
144 | 144 | lib/DBIx/Class/SQLMaker/OracleJoins.pm |
145 | lib/DBIx/Class/SQLMaker/OracleJoins.pod | |
146 | 145 | lib/DBIx/Class/SQLMaker/SQLite.pm |
147 | 146 | lib/DBIx/Class/StartupCheck.pm |
148 | 147 | lib/DBIx/Class/Storage.pm |
197 | 196 | lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm |
198 | 197 | lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm |
199 | 198 | lib/DBIx/Class/Storage/DBIHacks.pm |
199 | lib/DBIx/Class/Storage/Debug/PrettyTrace.pm | |
200 | 200 | lib/DBIx/Class/Storage/Statistics.pm |
201 | 201 | lib/DBIx/Class/Storage/TxnScopeGuard.pm |
202 | 202 | lib/DBIx/Class/UTF8Columns.pm |
220 | 220 | maint/Makefile.PL.inc/56_autogen_schema_files.pl |
221 | 221 | maint/Makefile.PL.inc/61_inject_dbicadmin_pod.pl |
222 | 222 | maint/Makefile.PL.inc/91_inc_sanity_check.pl |
223 | maint/poisonsmoke.bash | |
224 | maint/travis_buildlog_downloader | |
223 | 225 | Makefile.PL |
224 | 226 | MANIFEST This list of files |
225 | 227 | META.yml |
268 | 270 | t/63register_source.t |
269 | 271 | t/64db.t |
270 | 272 | t/65multipk.t |
271 | t/67pager.t | |
272 | 273 | t/69update.t |
273 | 274 | t/70auto.t |
274 | 275 | t/71mysql.t |
388 | 389 | t/count/joined.t |
389 | 390 | t/count/prefetch.t |
390 | 391 | t/count/search_related.t |
392 | t/debug/bulk-insert.t | |
393 | t/debug/core.t | |
394 | t/debug/no-repeats.t | |
395 | t/debug/pretty.t | |
396 | t/debug/show-progress.t | |
391 | 397 | t/delete/cascade_missing.t |
392 | 398 | t/delete/complex.t |
393 | 399 | t/delete/m2m.t |
506 | 512 | t/lib/DBICTest/Schema/VaryingMAX.pm |
507 | 513 | t/lib/DBICTest/Schema/Year1999CDs.pm |
508 | 514 | t/lib/DBICTest/Schema/Year2000CDs.pm |
515 | t/lib/DBICTest/SQLMRebase.pm | |
509 | 516 | t/lib/DBICTest/SQLTracerObj.pm |
510 | 517 | t/lib/DBICTest/SyntaxErrorComponent1.pm |
511 | 518 | t/lib/DBICTest/SyntaxErrorComponent2.pm |
561 | 568 | t/multi_create/torture.t |
562 | 569 | t/ordered/cascade_delete.t |
563 | 570 | t/ordered/unordered_movement.t |
571 | t/pager/data_page_compat/constructor.t | |
572 | t/pager/data_page_compat/simple.t | |
573 | t/pager/dbic_core.t | |
564 | 574 | t/prefetch/attrs_untouched.t |
565 | 575 | t/prefetch/correlated.t |
566 | 576 | t/prefetch/count.t |
659 | 669 | t/sqlmaker/order_by_func.t |
660 | 670 | t/sqlmaker/pg.t |
661 | 671 | t/sqlmaker/quotes.t |
672 | t/sqlmaker/rebase.t | |
662 | 673 | t/sqlmaker/sqlite.t |
663 | 674 | t/storage/base.t |
664 | 675 | t/storage/cursor.t |
665 | 676 | t/storage/dbh_do.t |
666 | 677 | t/storage/dbi_coderef.t |
667 | 678 | t/storage/dbi_env.t |
668 | t/storage/dbic_pretty.t | |
669 | t/storage/debug.t | |
670 | 679 | t/storage/deploy.t |
671 | 680 | t/storage/deprecated_exception_source_bind_attrs.t |
672 | 681 | t/storage/disable_sth_caching.t |
36 | 36 | - DBIx::Class::Storage::BlockRunner |
37 | 37 | - DBIx::Class::Carp |
38 | 38 | - DBIx::Class::_Util |
39 | - DBIx::Class::ResultSet::Pager | |
40 | 39 | requires: |
41 | 40 | Class::Accessor::Grouped: 0.10012 |
42 | 41 | Class::C3::Componentised: 1.0009 |
45 | 44 | Context::Preserve: 0.01 |
46 | 45 | DBI: 1.57 |
47 | 46 | Data::Dumper::Concise: 2.020 |
48 | Data::Page: 2.00 | |
49 | 47 | Devel::GlobalDestruction: 0.09 |
50 | 48 | Hash::Merge: 0.12 |
51 | List::Util: 1.16 | |
52 | 49 | MRO::Compat: 0.12 |
53 | 50 | Module::Find: 0.07 |
54 | 51 | Moo: 2.000 |
55 | 52 | Path::Class: 0.18 |
56 | SQL::Abstract: 1.81 | |
53 | SQL::Abstract::Classic: 1.91 | |
57 | 54 | Scope::Guard: 0.03 |
58 | 55 | Sub::Name: 0.04 |
59 | 56 | Text::Balanced: 2.00 |
61 | 58 | namespace::clean: 0.24 |
62 | 59 | perl: 5.8.1 |
63 | 60 | resources: |
64 | IRC: irc://irc.perl.org/#dbix-class | |
65 | MailingList: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class | |
66 | bugtracker: http://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class | |
61 | bugtracker: https://rt.cpan.org/Public/Dist/Display.html?Name=DBIx-Class | |
67 | 62 | license: http://dev.perl.org/licenses/ |
68 | 63 | repository: https://github.com/Perl5/DBIx-Class |
69 | version: 0.082841 | |
64 | version: 0.082842 | |
70 | 65 | x_authority: cpan:RIBASUSHI |
71 | 66 | x_contributors: |
72 | 67 | - 'abraxxa: Alexander Hartmaier <abraxxa@cpan.org>' |
73 | 68 | - 'acca: Alexander Kuznetsov <acca@cpan.org>' |
69 | - 'acme: Leon Brocard <acme@astray.com>' | |
74 | 70 | - 'aherzog: Adam Herzog <adam@herzogdesigns.com>' |
75 | 71 | - 'Alexander Keusch <cpan@keusch.at>' |
76 | 72 | - 'alexrj: Alessandro Ranellucci <aar@cpan.org>' |
272 | 268 | - 'willert: Sebastian Willert <willert@cpan.org>' |
273 | 269 | - 'wintermute: Toby Corkindale <tjc@cpan.org>' |
274 | 270 | - 'wreis: Wallace Reis <wreis@cpan.org>' |
271 | - 'x86-64 <x86mail@gmail.com>' | |
275 | 272 | - 'xenoterracide: Caleb Cushing <xenoterracide@gmail.com>' |
276 | 273 | - 'xmikew: Mike Wisener <xmikew@32ths.com>' |
277 | 274 | - 'yrlnry: Mark Jason Dominus <mjd@plover.com>' |
56 | 56 | ### |
57 | 57 | 'DBI' => '1.57', |
58 | 58 | |
59 | # on older versions first() leaks | |
60 | # for the time being make it a hard dep - when we get | |
61 | # rid of Sub::Name will revisit this (possibility is | |
62 | # to use Devel::HideXS to force the pure-perl version | |
63 | # or something like that) | |
64 | 'List::Util' => '1.16', | |
65 | ||
66 | 59 | # XS (or XS-dependent) libs |
67 | 60 | 'Sub::Name' => '0.04', |
68 | 61 | |
73 | 66 | 'Config::Any' => '0.20', |
74 | 67 | 'Context::Preserve' => '0.01', |
75 | 68 | 'Data::Dumper::Concise' => '2.020', |
76 | 'Data::Page' => '2.00', | |
77 | 69 | 'Devel::GlobalDestruction' => '0.09', |
78 | 70 | 'Hash::Merge' => '0.12', |
79 | 71 | 'Moo' => '2.000', |
82 | 74 | 'namespace::clean' => '0.24', |
83 | 75 | 'Path::Class' => '0.18', |
84 | 76 | 'Scope::Guard' => '0.03', |
85 | 'SQL::Abstract' => '1.81', | |
77 | 'SQL::Abstract::Classic' => '1.91', | |
86 | 78 | 'Try::Tiny' => '0.07', |
87 | 79 | |
88 | 80 | # Technically this is not a core dependency - it is only required |
144 | 136 | ( |
145 | 137 | ( $ENV{TRAVIS}||'' ) eq 'true' |
146 | 138 | and |
147 | ($ENV{TRAVIS_REPO_SLUG}||'') =~ m|\w+/dbix-class$| | |
139 | ($ENV{TRAVIS_REPO_SLUG}||'') =~ m|\w+/DBIx-Class$| | |
148 | 140 | ) |
149 | 141 | or |
150 | 142 | ( $ENV{AUTOMATED_TESTING} and ! $ENV{PERL5_CPANM_IS_RUNNING} and ! $ENV{RELEASE_TESTING} ) |
0 | DBIx::Class is Copyright (c) 2005-2018 by mst, castaway, ribasushi, and others. | |
0 | DBIx::Class is Copyright (c) 2005-2020 by mst, castaway, ribasushi, and others. | |
1 | 1 | See AUTHORS and LICENSE included with this distribution. All rights reserved. |
2 | 2 | |
3 | 3 | NAME |
16 | 16 | particular approach do not hesitate to contact us via any of the |
17 | 17 | following options (the list is sorted by "fastest response time"): |
18 | 18 | |
19 | * IRC: irc.perl.org#dbix-class | |
20 | ||
21 | * Mailing list: <http://lists.scsys.co.uk/mailman/listinfo/dbix-class> | |
22 | ||
23 | 19 | * RT Bug Tracker: |
24 | <https://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class> | |
25 | ||
26 | * Twitter: <https://www.twitter.com/dbix_class> | |
27 | ||
28 | * Web Site: <http://www.dbix-class.org/> | |
20 | <https://rt.cpan.org/Public/Dist/Display.html?Name=DBIx-Class> | |
21 | ||
22 | * Email: <mailto:bug-DBIx-Class@rt.cpan.org> | |
23 | ||
24 | * Twitter: | |
25 | <https://twitter.com/intent/tweet?text=%40ribasushi%20%23DBIC> | |
29 | 26 | |
30 | 27 | SYNOPSIS |
31 | 28 | For the very impatient: DBIx::Class::Manual::QuickStart |
96 | 93 | # Create a result set to search for artists. |
97 | 94 | # This does not query the DB. |
98 | 95 | my $johns_rs = $schema->resultset('Artist')->search( |
99 | # Build your WHERE using an SQL::Abstract structure: | |
96 | # Build your WHERE using an SQL::Abstract::Classic-compatible structure: | |
100 | 97 | { name => { like => 'John%' } } |
101 | 98 | ); |
102 | 99 | |
180 | 177 | |
181 | 178 | * Current git repository: <https://github.com/Perl5/DBIx-Class> |
182 | 179 | |
183 | * Travis-CI log: <https://travis-ci.org/Perl5/DBIx-Class/branches> | |
180 | * Travis-CI log: | |
181 | <https://travis-ci.com/github/Perl5/DBIx-Class/branches> | |
184 | 182 | |
185 | 183 | AUTHORS |
186 | 184 | Even though a large portion of the source *appears* to be written by |
192 | 190 | questions and suggestions have been shown to catalyze monumental |
193 | 191 | improvements in consistency, accuracy and performance. |
194 | 192 | |
195 | List of the awesome contributors who made DBIC v0.082841 possible | |
193 | List of the awesome contributors who made DBIC v0.082842 possible | |
196 | 194 | |
197 | 195 | abraxxa: Alexander Hartmaier <abraxxa@cpan.org> |
198 | 196 | |
199 | 197 | acca: Alexander Kuznetsov <acca@cpan.org> |
200 | 198 | |
199 | acme: Leon Brocard <acme@astray.com> | |
200 | ||
201 | 201 | aherzog: Adam Herzog <adam@herzogdesigns.com> |
202 | 202 | |
203 | 203 | Alexander Keusch <cpan@keusch.at> |
599 | 599 | wintermute: Toby Corkindale <tjc@cpan.org> |
600 | 600 | |
601 | 601 | wreis: Wallace Reis <wreis@cpan.org> |
602 | ||
603 | x86-64 <x86mail@gmail.com> | |
602 | 604 | |
603 | 605 | xenoterracide: Caleb Cushing <xenoterracide@gmail.com> |
604 | 606 |
Binary diff not shown
81 | 81 | |
82 | 82 | =head3 Class::DBI::Sweet |
83 | 83 | |
84 | The features of CDBI::Sweet are better done using DBIC methods which are almost exactly the same. It even uses L<Data::Page>. | |
84 | The features of CDBI::Sweet are better done using DBIC methods which are almost exactly the same. | |
85 | 85 | |
86 | 86 | =head3 Class::DBI::Plugin::DeepAbstractSearch |
87 | 87 |
2 | 2 | use warnings; |
3 | 3 | |
4 | 4 | use base 'DBIx::Class::Row'; |
5 | use SQL::Abstract 'is_literal_value'; | |
5 | use SQL::Abstract::Util 'is_literal_value'; | |
6 | 6 | use namespace::clean; |
7 | 7 | |
8 | 8 | sub filter_column { |
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base 'DBIx::Class::Row'; |
6 | use SQL::Abstract 'is_literal_value'; | |
6 | use SQL::Abstract::Util 'is_literal_value'; | |
7 | 7 | use namespace::clean; |
8 | 8 | |
9 | 9 | =head1 NAME |
37 | 37 | |
38 | 38 | It will handle all types of references except scalar references. It |
39 | 39 | will not handle scalar values, these are ignored and thus passed |
40 | through to L<SQL::Abstract>. This is to allow setting raw values to | |
41 | "just work". Scalar references are passed through to the database to | |
42 | deal with, to allow such settings as C< \'year + 1'> and C< \'DEFAULT' > | |
43 | to work. | |
40 | through to L<SQL::Abstract::Classic>. This is to allow setting raw | |
41 | values to "just work". Scalar references are passed through to the | |
42 | database to deal with, to allow such settings as C< \'year + 1'> and | |
43 | C< \'DEFAULT' > to work. | |
44 | 44 | |
45 | 45 | If you want to filter plain scalar values and replace them with |
46 | 46 | something else, see L<DBIx::Class::FilterColumn>. |
38 | 38 | |
39 | 39 | It will handle all types of references except scalar references. It |
40 | 40 | will not handle scalar values, these are ignored and thus passed |
41 | through to L<SQL::Abstract>. This is to allow setting raw values to | |
42 | "just work". Scalar references are passed through to the database to | |
43 | deal with, to allow such settings as C< \'year + 1'> and C< \'DEFAULT' > | |
44 | to work. | |
41 | through to L<SQL::Abstract::Classic>. This is to allow setting raw | |
42 | values to "just work". Scalar references are passed through to the | |
43 | database to deal with, to allow such settings as C< \'year + 1'> and | |
44 | C< \'DEFAULT' > to work. | |
45 | 45 | |
46 | 46 | If you want to filter plain scalar values and replace them with |
47 | 47 | something else, see L<DBIx::Class::FilterColumn>. |
20 | 20 | |
21 | 21 | return $rs->page(2); # records for page 2 |
22 | 22 | |
23 | You can get a L<Data::Page> object for the resultset (suitable for use | |
24 | in e.g. a template) using the C<pager> method: | |
23 | You can get a L<DBIx::Class::ResultSet::Pager> object for the resultset | |
24 | (suitable for use in e.g. a template) using the C<pager> method: | |
25 | 25 | |
26 | 26 | return $rs->pager(); |
27 | 27 | |
59 | 59 | OR artist = 'Starchildren' |
60 | 60 | |
61 | 61 | For more information on generating complex queries, see |
62 | L<SQL::Abstract/WHERE CLAUSES>. | |
62 | L<SQL::Abstract::Classic/WHERE CLAUSES>. | |
63 | 63 | |
64 | 64 | =head2 Retrieve one and only one row from a resultset |
65 | 65 | |
442 | 442 | Note: the syntax for specifying the bind value's datatype and value is |
443 | 443 | explained in L<DBIx::Class::ResultSet/DBIC BIND VALUES>. |
444 | 444 | |
445 | See also L<SQL::Abstract/Literal SQL with placeholders and bind values | |
445 | See also L<SQL::Abstract::Classic/Literal SQL with placeholders and bind values | |
446 | 446 | (subqueries)>. |
447 | 447 | |
448 | 448 | =head2 Software Limits |
1755 | 1755 | |
1756 | 1756 | $resultset->search( |
1757 | 1757 | { |
1758 | numbers => \[ '= ?', [numbers => [1, 2, 3]] ] | |
1759 | } | |
1760 | ); | |
1761 | ||
1762 | See L<SQL::Abstract/array_datatypes> and L<SQL::Abstract/Literal SQL with | |
1763 | placeholders and bind values (subqueries)> for more explanation. Note that | |
1764 | L<DBIx::Class> sets L<SQL::Abstract/bindtype> to C<columns>, so you must pass | |
1765 | the bind values (the C<[1, 2, 3]> arrayref in the above example) wrapped in | |
1766 | arrayrefs together with the column name, like this: | |
1767 | C<< [column_name => value] >>. | |
1758 | numbers => { -value => [1, 2, 3] } | |
1759 | } | |
1760 | ); | |
1768 | 1761 | |
1769 | 1762 | =head2 Formatting DateTime objects in queries |
1770 | 1763 |
36 | 36 | =item L<DBIx::Class::ResultSet/search> - Selecting and manipulating sets. |
37 | 37 | |
38 | 38 | The DSL (mini-language) for query composition is only partially explained there, |
39 | see L<SQL::Abstract/WHERE CLAUSES> for the complete details. | |
39 | see L<SQL::Abstract::Classic/WHERE CLAUSES> for the complete details. | |
40 | 40 | |
41 | 41 | =item L<C<$schema>::Result::C<$resultclass>|DBIx::Class::Manual::ResultClass> |
42 | 42 | - Classes representing a single result (row) from a DB query. |
245 | 245 | |
246 | 246 | =item .. find more help on constructing searches? |
247 | 247 | |
248 | Behind the scenes, DBIx::Class uses L<SQL::Abstract> to help construct | |
248 | Behind the scenes, DBIx::Class uses L<SQL::Abstract::Classic> to help construct | |
249 | 249 | its SQL searches. So if you fail to find help in the |
250 | L<DBIx::Class::Manual::Cookbook>, try looking in the SQL::Abstract | |
250 | L<DBIx::Class::Manual::Cookbook>, try looking in the L<SQL::Abstract::Classic> | |
251 | 251 | documentation. |
252 | 252 | |
253 | 253 | =item .. make searches in Oracle (10gR2 and newer) case-insensitive? |
360 | 360 | |
361 | 361 | =item .. get a count of all rows even when paging? |
362 | 362 | |
363 | Call C<pager> on the paged resultset, it will return a L<Data::Page> | |
364 | object. Calling C<total_entries> on the pager will return the correct | |
365 | total. | |
363 | Call C<pager> on the paged resultset, it will return a L<pager object | |
364 | |DBIx::Class::ResultSet::Pager> with an API/behavior identical to that | |
365 | of L<Data::Page from late 2009 through late 2019 | |
366 | |https://metacpan.org/pod/release/LBROCARD/Data-Page-2.02/lib/Data/Page.pm>. | |
367 | Calling C<total_entries> on the pager will return the correct total. | |
366 | 368 | |
367 | 369 | C<count> on the resultset will only return the total number in the page. |
368 | 370 | |
508 | 510 | __PACKAGE__->table('foo'); #etc |
509 | 511 | __PACKAGE__->mk_group_accessors('simple' => qw/non_column_data/); # must use simple group |
510 | 512 | |
511 | An another method is to use L<Moose> with your L<DBIx::Class> package. | |
513 | And another method is to use L<Moose> with your L<DBIx::Class> package. | |
512 | 514 | |
513 | 515 | package App::Schema::Result::MyTable; |
514 | 516 |
162 | 162 | |
163 | 163 | =over 1 |
164 | 164 | |
165 | =item See L<SQL::Abstract>, L<DBIx::Class::ResultSet/next>, and L<DBIx::Class::ResultSet/search> | |
165 | =item See L<SQL::Abstract::Classic>, L<DBIx::Class::ResultSet/next>, and L<DBIx::Class::ResultSet/search> | |
166 | 166 | |
167 | 167 | =item (kinda) introspectible |
168 | 168 | |
662 | 662 | price => \['price + ?', [inc => $inc]], |
663 | 663 | }); |
664 | 664 | |
665 | See L<SQL::Abstract/Literal SQL with placeholders and bind values (subqueries)> | |
665 | See L<SQL::Abstract::Classic/Literal SQL with placeholders and bind values (subqueries)> | |
666 | 666 | |
667 | 667 | =head1 FURTHER QUESTIONS? |
668 | 668 |
374 | 374 | my @bind = ( 'Peter Frampton', 1986 ); |
375 | 375 | my $rs = $schema->resultset('Album')->search_literal( $where, @bind ); |
376 | 376 | |
377 | The preferred way to generate complex queries is to provide a L<SQL::Abstract> | |
378 | construct to C<search>: | |
377 | The preferred way to generate complex queries is to provide a | |
378 | L<SQL::Abstract::Classic>-compatible construct to C<search>: | |
379 | 379 | |
380 | 380 | my $rs = $schema->resultset('Album')->search({ |
381 | 381 | artist => { '!=', 'Janis Joplin' }, |
105 | 105 | |
106 | 106 | $rs->search( {}, { order_by => [ 'name DESC' ] } ); |
107 | 107 | |
108 | Since L<DBIx::Class> >= 0.08100 and L<SQL::Abstract> >= 1.50 the above | |
109 | should be written as: | |
108 | The above should be written as: | |
110 | 109 | |
111 | 110 | $rs->search( {}, { order_by => { -desc => 'name' } } ); |
112 | 111 | |
113 | 112 | For more ways to express order clauses refer to |
114 | L<SQL::Abstract/ORDER BY CLAUSES> | |
113 | L<SQL::Abstract::Classic/ORDER BY CLAUSES> | |
115 | 114 | |
116 | 115 | =head2 Perl Performance Issues on Red Hat Systems |
117 | 116 |
27 | 27 | }; |
28 | 28 | |
29 | 29 | my $replicated = { |
30 | 'Clone' => 0, | |
30 | 31 | %$moose_basic, |
31 | 32 | }; |
32 | 33 | |
618 | 619 | |
619 | 620 | dist_dir => { |
620 | 621 | req => { |
622 | %$admin_script, | |
621 | 623 | %$test_and_dist_json_any, |
622 | 624 | 'ExtUtils::MakeMaker' => '6.64', |
623 | 625 | 'Pod::Inherit' => '0.91', |
17 | 17 | |
18 | 18 | ... |
19 | 19 | |
20 | configure_requires 'DBIx::Class' => '0.082841'; | |
20 | configure_requires 'DBIx::Class' => '0.082842'; | |
21 | 21 | |
22 | 22 | require DBIx::Class::Optional::Dependencies; |
23 | 23 | |
338 | 338 | |
339 | 339 | =over |
340 | 340 | |
341 | =item * Clone | |
342 | ||
341 | 343 | =item * Moose >= 0.98 |
342 | 344 | |
343 | 345 | =item * MooseX::Types >= 0.21 |
1 | 1 | use strict; |
2 | 2 | use warnings; |
3 | 3 | use base qw( DBIx::Class ); |
4 | ||
5 | use List::Util 'first'; | |
6 | use namespace::clean; | |
7 | 4 | |
8 | 5 | =head1 NAME |
9 | 6 | |
563 | 560 | if (! keys %$changed_ordering_cols) { |
564 | 561 | return $self->next::method( undef, @_ ); |
565 | 562 | } |
566 | elsif (defined first { exists $changed_ordering_cols->{$_} } @group_columns ) { | |
563 | elsif (grep { exists $changed_ordering_cols->{$_} } @group_columns ) { | |
567 | 564 | $self->move_to_group( |
568 | 565 | # since the columns are already re-set the _grouping_clause is correct |
569 | 566 | # move_to_group() knows how to get the original storage values |
613 | 610 | # add the current position/group to the things we track old values for |
614 | 611 | sub _track_storage_value { |
615 | 612 | my ($self, $col) = @_; |
616 | return $self->next::method($col) || defined first { $_ eq $col } ($self->position_column, $self->_grouping_columns); | |
613 | return ( | |
614 | $self->next::method($col) | |
615 | || | |
616 | grep { $_ eq $col } ($self->position_column, $self->_grouping_columns) | |
617 | ); | |
617 | 618 | } |
618 | 619 | |
619 | 620 | =head1 METHODS FOR EXTENDING ORDERED |
739 | 740 | local $rsrc->schema->{_ORDERED_INTERNAL_UPDATE} = 1; |
740 | 741 | my @pcols = $rsrc->primary_columns; |
741 | 742 | if ( |
742 | first { $_ eq $position_column } ( map { @$_ } (values %{{ $rsrc->unique_constraints }} ) ) | |
743 | grep { $_ eq $position_column } ( map { @$_ } (values %{{ $rsrc->unique_constraints }} ) ) | |
743 | 744 | ) { |
744 | 745 | my $clean_rs = $rsrc->resultset; |
745 | 746 |
108 | 108 | |
109 | 109 | =head4 Multiple groups of simple equality conditions |
110 | 110 | |
111 | As is the default in L<SQL::Abstract>, the key-value pairs will be | |
111 | As is the default in L<SQL::Abstract::Classic>, the key-value pairs will be | |
112 | 112 | C<AND>ed in the resulting C<JOIN> clause. An C<OR> can be achieved with |
113 | 113 | an arrayref. For example a condition like: |
114 | 114 | |
173 | 173 | same values that would be otherwise substituted for C<foreign> and C<self> |
174 | 174 | in the simple hashref syntax case. |
175 | 175 | |
176 | The coderef is expected to return a valid L<SQL::Abstract> query-structure, just | |
177 | like what one would supply as the first argument to | |
176 | The coderef is expected to return a valid L<SQL::Abstract::Classic> | |
177 | query-structure, just like what one would supply as the first argument to | |
178 | 178 | L<DBIx::Class::ResultSet/search>. The return value will be passed directly to |
179 | L<SQL::Abstract> and the resulting SQL will be used verbatim as the C<ON> | |
180 | clause of the C<JOIN> statement associated with this relationship. | |
179 | L<DBIx::Class::SQLMaker> and the resulting SQL will be used verbatim as the | |
180 | C<ON> clause of the C<JOIN> statement associated with this relationship. | |
181 | 181 | |
182 | 182 | While every coderef-based condition must return a valid C<ON> clause, it may |
183 | 183 | elect to additionally return a simplified B<optional> join-free condition |
0 | package # hide from pause | |
1 | DBIx::Class::ResultSet::Pager; | |
0 | package DBIx::Class::ResultSet::Pager; | |
2 | 1 | |
3 | 2 | use warnings; |
4 | 3 | use strict; |
5 | 4 | |
6 | # temporary, to load MRO::Compat, will be soon entirely rewritten anyway | |
7 | use DBIx::Class::_Util; | |
8 | ||
9 | use base 'Data::Page'; | |
10 | use mro 'c3'; | |
11 | ||
12 | # simple support for lazy totals | |
13 | sub _total_entries_accessor { | |
14 | if (@_ == 1 and ref $_[0]->{total_entries} eq 'CODE') { | |
15 | return $_[0]->{total_entries} = $_[0]->{total_entries}->(); | |
16 | } | |
17 | ||
18 | return shift->next::method(@_); | |
19 | } | |
20 | ||
21 | sub _skip_namespace_frames { qr/^Data::Page/ } | |
5 | use DBIx::Class::Exception; | |
6 | ||
7 | sub new { | |
8 | my( $proto, $total_entries, $entries_per_page, $current_page ) = @_; | |
9 | ||
10 | my $self = {}; | |
11 | bless( $self, ( ref $proto || $proto ) ); | |
12 | ||
13 | $self->total_entries( $total_entries || 0 ); | |
14 | $self->entries_per_page( $entries_per_page || 10 ); | |
15 | $self->current_page( $current_page || 1 ); | |
16 | ||
17 | return $self; | |
18 | } | |
19 | ||
20 | ||
21 | sub entries_per_page { | |
22 | my $self = shift; | |
23 | ||
24 | return $self->{entries_per_page} | |
25 | unless @_; | |
26 | ||
27 | DBIx::Class::Exception->throw( "Fewer than one entry per page!" ) | |
28 | if $_[0] < 1; | |
29 | ||
30 | $self->{entries_per_page} = $_[0]; | |
31 | ||
32 | $self; | |
33 | } | |
34 | ||
35 | sub current_page { | |
36 | my $self = shift; | |
37 | ||
38 | if( @_ ) { | |
39 | $self->{current_page} = $_[0]; | |
40 | return $self; | |
41 | } | |
42 | ||
43 | return $self->first_page | |
44 | unless defined $self->{current_page}; | |
45 | ||
46 | return $self->first_page | |
47 | if $self->{current_page} < $self->first_page; | |
48 | ||
49 | return $self->last_page | |
50 | if $self->{current_page} > $self->last_page; | |
51 | ||
52 | $self->{current_page}; | |
53 | } | |
54 | ||
55 | sub total_entries { | |
56 | my $self = shift; | |
57 | ||
58 | if( @_ ) { | |
59 | $self->{total_entries} = $_[0]; | |
60 | return $self; | |
61 | } | |
62 | ||
63 | # lazification for DBIC's benefit | |
64 | if( ref $self->{total_entries} eq 'CODE' ) { | |
65 | $self->{total_entries} = $self->{total_entries}->(); | |
66 | } | |
67 | ||
68 | $self->{total_entries}; | |
69 | } | |
70 | ||
71 | ||
72 | sub entries_on_this_page { | |
73 | my $self = shift; | |
74 | ||
75 | if ( $self->total_entries == 0 ) { | |
76 | return 0; | |
77 | } else { | |
78 | return $self->last - $self->first + 1; | |
79 | } | |
80 | } | |
81 | ||
82 | sub first_page { | |
83 | return 1; | |
84 | } | |
85 | ||
86 | sub last_page { | |
87 | my $self = shift; | |
88 | ||
89 | my $pages = $self->total_entries / $self->entries_per_page; | |
90 | my $last_page; | |
91 | ||
92 | if ( $pages == int $pages ) { | |
93 | $last_page = $pages; | |
94 | } else { | |
95 | $last_page = 1 + int($pages); | |
96 | } | |
97 | ||
98 | $last_page = 1 if $last_page < 1; | |
99 | return $last_page; | |
100 | } | |
101 | ||
102 | sub first { | |
103 | my $self = shift; | |
104 | ||
105 | if ( $self->total_entries == 0 ) { | |
106 | return 0; | |
107 | } else { | |
108 | return ( ( $self->current_page - 1 ) * $self->entries_per_page ) + 1; | |
109 | } | |
110 | } | |
111 | ||
112 | sub last { | |
113 | my $self = shift; | |
114 | ||
115 | if ( $self->current_page == $self->last_page ) { | |
116 | return $self->total_entries; | |
117 | } else { | |
118 | return ( $self->current_page * $self->entries_per_page ); | |
119 | } | |
120 | } | |
121 | ||
122 | sub previous_page { | |
123 | my $self = shift; | |
124 | ||
125 | if ( $self->current_page > 1 ) { | |
126 | return $self->current_page - 1; | |
127 | } else { | |
128 | return undef; | |
129 | } | |
130 | } | |
131 | ||
132 | sub next_page { | |
133 | my $self = shift; | |
134 | ||
135 | $self->current_page < $self->last_page ? $self->current_page + 1 : undef; | |
136 | } | |
137 | ||
138 | # This method would probably be better named 'select' or 'slice' or | |
139 | # something, because it doesn't modify the array the way | |
140 | # CORE::splice() does. | |
141 | sub splice { | |
142 | my ( $self, $array ) = @_; | |
143 | my $top = @$array > $self->last ? $self->last : @$array; | |
144 | return () if $top == 0; # empty | |
145 | return @{$array}[ $self->first - 1 .. $top - 1 ]; | |
146 | } | |
147 | ||
148 | sub skipped { | |
149 | my $self = shift; | |
150 | ||
151 | my $skipped = $self->first - 1; | |
152 | return 0 if $skipped < 0; | |
153 | return $skipped; | |
154 | } | |
155 | ||
156 | sub change_entries_per_page { | |
157 | my ( $self, $new_epp ) = @_; | |
158 | ||
159 | use integer; | |
160 | croak("Fewer than one entry per page!") if $new_epp < 1; | |
161 | my $new_page = 1 + ( $self->first / $new_epp ); | |
162 | $self->entries_per_page($new_epp); | |
163 | $self->current_page($new_page); | |
164 | } | |
22 | 165 | |
23 | 166 | 1; |
167 | ||
168 | __END__ | |
169 | ||
170 | =head1 NAME | |
171 | ||
172 | DBIx::Class::ResultSet::Pager - help when paging through sets of results | |
173 | ||
174 | =head1 SYNOPSIS | |
175 | ||
176 | use DBIx::Class::ResultSet::Pager; | |
177 | ||
178 | my $page = DBIx::Class::ResultSet::Pager->new(); | |
179 | $page->total_entries($total_entries); | |
180 | $page->entries_per_page($entries_per_page); | |
181 | $page->current_page($current_page); | |
182 | ||
183 | print " First page: ", $page->first_page, "\n"; | |
184 | print " Last page: ", $page->last_page, "\n"; | |
185 | print "First entry on page: ", $page->first, "\n"; | |
186 | print " Last entry on page: ", $page->last, "\n"; | |
187 | ||
188 | =head1 DESCRIPTION | |
189 | ||
190 | This module is a near-verbatim copy of L<Data::Page 2.02 | |
191 | |https://metacpan.org/pod/release/LBROCARD/Data-Page-2.02/lib/Data/Page.pm>, | |
192 | which remained unchanged on CPAN from late 2009 through late 2019. The only | |
193 | differences are dropping a number of accessor generators in lieu of direct | |
194 | method implementations, and the incorporation of the lazily evaluated | |
195 | L</total_entries> which was the only part originally provided by | |
196 | L<DBIx::Class::ResultSet::Pager>. This module passes the entire contemporary | |
197 | test suite of L<Data::Page> unmodified. | |
198 | ||
199 | ||
200 | ||
201 | ||
202 | B<WHAT FOLLOWS IS A VERBATIM COPY OF Data::Page's 2.02 DOCUMENTATION> | |
203 | ||
204 | ||
205 | ||
206 | ||
207 | When searching through large amounts of data, it is often the case | |
208 | that a result set is returned that is larger than we want to display | |
209 | on one page. This results in wanting to page through various pages of | |
210 | data. The maths behind this is unfortunately fiddly, hence this | |
211 | module. | |
212 | ||
213 | The main concept is that you pass in the number of total entries, the | |
214 | number of entries per page, and the current page number. You can then | |
215 | call methods to find out how many pages of information there are, and | |
216 | what number the first and last entries on the current page really are. | |
217 | ||
218 | For example, say we wished to page through the integers from 1 to 100 | |
219 | with 20 entries per page. The first page would consist of 1-20, the | |
220 | second page from 21-40, the third page from 41-60, the fourth page | |
221 | from 61-80 and the fifth page from 81-100. This module would help you | |
222 | work this out. | |
223 | ||
224 | =head1 METHODS | |
225 | ||
226 | =head2 new | |
227 | ||
228 | This is the constructor, which takes no arguments. | |
229 | ||
230 | my $page = DBIx::Class::ResultSet::Pager->new(); | |
231 | ||
232 | There is also an old, deprecated constructor, which currently takes | |
233 | two mandatory arguments, the total number of entries and the number of | |
234 | entries per page. It also optionally takes the current page number: | |
235 | ||
236 | my $page = DBIx::Class::ResultSet::Pager->new($total_entries, $entries_per_page, $current_page); | |
237 | ||
238 | =head2 total_entries | |
239 | ||
240 | This method gets or sets the total number of entries: | |
241 | ||
242 | print "Entries:", $page->total_entries, "\n"; | |
243 | ||
244 | =head2 entries_per_page | |
245 | ||
246 | This method gets or sets the total number of entries per page (which | |
247 | defaults to 10): | |
248 | ||
249 | print "Per page:", $page->entries_per_page, "\n"; | |
250 | ||
251 | =head2 current_page | |
252 | ||
253 | This method gets or sets the current page number (which defaults to 1): | |
254 | ||
255 | print "Page: ", $page->current_page, "\n"; | |
256 | ||
257 | =head2 entries_on_this_page | |
258 | ||
259 | This method returns the number of entries on the current page: | |
260 | ||
261 | print "There are ", $page->entries_on_this_page, " entries displayed\n"; | |
262 | ||
263 | =head2 first_page | |
264 | ||
265 | This method returns the first page. This is put in for reasons of | |
266 | symmetry with last_page, as it always returns 1: | |
267 | ||
268 | print "Pages range from: ", $page->first_page, "\n"; | |
269 | ||
270 | =head2 last_page | |
271 | ||
272 | This method returns the total number of pages of information: | |
273 | ||
274 | print "Pages range to: ", $page->last_page, "\n"; | |
275 | ||
276 | =head2 first | |
277 | ||
278 | This method returns the number of the first entry on the current page: | |
279 | ||
280 | print "Showing entries from: ", $page->first, "\n"; | |
281 | ||
282 | =head2 last | |
283 | ||
284 | This method returns the number of the last entry on the current page: | |
285 | ||
286 | print "Showing entries to: ", $page->last, "\n"; | |
287 | ||
288 | =head2 previous_page | |
289 | ||
290 | This method returns the previous page number, if one exists. Otherwise | |
291 | it returns undefined: | |
292 | ||
293 | if ($page->previous_page) { | |
294 | print "Previous page number: ", $page->previous_page, "\n"; | |
295 | } | |
296 | ||
297 | =head2 next_page | |
298 | ||
299 | This method returns the next page number, if one exists. Otherwise | |
300 | it returns undefined: | |
301 | ||
302 | if ($page->next_page) { | |
303 | print "Next page number: ", $page->next_page, "\n"; | |
304 | } | |
305 | ||
306 | =head2 splice | |
307 | ||
308 | This method takes in a listref, and returns only the values which are | |
309 | on the current page: | |
310 | ||
311 | @visible_holidays = $page->splice(\@holidays); | |
312 | ||
313 | =head2 skipped | |
314 | ||
315 | This method is useful when paging through data in a database using SQL | |
316 | LIMIT clauses. It is simply $page->first - 1: | |
317 | ||
318 | $sth = $dbh->prepare( | |
319 | q{SELECT * FROM table ORDER BY rec_date LIMIT ?, ?} | |
320 | ); | |
321 | $sth->execute($page->skipped, $page->entries_per_page); | |
322 | ||
323 | =head2 change_entries_per_page | |
324 | ||
325 | This method changes the number of entries per page and the current page number | |
326 | such that the L</first> item on the current page will be present on the new page. | |
327 | ||
328 | $page->total_entries(50); | |
329 | $page->entries_per_page(20); | |
330 | $page->current_page(3); | |
331 | print $page->first; # 41 | |
332 | $page->change_entries_per_page(30); | |
333 | print $page->current_page; # 2 - the page that item 41 will show in | |
334 | ||
335 | =head1 NOTES | |
336 | ||
337 | It has been said before that this code is "too simple" for CPAN, but I | |
338 | must disagree. I have seen people write this kind of code over and | |
339 | over again and they always get it wrong. Perhaps now they will spend | |
340 | more time getting the rest of their code right... | |
341 | ||
342 | Based on code originally by Leo Lapworth, with many changes added by | |
343 | Leon Brocard <acme@astray.com>, and few enhancements by James Laver (ELPENGUIN) | |
344 | ||
345 | =head1 FURTHER QUESTIONS? | |
346 | ||
347 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. | |
348 | ||
349 | =head1 COPYRIGHT AND LICENSE | |
350 | ||
351 | This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE> | |
352 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can | |
353 | redistribute it and/or modify it under the same terms as the | |
354 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. |
10 | 10 | ); |
11 | 11 | use Try::Tiny; |
12 | 12 | |
13 | # not importing first() as it will clash with our own method | |
14 | use List::Util (); | |
15 | ||
16 | 13 | BEGIN { |
17 | 14 | # De-duplication in _merge_attr() is disabled, but left in for reference |
18 | 15 | # (the merger is used for other things that ought not to be de-duped) |
222 | 219 | |
223 | 220 | use Moo; |
224 | 221 | extends 'DBIx::Class::ResultSet'; |
225 | sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_ | |
222 | sub BUILDARGS { $_[2] || {} } # ::RS::new() expects my ($class, $rsrc, $args) = @_ | |
226 | 223 | |
227 | 224 | ...your code... |
228 | 225 | |
240 | 237 | use MooseX::NonMoose; |
241 | 238 | extends 'DBIx::Class::ResultSet'; |
242 | 239 | |
243 | sub BUILDARGS { $_[2] } # ::RS::new() expects my ($class, $rsrc, $args) = @_ | |
240 | sub BUILDARGS { $_[2] || {} } # ::RS::new() expects my ($class, $rsrc, $args) = @_ | |
244 | 241 | |
245 | 242 | ...your code... |
246 | 243 | |
372 | 369 | For a list of attributes that can be passed to C<search>, see |
373 | 370 | L</ATTRIBUTES>. For more examples of using this function, see |
374 | 371 | L<Searching|DBIx::Class::Manual::Cookbook/SEARCHING>. For a complete |
375 | documentation for the first argument, see L<SQL::Abstract/"WHERE CLAUSES"> | |
376 | and its extension L<DBIx::Class::SQLMaker>. | |
372 | documentation for the first argument, see | |
373 | L<SQL::Abstract::Classic/"WHERE CLAUSES"> and its extension | |
374 | L<DBIx::Class::SQLMaker>. | |
377 | 375 | |
378 | 376 | For more help on using joins with search, see L<DBIx::Class::Manual::Joining>. |
379 | 377 | |
380 | 378 | =head3 CAVEAT |
381 | 379 | |
382 | 380 | Note that L</search> does not process/deflate any of the values passed in the |
383 | L<SQL::Abstract>-compatible search condition structure. This is unlike other | |
384 | condition-bound methods L</new_result>, L</create> and L</find>. The user must ensure | |
385 | manually that any value passed to this method will stringify to something the | |
386 | RDBMS knows how to deal with. A notable example is the handling of L<DateTime> | |
387 | objects, for more info see: | |
381 | L<SQL::Abstract::Classic>-compatible search condition structure. This is unlike | |
382 | other condition-bound methods L</new_result>, L</create> and L</find>. The user | |
383 | must ensure manually that any value passed to this method will stringify to | |
384 | something the RDBMS knows how to deal with. A notable example is the handling | |
385 | of L<DateTime> objects, for more info see: | |
388 | 386 | L<DBIx::Class::Manual::Cookbook/Formatting DateTime objects in queries>. |
389 | 387 | |
390 | 388 | =cut |
466 | 464 | # see if we can keep the cache (no $rs changes) |
467 | 465 | my $cache; |
468 | 466 | my %safe = (alias => 1, cache => 1); |
469 | if ( ! List::Util::first { !$safe{$_} } keys %$call_attrs and ( | |
467 | if ( ! grep { !$safe{$_} } keys %$call_attrs and ( | |
470 | 468 | ! defined $call_cond |
471 | 469 | or |
472 | 470 | ref $call_cond eq 'HASH' && ! keys %$call_cond |
490 | 488 | my @selector_attrs = qw/select as columns cols +select +as +columns include_columns/; |
491 | 489 | |
492 | 490 | # reset the current selector list if new selectors are supplied |
493 | if (List::Util::first { exists $call_attrs->{$_} } qw/columns cols select as/) { | |
494 | delete @{$old_attrs}{(@selector_attrs, '_dark_selector')}; | |
495 | } | |
491 | delete @{$old_attrs}{(@selector_attrs, '_dark_selector')} | |
492 | if grep { exists $call_attrs->{$_} } qw(columns cols select as); | |
496 | 493 | |
497 | 494 | # Normalize the new selector list (operates on the passed-in attr structure) |
498 | 495 | # Need to do it on every chain instead of only once on _resolved_attrs, in |
1751 | 1748 | |
1752 | 1749 | # unqualify join-based group_by's. Arcane but possible query |
1753 | 1750 | # also horrible horrible hack to alias a column (not a func.) |
1754 | # (probably need to introduce SQLA syntax) | |
1755 | 1751 | if ($colpiece =~ /\./ && $colpiece !~ /^$attrs->{alias}\./) { |
1756 | 1752 | my $as = $colpiece; |
1757 | 1753 | $as =~ s/\./__/; |
1908 | 1904 | $storage->_prune_unused_joins ($attrs); |
1909 | 1905 | |
1910 | 1906 | # any non-pruneable non-local restricting joins imply subq |
1911 | $needs_subq = defined List::Util::first { $_ ne $attrs->{alias} } keys %{ $join_classifications->{restricting} || {} }; | |
1907 | $needs_subq = grep { $_ ne $attrs->{alias} } keys %{ $join_classifications->{restricting} || {} }; | |
1912 | 1908 | } |
1913 | 1909 | |
1914 | 1910 | # check if the head is composite (by now all joins are thrown out unless $needs_subq) |
2500 | 2496 | |
2501 | 2497 | =item Arguments: none |
2502 | 2498 | |
2503 | =item Return Value: L<$pager|Data::Page> | |
2504 | ||
2505 | =back | |
2506 | ||
2507 | Returns a L<Data::Page> object for the current resultset. Only makes | |
2508 | sense for queries with a C<page> attribute. | |
2499 | =item Return Value: L<$pager|DBIx::Class::ResultSet::Pager> | |
2500 | ||
2501 | =back | |
2502 | ||
2503 | Returns a L<DBIx::Class::ResultSet::Pager> object tied to the current | |
2504 | resultset. Requires the C<page> attribute to have been previously set on | |
2505 | the resultset object, usually via a call to L</page>. | |
2509 | 2506 | |
2510 | 2507 | To get the full count of entries for a paged resultset, call |
2511 | C<total_entries> on the L<Data::Page> object. | |
2508 | L<total_entries|DBIx::Class::ResultSet::Pager/total_entries> on the pager | |
2509 | object. | |
2512 | 2510 | |
2513 | 2511 | =cut |
2514 | 2512 | |
2519 | 2517 | |
2520 | 2518 | my $attrs = $self->{attrs}; |
2521 | 2519 | if (!defined $attrs->{page}) { |
2522 | $self->throw_exception("Can't create pager for non-paged rs"); | |
2520 | $self->throw_exception("Can't create pager for non-paged rs, you need to call page(\$num) first"); | |
2523 | 2521 | } |
2524 | 2522 | elsif ($attrs->{page} <= 0) { |
2525 | 2523 | $self->throw_exception('Invalid page number (page-numbers are 1-based)'); |
3389 | 3387 | my $attrs = $self->_resolved_attrs; |
3390 | 3388 | |
3391 | 3389 | my $fresh_rs = (ref $self)->new ( |
3392 | $self->result_source | |
3390 | $self->result_source, | |
3391 | {}, | |
3393 | 3392 | ); |
3394 | 3393 | |
3395 | 3394 | # these pieces will be locked in the subquery |
3526 | 3525 | |
3527 | 3526 | # default selection list |
3528 | 3527 | $attrs->{columns} = [ $source->columns ] |
3529 | unless List::Util::first { exists $attrs->{$_} } qw/columns cols select as/; | |
3528 | unless grep { exists $attrs->{$_} } qw/columns cols select as/; | |
3530 | 3529 | |
3531 | 3530 | # merge selectors together |
3532 | 3531 | for (qw/columns select as/) { |
3714 | 3713 | if ( |
3715 | 3714 | ! $attrs->{_main_source_premultiplied} |
3716 | 3715 | and |
3717 | ! List::Util::first { ! $_->[0]{-is_single} } @fromlist | |
3716 | ! grep { ! $_->[0]{-is_single} } @fromlist | |
3718 | 3717 | ) { |
3719 | 3718 | $attrs->{collapse} = 0; |
3720 | 3719 | } |
3912 | 3911 | }, |
3913 | 3912 | ARRAY => sub { |
3914 | 3913 | return $_[1] if !defined $_[0]; |
3915 | return $_[1] if __HM_DEDUP and List::Util::first { $_ eq $_[0] } @{$_[1]}; | |
3914 | return $_[1] if __HM_DEDUP and grep { $_ eq $_[0] } @{$_[1]}; | |
3916 | 3915 | return [$_[0], @{$_[1]}] |
3917 | 3916 | }, |
3918 | 3917 | HASH => sub { |
3925 | 3924 | ARRAY => { |
3926 | 3925 | SCALAR => sub { |
3927 | 3926 | return $_[0] if !defined $_[1]; |
3928 | return $_[0] if __HM_DEDUP and List::Util::first { $_ eq $_[1] } @{$_[0]}; | |
3927 | return $_[0] if __HM_DEDUP and grep { $_ eq $_[1] } @{$_[0]}; | |
3929 | 3928 | return [@{$_[0]}, $_[1]] |
3930 | 3929 | }, |
3931 | 3930 | ARRAY => sub { |
3938 | 3937 | HASH => sub { |
3939 | 3938 | return [ $_[1] ] if ! @{$_[0]}; |
3940 | 3939 | return $_[0] if !keys %{$_[1]}; |
3941 | return $_[0] if __HM_DEDUP and List::Util::first { $_ eq $_[1] } @{$_[0]}; | |
3940 | return $_[0] if __HM_DEDUP and grep { $_ eq $_[1] } @{$_[0]}; | |
3942 | 3941 | return [ @{$_[0]}, $_[1] ]; |
3943 | 3942 | }, |
3944 | 3943 | }, |
3953 | 3952 | return [] if !keys %{$_[0]} and !@{$_[1]}; |
3954 | 3953 | return [ $_[0] ] if !@{$_[1]}; |
3955 | 3954 | return $_[1] if !keys %{$_[0]}; |
3956 | return $_[1] if __HM_DEDUP and List::Util::first { $_ eq $_[0] } @{$_[1]}; | |
3955 | return $_[1] if __HM_DEDUP and grep { $_ eq $_[0] } @{$_[1]}; | |
3957 | 3956 | return [ $_[0], @{$_[1]} ]; |
3958 | 3957 | }, |
3959 | 3958 | HASH => sub { |
4045 | 4044 | Which column(s) to order the results by. |
4046 | 4045 | |
4047 | 4046 | [The full list of suitable values is documented in |
4048 | L<SQL::Abstract/"ORDER BY CLAUSES">; the following is a summary of | |
4047 | L<SQL::Abstract::Classic/"ORDER BY CLAUSES">; the following is a summary of | |
4049 | 4048 | common options.] |
4050 | 4049 | |
4051 | 4050 | If a single column name, or an arrayref of names is supplied, the |
4106 | 4105 | Like elsewhere, literal SQL or literal values can be included by using a |
4107 | 4106 | scalar reference or a literal bind value, and these values will be available |
4108 | 4107 | in the result with C<get_column> (see also |
4109 | L<SQL::Abstract/Literal SQL and value type operators>): | |
4108 | L<SQL::Abstract::Classic/Literal SQL and value type operators>): | |
4110 | 4109 | |
4111 | 4110 | # equivalent SQL: SELECT 1, 'a string', IF(my_column,?,?) ... |
4112 | 4111 | # bind values: $true_value, $false_value |
4566 | 4565 | The HAVING operator specifies a B<secondary> condition applied to the set |
4567 | 4566 | after the grouping calculations have been done. In other words it is a |
4568 | 4567 | constraint just like L</where> (and accepting the same |
4569 | L<SQL::Abstract syntax|SQL::Abstract/WHERE CLAUSES>) applied to the data | |
4570 | as it exists after GROUP BY has taken place. Specifying L</having> without | |
4571 | L</group_by> is a logical mistake, and a fatal error on most RDBMS engines. | |
4568 | L<SQL::Abstract::Classic syntax|SQL::Abstract::Classic/WHERE CLAUSES>) applied | |
4569 | to the data as it exists after GROUP BY has taken place. Specifying L</having> | |
4570 | without L</group_by> is a logical mistake, and a fatal error on most RDBMS | |
4571 | engines. | |
4572 | 4572 | |
4573 | 4573 | E.g. |
4574 | 4574 | |
4610 | 4610 | Can be overridden by passing C<< { where => undef } >> as an attribute |
4611 | 4611 | to a resultset. |
4612 | 4612 | |
4613 | For more complicated where clauses see L<SQL::Abstract/WHERE CLAUSES>. | |
4613 | For more complicated where clauses see L<SQL::Abstract::Classic/WHERE CLAUSES>. | |
4614 | 4614 | |
4615 | 4615 | =back |
4616 | 4616 |
6 | 6 | use DBIx::Class::Carp; |
7 | 7 | use DBIx::Class::_Util 'fail_on_internal_wantarray'; |
8 | 8 | use namespace::clean; |
9 | ||
10 | # not importing first() as it will clash with our own method | |
11 | use List::Util (); | |
12 | 9 | |
13 | 10 | =head1 NAME |
14 | 11 | |
55 | 52 | # (to create a new column definition on-the-fly). |
56 | 53 | my $as_list = $orig_attrs->{as} || []; |
57 | 54 | my $select_list = $orig_attrs->{select} || []; |
58 | my $as_index = List::Util::first { ($as_list->[$_] || "") eq $column } 0..$#$as_list; | |
55 | my ($as_index) = grep { ($as_list->[$_] || "") eq $column } 0..$#$as_list; | |
59 | 56 | my $select = defined $as_index ? $select_list->[$as_index] : $column; |
60 | 57 | |
61 | 58 | my $colmap; |
3 | 3 | use strict; |
4 | 4 | use warnings; |
5 | 5 | |
6 | use List::Util 'first'; | |
7 | 6 | use DBIx::Class::_Util 'perlstring'; |
8 | 7 | |
9 | 8 | use constant HAS_DOR => ( $] < 5.010 ? 0 : 1 ); |
315 | 314 | if ( |
316 | 315 | $relinfo->{-is_optional} |
317 | 316 | and |
318 | defined ( my $first_distinct_child_idcol = first | |
317 | scalar( my ($first_distinct_child_idcol) = grep | |
319 | 318 | { ! $known_present_ids->{$_} } |
320 | 319 | @{$relinfo->{-identifying_columns}} |
321 | 320 | ) |
6 | 6 | use base 'DBIx::Class'; |
7 | 7 | |
8 | 8 | use Try::Tiny; |
9 | use List::Util qw(first max); | |
10 | 9 | |
11 | 10 | use DBIx::Class::ResultSource::RowParser::Util qw( |
12 | 11 | assemble_simple_parser |
192 | 191 | and |
193 | 192 | keys %$cond |
194 | 193 | and |
195 | ! defined first { $_ !~ /^foreign\./ } (keys %$cond) | |
194 | ! grep { $_ !~ /^foreign\./ } (keys %$cond) | |
196 | 195 | and |
197 | ! defined first { $_ !~ /^self\./ } (values %$cond) | |
196 | ! grep { $_ !~ /^self\./ } (values %$cond) | |
198 | 197 | ) { |
199 | 198 | for my $f (keys %$cond) { |
200 | 199 | my $s = $cond->{$f}; |
370 | 369 | # coderef later |
371 | 370 | $collapse_map->{-identifying_columns} = []; |
372 | 371 | $collapse_map->{-identifying_columns_variants} = [ sort { |
373 | (scalar @$a) <=> (scalar @$b) or max(@$a) <=> max(@$b) | |
372 | (scalar @$a) <=> (scalar @$b) | |
373 | or | |
374 | ( | |
375 | # Poor man's max() | |
376 | ( sort { $b <=> $a } @$a )[0] | |
377 | <=> | |
378 | ( sort { $b <=> $a } @$b )[0] | |
379 | ) | |
374 | 380 | } @collapse_sets ]; |
375 | 381 | } |
376 | 382 | } |
417 | 423 | |
418 | 424 | # if there is at least one *inner* reverse relationship which is HASH-based (equality only) |
419 | 425 | # we can safely assume that the child can not exist without us |
420 | rev_rel_is_optional => ( first | |
426 | rev_rel_is_optional => ( grep | |
421 | 427 | { ref $_->{cond} eq 'HASH' and ($_->{attrs}{join_type}||'') !~ /^left/i } |
422 | 428 | values %{ $self->reverse_relationship_info($rel) }, |
423 | 429 | ) ? 0 : 1, |
9 | 9 | |
10 | 10 | use DBIx::Class::Carp; |
11 | 11 | use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION'; |
12 | use SQL::Abstract 'is_literal_value'; | |
12 | use SQL::Abstract::Util 'is_literal_value'; | |
13 | 13 | use Devel::GlobalDestruction; |
14 | 14 | use Try::Tiny; |
15 | use List::Util 'first'; | |
16 | 15 | use Scalar::Util qw/blessed weaken isweak/; |
17 | 16 | |
18 | 17 | use namespace::clean; |
475 | 474 | my $colinfo = $self->_columns; |
476 | 475 | |
477 | 476 | if ( |
478 | first { ! $_->{data_type} } values %$colinfo | |
477 | grep { ! $_->{data_type} } values %$colinfo | |
479 | 478 | and |
480 | 479 | ! $self->{_columns_info_loaded} |
481 | 480 | and |
802 | 801 | my $self = shift; |
803 | 802 | my @constraints = @_; |
804 | 803 | |
805 | if ( !(@constraints % 2) && first { ref $_ ne 'ARRAY' } @constraints ) { | |
804 | if ( !(@constraints % 2) && grep { ref $_ ne 'ARRAY' } @constraints ) { | |
806 | 805 | # with constraint name |
807 | 806 | while (my ($name, $constraint) = splice @constraints, 0, 2) { |
808 | 807 | $self->add_unique_constraint($name => $constraint); |
1282 | 1281 | 'foreign.book_id' => 'self.id', |
1283 | 1282 | }); |
1284 | 1283 | |
1285 | The condition C<$cond> needs to be an L<SQL::Abstract>-style | |
1284 | The condition C<$cond> needs to be an L<SQL::Abstract::Classic>-style | |
1286 | 1285 | representation of the join between the tables. For example, if you're |
1287 | 1286 | creating a relation from Author to Book, |
1288 | 1287 | |
1706 | 1705 | : $rel_info->{attrs}{join_type} |
1707 | 1706 | , |
1708 | 1707 | -join_path => [@$jpath, { $join => $as } ], |
1709 | -is_single => ( | |
1708 | -is_single => !!( | |
1710 | 1709 | (! $rel_info->{attrs}{accessor}) |
1711 | 1710 | or |
1712 | first { $rel_info->{attrs}{accessor} eq $_ } (qw/single filter/) | |
1711 | grep { $rel_info->{attrs}{accessor} eq $_ } (qw/single filter/) | |
1713 | 1712 | ), |
1714 | 1713 | -alias => $as, |
1715 | 1714 | -relation_chain_depth => ( $seen->{-relation_chain_depth} || 0 ) + 1, |
5 | 5 | use base qw/DBIx::Class/; |
6 | 6 | |
7 | 7 | use Scalar::Util 'blessed'; |
8 | use List::Util 'first'; | |
9 | 8 | use Try::Tiny; |
10 | 9 | use DBIx::Class::Carp; |
11 | use SQL::Abstract 'is_literal_value'; | |
10 | use SQL::Abstract::Util 'is_literal_value'; | |
12 | 11 | |
13 | 12 | ### |
14 | 13 | ### Internal method |
523 | 522 | $result->update({ last_modified => \'NOW()' }); |
524 | 523 | |
525 | 524 | The update will pass the values verbatim into SQL. (See |
526 | L<SQL::Abstract> docs). The values in your Result object will NOT change | |
527 | as a result of the update call, if you want the object to be updated | |
528 | with the actual values from the database, call L</discard_changes> | |
529 | after the update. | |
525 | L<SQL::Abstract::Classic> docs). The values in your Result object will NOT | |
526 | change as a result of the update call, if you want the object to be updated | |
527 | with the actual values from the database, call L</discard_changes> after the | |
528 | update. | |
530 | 529 | |
531 | 530 | $result->update()->discard_changes(); |
532 | 531 | |
1025 | 1024 | # value tracked between column changes and commitment to storage |
1026 | 1025 | sub _track_storage_value { |
1027 | 1026 | my ($self, $col) = @_; |
1028 | return defined first { $col eq $_ } ($self->result_source->primary_columns); | |
1027 | return scalar grep | |
1028 | { $col eq $_ } | |
1029 | $self->result_source->primary_columns | |
1030 | ; | |
1029 | 1031 | } |
1030 | 1032 | |
1031 | 1033 | =head2 set_columns |
0 | package DBIx::Class::SQLMaker::ClassicExtensions; | |
1 | ||
2 | use strict; | |
3 | use warnings; | |
4 | ||
5 | =head1 NAME | |
6 | ||
7 | DBIx::Class::SQLMaker::ClassicExtensions - Class containing generic enhancements to SQL::Abstract::Classic | |
8 | ||
9 | =head1 DESCRIPTION | |
10 | ||
11 | This module is not intended to be used standalone. Instead it represents | |
12 | a quasi-role, that one would "mix in" via classic C<@ISA> inheritance into | |
13 | a DBIx::Class::SQLMaker-like provider. See | |
14 | L<DBIx::Class::Storage::DBI/connect_call_rebase_sqlmaker> for more info. | |
15 | ||
16 | Currently the enhancements over L<SQL::Abstract::Classic> are: | |
17 | ||
18 | =over | |
19 | ||
20 | =item * Support for C<JOIN> statements (via extended C<table/from> support) | |
21 | ||
22 | =item * Support of functions in C<SELECT> lists | |
23 | ||
24 | =item * C<GROUP BY>/C<HAVING> support (via extensions to the order_by parameter) | |
25 | ||
26 | =item * A rudimentary multicolumn IN operator | |
27 | ||
28 | =item * Support of C<...FOR UPDATE> type of select statement modifiers | |
29 | ||
30 | =back | |
31 | ||
32 | =cut | |
33 | ||
34 | # to pull in CAG and the frame-boundary-markers | |
35 | use base 'DBIx::Class'; | |
36 | use DBIx::Class::Carp; | |
37 | use namespace::clean; | |
38 | ||
39 | __PACKAGE__->mk_group_accessors (simple => qw/quote_char name_sep limit_dialect/); | |
40 | ||
41 | sub _quoting_enabled { | |
42 | ( defined $_[0]->{quote_char} and length $_[0]->{quote_char} ) ? 1 : 0 | |
43 | } | |
44 | ||
45 | # for when I need a normalized l/r pair | |
46 | sub _quote_chars { | |
47 | ||
48 | # in case we are called in the old !!$sm->_quote_chars fashion | |
49 | return () if !wantarray and ( ! defined $_[0]->{quote_char} or ! length $_[0]->{quote_char} ); | |
50 | ||
51 | map | |
52 | { defined $_ ? $_ : '' } | |
53 | ( ref $_[0]->{quote_char} ? (@{$_[0]->{quote_char}}) : ( ($_[0]->{quote_char}) x 2 ) ) | |
54 | ; | |
55 | } | |
56 | ||
57 | # FIXME when we bring in the storage weaklink, check its schema | |
58 | # weaklink and channel through $schema->throw_exception | |
59 | sub throw_exception { DBIx::Class::Exception->throw($_[1]) } | |
60 | ||
61 | sub belch { | |
62 | shift; # throw away $self | |
63 | carp( "Warning: ", @_ ); | |
64 | }; | |
65 | ||
66 | sub puke { | |
67 | shift->throw_exception("Fatal: " . join ('', @_)); | |
68 | }; | |
69 | ||
70 | # constants-methods are used not only here, but also in comparison tests | |
71 | sub __rows_bindtype () { | |
72 | +{ sqlt_datatype => 'integer' } | |
73 | } | |
74 | sub __offset_bindtype () { | |
75 | +{ sqlt_datatype => 'integer' } | |
76 | } | |
77 | sub __total_bindtype () { | |
78 | +{ sqlt_datatype => 'integer' } | |
79 | } | |
80 | ||
81 | # the "oh noes offset/top without limit" constant | |
82 | # limited to 31 bits for sanity (and consistency, | |
83 | # since it may be handed to the like of sprintf %u) | |
84 | # | |
85 | # Also *some* builds of SQLite fail the test | |
86 | # some_column BETWEEN ? AND ?: 1, 4294967295 | |
87 | # with the proper integer bind attrs | |
88 | # | |
89 | # Implemented as a method, since ::Storage::DBI also | |
90 | # refers to it (i.e. for the case of software_limit or | |
91 | # as the value to abuse with MSSQL ordered subqueries) | |
92 | sub __max_int () { 0x7FFFFFFF }; | |
93 | ||
94 | # we no longer need to check this - DBIC has ways of dealing with it | |
95 | # specifically ::Storage::DBI::_resolve_bindattrs() | |
96 | sub _assert_bindval_matches_bindtype () { 1 }; | |
97 | ||
98 | # poor man's de-qualifier | |
99 | sub _quote { | |
100 | $_[0]->next::method( ( $_[0]{_dequalify_idents} and ! ref $_[1] ) | |
101 | ? $_[1] =~ / ([^\.]+) $ /x | |
102 | : $_[1] | |
103 | ); | |
104 | } | |
105 | ||
106 | sub _where_op_NEST { | |
107 | carp_unique ("-nest in search conditions is deprecated, you most probably wanted:\n" | |
108 | .q|{..., -and => [ \%cond0, \@cond1, \'cond2', \[ 'cond3', [ col => bind ] ], etc. ], ... }| | |
109 | ); | |
110 | ||
111 | shift->next::method(@_); | |
112 | } | |
113 | ||
114 | # Handle limit-dialect selection | |
115 | sub select { | |
116 | my ($self, $table, $fields, $where, $rs_attrs, $limit, $offset) = @_; | |
117 | ||
118 | ||
119 | ($fields, @{$self->{select_bind}}) = $self->_recurse_fields($fields); | |
120 | ||
121 | if (defined $offset) { | |
122 | $self->throw_exception('A supplied offset must be a non-negative integer') | |
123 | if ( $offset =~ /\D/ or $offset < 0 ); | |
124 | } | |
125 | $offset ||= 0; | |
126 | ||
127 | if (defined $limit) { | |
128 | $self->throw_exception('A supplied limit must be a positive integer') | |
129 | if ( $limit =~ /\D/ or $limit <= 0 ); | |
130 | } | |
131 | elsif ($offset) { | |
132 | $limit = $self->__max_int; | |
133 | } | |
134 | ||
135 | ||
136 | my ($sql, @bind); | |
137 | if ($limit) { | |
138 | # this is legacy code-flow from SQLA::Limit, it is not set in stone | |
139 | ||
140 | ($sql, @bind) = $self->next::method ($table, $fields, $where); | |
141 | ||
142 | my $limiter; | |
143 | ||
144 | if( $limiter = $self->can ('emulate_limit') ) { | |
145 | carp_unique( | |
146 | 'Support for the legacy emulate_limit() mechanism inherited from ' | |
147 | . 'SQL::Abstract::Limit has been deprecated, and will be removed at ' | |
148 | . 'some future point, as it gets in the way of architectural and/or ' | |
149 | . 'performance advances within DBIC. If your code uses this type of ' | |
150 | . 'limit specification please file an RT and provide the source of ' | |
151 | . 'your emulate_limit() implementation, so an acceptable upgrade-path ' | |
152 | . 'can be devised' | |
153 | ); | |
154 | } | |
155 | else { | |
156 | my $dialect = $self->limit_dialect | |
157 | or $self->throw_exception( "Unable to generate SQL-limit - no limit dialect specified on $self" ); | |
158 | ||
159 | $limiter = $self->can ("_$dialect") | |
160 | or $self->throw_exception(__PACKAGE__ . " does not implement the requested dialect '$dialect'"); | |
161 | } | |
162 | ||
163 | $sql = $self->$limiter ( | |
164 | $sql, | |
165 | { %{$rs_attrs||{}}, _selector_sql => $fields }, | |
166 | $limit, | |
167 | $offset | |
168 | ); | |
169 | } | |
170 | else { | |
171 | ($sql, @bind) = $self->next::method ($table, $fields, $where, $rs_attrs); | |
172 | } | |
173 | ||
174 | push @{$self->{where_bind}}, @bind; | |
175 | ||
176 | # this *must* be called, otherwise extra binds will remain in the sql-maker | |
177 | my @all_bind = $self->_assemble_binds; | |
178 | ||
179 | $sql .= $self->_lock_select ($rs_attrs->{for}) | |
180 | if $rs_attrs->{for}; | |
181 | ||
182 | return wantarray ? ($sql, @all_bind) : $sql; | |
183 | } | |
184 | ||
185 | sub _assemble_binds { | |
186 | my $self = shift; | |
187 | return map { @{ (delete $self->{"${_}_bind"}) || [] } } (qw/pre_select select from where group having order limit/); | |
188 | } | |
189 | ||
190 | my $for_syntax = { | |
191 | update => 'FOR UPDATE', | |
192 | shared => 'FOR SHARE', | |
193 | }; | |
194 | sub _lock_select { | |
195 | my ($self, $type) = @_; | |
196 | ||
197 | my $sql; | |
198 | if (ref($type) eq 'SCALAR') { | |
199 | $sql = "FOR $$type"; | |
200 | } | |
201 | else { | |
202 | $sql = $for_syntax->{$type} || $self->throw_exception( "Unknown SELECT .. FOR type '$type' requested" ); | |
203 | } | |
204 | ||
205 | return " $sql"; | |
206 | } | |
207 | ||
208 | # Handle default inserts | |
209 | sub insert { | |
210 | # optimized due to hotttnesss | |
211 | # my ($self, $table, $data, $options) = @_; | |
212 | ||
213 | # FIXME SQLMaker will emit INSERT INTO $table ( ) VALUES ( ) | |
214 | # which is sadly understood only by MySQL. Change default behavior here, | |
215 | # until we fold the extra pieces into SQLMaker properly | |
216 | if (! $_[2] or (ref $_[2] eq 'HASH' and !keys %{$_[2]} ) ) { | |
217 | my @bind; | |
218 | my $sql = sprintf( | |
219 | 'INSERT INTO %s DEFAULT VALUES', $_[0]->_quote($_[1]) | |
220 | ); | |
221 | ||
222 | if ( ($_[3]||{})->{returning} ) { | |
223 | my $s; | |
224 | ($s, @bind) = $_[0]->_insert_returning ($_[3]); | |
225 | $sql .= $s; | |
226 | } | |
227 | ||
228 | return ($sql, @bind); | |
229 | } | |
230 | ||
231 | next::method(@_); | |
232 | } | |
233 | ||
# Render a single {select}-style specification into ( $sql, @bind ).
# Accepted forms:
#   plain scalar        -> quoted identifier
#   \'literal sql'      -> passed through verbatim, no binds
#   [ @specs ]          -> each element recursed, joined with ', '
#   { func => $spec, -as => $alias } -> FUNC( ... ) [AS alias]
#   \[ $sql, @bind ]    -> literal SQL with bind values, returned as-is
# Anything else is a hard error.
sub _recurse_fields {
  my ($self, $fields) = @_;
  my $ref = ref $fields;

  # unblessed scalar: a column name in need of quoting
  return $self->_quote($fields) unless $ref;

  # scalarref: verbatim SQL, no binds
  return $$fields if $ref eq 'SCALAR';

  if ($ref eq 'ARRAY') {
    # recurse into each element, accumulating SQL fragments and binds
    my (@select, @bind);
    for my $field (@$fields) {
      my ($select, @new_bind) = $self->_recurse_fields($field);
      push @select, $select;
      push @bind, @new_bind;
    }
    return (join(', ', @select), @bind);
  }
  elsif ($ref eq 'HASH') {
    my %hash = %$fields; # shallow copy

    my $as = delete $hash{-as};   # if supplied

    my ($func, $rhs, @toomany) = %hash;

    # there should be only one pair
    if (@toomany) {
      $self->throw_exception( "Malformed select argument - too many keys in hash: " . join (',', keys %$fields ) );
    }

    # DISTINCT over multiple columns is ambiguous - refuse and advise
    if (lc ($func) eq 'distinct' && ref $rhs eq 'ARRAY' && @$rhs > 1) {
      $self->throw_exception (
        'The select => { distinct => ... } syntax is not supported for multiple columns.'
       .' Instead please use { group_by => [ qw/' . (join ' ', @$rhs) . '/ ] }'
       .' or { select => [ qw/' . (join ' ', @$rhs) . '/ ], distinct => 1 }'
      );
    }

    # render the function argument, then wrap it: FUNC( arg ) [AS alias]
    my ($rhs_sql, @rhs_bind) = $self->_recurse_fields($rhs);
    my $select = sprintf ('%s( %s )%s',
      $self->_sqlcase($func),
      $rhs_sql,
      $as
        ? sprintf (' %s %s', $self->_sqlcase('as'), $self->_quote ($as) )
        : ''
    );

    return ($select, @rhs_bind);
  }
  # \[ $sql, @bind ] - literal SQL with binds, used unchanged
  elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) {
    return @{$$fields};
  }
  else {
    $self->throw_exception( $ref . qq{ unexpected in _recurse_fields()} );
  }
}
287 | ||
288 | ||
289 | # this used to be a part of _order_by but is broken out for clarity. | |
290 | # What we have been doing forever is hijacking the $order arg of | |
291 | # SQLAC::select to pass in arbitrary pieces of data (first the group_by, | |
292 | # then pretty much the entire resultset attr-hash, as more and more | |
293 | # things in the SQLMaker space need to have more info about the $rs they | |
294 | # create SQL for. The alternative would be to keep expanding the | |
295 | # signature of _select with more and more positional parameters, which | |
296 | # is just gross. | |
297 | # | |
298 | # FIXME - this will have to transition out to a subclass when the effort | |
299 | # of folding the SQL generating machinery into SQLMaker takes place | |
# Assemble the trailing GROUP BY / HAVING / ORDER BY portion of a
# statement out of a resultset attribute hash. Binds generated for the
# group/having clauses accumulate on the corresponding $self->{*_bind}
# stacks; order binds are handled by _order_by itself.
sub _parse_rs_attrs {
  my ($self, $attrs) = @_;

  my $clauses = '';

  if ($attrs->{group_by}) {
    # a literal \[] may legitimately render to an empty list - skip then
    my @rendered = $self->_recurse_fields($attrs->{group_by});
    if (@rendered) {
      my $group_sql = shift @rendered;
      $clauses .= $self->_sqlcase(' group by ') . $group_sql;
      push @{$self->{group_bind}}, @rendered;
    }
  }

  if (defined $attrs->{having}) {
    my ($having_sql, @having_bind) = $self->_recurse_where($attrs->{having});
    push @{$self->{having_bind}}, @having_bind;
    $clauses .= $self->_sqlcase(' having ') . $having_sql;
  }

  $clauses .= $self->_order_by($attrs->{order_by})
    if defined $attrs->{order_by};

  return $clauses;
}
324 | ||
# Render an ORDER BY clause, stashing any binds on {order_bind}.
# A hashref containing no -asc/-desc keys is not an order spec at all:
# it is the legacy calling convention where the entire resultset attr
# hash is smuggled in as the order argument - divert it accordingly.
sub _order_by {
  my ($self, $arg) = @_;

  if (
    ref $arg eq 'HASH'
      and
    ! grep { /^-(?:desc|asc)/i } keys %$arg
  ) {
    return $self->_parse_rs_attrs ($arg);
  }

  my ($sql, @bind) = $self->next::method($arg);
  push @{$self->{order_bind}}, @bind;
  return $sql;
}
338 | ||
# Peel a trailing ASC/DESC modifier (plus trailing whitespace) off a
# single order-by fragment. Returns ( $bare_chunk, $is_descending ).
sub _split_order_chunk {
  my ($self, $chunk) = @_;

  # the substitution matches even with no modifier present, which
  # guarantees $1 is freshly (un)set on every invocation
  my $is_desc = 0;
  if ( $chunk =~ s/ (?: \s+ (ASC|DESC) )? \s* $//ix ) {
    $is_desc = 1 if defined $1 and uc($1) eq 'DESC';
  }

  return ($chunk, $is_desc);
}
350 | ||
# Render the FROM-source argument into SQL. Works on @_ directly
# (no argument copying) as a deliberate hot-path optimization.
sub _table {
  # optimized due to hotttnesss
  #  my ($self, $from) = @_;
  if (my $ref = ref $_[1] ) {
    if ($ref eq 'ARRAY') {
      # [ $from, @joins ] - a full join tree
      return $_[0]->_recurse_from(@{$_[1]});
    }
    elsif ($ref eq 'HASH') {
      # { $alias => $source } - a single aliased source
      return $_[0]->_recurse_from($_[1]);
    }
    elsif ($ref eq 'REF' && ref ${$_[1]} eq 'ARRAY') {
      # \[ $sql, @bind ] - literal SQL; binds accumulate on {from_bind}
      my ($sql, @bind) = @{ ${$_[1]} };
      push @{$_[0]->{from_bind}}, @bind;
      return $sql
    }
  }
  # plain table name (or scalarref) - defer to the parent implementation
  return $_[0]->next::method ($_[1]);
}
369 | ||
# Produce the "<TYPE> JOIN " keyword prefix for one join. An undefined
# type falls back to the maker-wide default; an empty/false type yields
# a bare " JOIN " (note the deliberate leading space).
sub _generate_join_clause {
  my ($self, $join_type) = @_;

  $join_type = $self->{_default_jointype}
    unless defined $join_type;

  my $kw = $join_type ? $self->_sqlcase($join_type) : '';
  return "$kw JOIN ";
}
380 | ||
# Render a from-spec plus its joins: each becomes a standalone SQL
# chunk, stitched together with single spaces.
sub _recurse_from {
  my $self = shift;
  my @blocks = $self->_gen_from_blocks(@_);
  return join ' ', @blocks;
}
385 | ||
# Turn ( $from_spec, @join_specs ) into a list of SQL chunks - one for
# the leading source, one per join. Join-condition binds accumulate on
# $self->{from_bind}.
sub _gen_from_blocks {
  my ($self, $head, @join_specs) = @_;

  my @blocks = $self->_from_chunk_to_sql($head);

  for my $spec (@join_specs) {
    my ($target, $condition) = @$spec;

    # an explicit join type may ride along via the -join_type attribute
    my $jt_src = (ref $target eq 'ARRAY') ? $target->[0] : $target;
    my $join_type;
    if (ref $jt_src eq 'HASH' and defined $jt_src->{-join_type}) {
      ($join_type = $jt_src->{-join_type}) =~ s/^\s+ | \s+$//xg;
    }

    my @pieces = $self->_generate_join_clause( $join_type );

    # an arrayref target is a nested join tree and gets parenthesized
    if (ref $target eq 'ARRAY') {
      push @pieces, '(', $self->_recurse_from(@$target), ')';
    }
    else {
      push @pieces, $self->_from_chunk_to_sql($target);
    }

    my ($on_sql, @on_bind) = $self->_join_condition($condition);
    push @pieces, ' ON ', $on_sql;
    push @{ $self->{from_bind} }, @on_bind;

    push @blocks, join '', @pieces;
  }

  return @blocks;
}
420 | ||
# Render a single element of a from-spec into SQL.
# Accepted forms:
#   plain scalar          -> quoted table name
#   \'literal sql'        -> passed through verbatim
#   \[ $sql, @bind ]      -> literal SQL, binds pushed onto {from_bind}
#   { $alias => $source } -> "$source_sql $quoted_alias"
# (fixed a typo in the too-many-keys exception text: 'exra' -> 'extra')
sub _from_chunk_to_sql {
  my ($self, $fromspec) = @_;

  return join (' ', do {
    if (! ref $fromspec) {
      $self->_quote($fromspec);
    }
    elsif (ref $fromspec eq 'SCALAR') {
      $$fromspec;
    }
    elsif (ref $fromspec eq 'REF' and ref $$fromspec eq 'ARRAY') {
      push @{$self->{from_bind}}, @{$$fromspec}[1..$#$$fromspec];
      $$fromspec->[0];
    }
    elsif (ref $fromspec eq 'HASH') {
      # exactly one alias => source pair expected (non-dash keys only)
      my ($as, $table, $toomany) = ( map
        { $_ => $fromspec->{$_} }
        ( grep { $_ !~ /^\-/ } keys %$fromspec )
      );

      $self->throw_exception( "Only one table/as pair expected in from-spec but an extra '$toomany' key present" )
        if defined $toomany;

      ($self->_from_chunk_to_sql($table), $self->_quote($as) );
    }
    else {
      $self->throw_exception('Unsupported from refkind: ' . ref $fromspec );
    }
  });
}
451 | ||
# Render a join ON-condition into ( $sql, @bind ).
sub _join_condition {
  my ($self, $cond) = @_;

  # Backcompat shim: in the old days a single-pair plain hashref
  # { 't1.col1' => 't2.col2' } meant ON t1.col1 = t2.col2
  if (
    ref $cond eq 'HASH'
      and
    keys %$cond == 1
      and
    (keys %$cond)[0] =~ /\./
      and
    ! ref ( (values %$cond)[0] )
  ) {
    carp_unique(
      "ResultSet {from} structures with conditions not conforming to the "
    . "SQL::Abstract::Classic syntax are deprecated: you either need to stop "
    . "abusing {from} altogether, or express the condition properly using the "
    . "{ -ident => ... } operator"
    );
    my ($l) = keys %$cond;
    my ($r) = values %$cond;
    $cond = { $l => { -ident => $r } };
  }
  elsif ( ref $cond eq 'ARRAY' ) {
    # OR the pieces together ourselves so each one passes through the
    # hashref shim above
    my (@sql_parts, @all_binds);
    for my $sub_cond (@$cond) {
      my ($part_sql, @part_bind) = $self->_join_condition($sub_cond);
      push @all_binds, @part_bind;
      push @sql_parts, $part_sql;
    }
    return join(' OR ', @sql_parts), @all_binds;
  }

  return $self->_recurse_where($cond);
}
488 | ||
489 | # !!! EXPERIMENTAL API !!! WILL CHANGE !!! | |
490 | # | |
491 | # This is rather odd, but vanilla SQLA* variants do not have support for | |
492 | # multicolumn-IN expressions | |
493 | # Currently has only one callsite in ResultSet, body moved into this subclass | |
494 | # to raise API questions like: | |
495 | # - how do we convey a list of idents...? | |
496 | # - can binds reside on lhs? | |
497 | # | |
498 | # !!! EXPERIMENTAL API !!! WILL CHANGE !!! | |
sub _where_op_multicolumn_in {
  my ($self, $lhs, $rhs) = @_;

  # Normalize the LHS (the column tuple) into canonical \[ $sql, @bind ] form
  if (! ref $lhs or ref $lhs eq 'ARRAY') {
    my (@sql, @bind);
    for (ref $lhs ? @$lhs : $lhs) {
      if (! ref $_) {
        # plain column name - quote it
        push @sql, $self->_quote($_);
      }
      elsif (ref $_ eq 'SCALAR') {
        # literal SQL fragment, verbatim
        push @sql, $$_;
      }
      elsif (ref $_ eq 'REF' and ref $$_ eq 'ARRAY') {
        # literal SQL with binds
        my ($s, @b) = @$$_;
        push @sql, $s;
        push @bind, @b;
      }
      else {
        $self->throw_exception("ARRAY of @{[ ref $_ ]}es unsupported for multicolumn IN lhs...");
      }
    }
    $lhs = \[ join(', ', @sql), @bind];
  }
  elsif (ref $lhs eq 'SCALAR') {
    $lhs = \[ $$lhs ];
  }
  elsif (ref $lhs eq 'REF' and ref $$lhs eq 'ARRAY' ) {
    # noop - already in canonical form
  }
  else {
    $self->throw_exception( ref($lhs) . "es unsupported for multicolumn IN lhs...");
  }

  # is this proper...?
  # the RHS (normally a subselect) goes through the regular WHERE machinery
  $rhs = \[ $self->_recurse_where($rhs) ];

  # parenthesize both sides unless they already appear parenthesized
  for ($lhs, $rhs) {
    $$_->[0] = "( $$_->[0] )"
      unless $$_->[0] =~ /^ \s* \( .* \) \s* $/xs;
  }

  # final shape: \[ '( c1, c2 ) IN ( SELECT ... )', @lhs_bind, @rhs_bind ]
  \[ join( ' IN ', shift @$$lhs, shift @$$rhs ), @$$lhs, @$$rhs ];
}
542 | ||
543 | ||
544 | ### | |
545 | ### Code that mostly used to be in DBIC::SQLMaker::LimitDialects | |
546 | ### | |
547 | ||
# Standard "LIMIT ? [OFFSET ?]" dialect - both values emitted as binds
# appended to {limit_bind}.
sub _LimitOffset {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  my $limited = $sql . $self->_parse_rs_attrs($rs_attrs) . ' LIMIT ?';
  push @{$self->{limit_bind}}, [ $self->__rows_bindtype => $rows ];

  if ($offset) {
    $limited .= ' OFFSET ?';
    push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset ];
  }

  return $limited;
}
558 | ||
# MySQL-style "LIMIT [offset,] rows" dialect - both values emitted as
# binds appended to {limit_bind} (offset first when present).
sub _LimitXY {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  my $limited = $sql . $self->_parse_rs_attrs($rs_attrs) . ' LIMIT ';

  if ($offset) {
    $limited .= '?, ';
    push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset ];
  }

  $limited .= '?';
  push @{$self->{limit_bind}}, [ $self->__rows_bindtype => $rows ];

  return $limited;
}
571 | ||
# ROW_NUMBER() OVER (ORDER BY ...) limit dialect: the query is wrapped
# in two subselects and filtered on a 1-based inclusive row-index
# window [ offset+1 .. offset+rows ], emitted as two binds.
sub _RowNumberOver {
  my ($self, $sql, $rs_attrs, $rows, $offset ) = @_;

  # get selectors, and scan the order_by (if any)
  my $sq_attrs = $self->_subqueried_limit_attrs ( $sql, $rs_attrs );

  # make up an order if none exists
  my $requested_order = (delete $rs_attrs->{order_by}) || $self->_rno_default_order;

  # the order binds (if any) will need to go at the end of the entire inner select
  local $self->{order_bind};
  my $rno_ord = $self->_order_by ($requested_order);
  push @{$self->{select_bind}}, @{$self->{order_bind}};

  # this is the order supplement magic: order-only columns must be
  # re-selected (aliased) by the innermost query so the OVER() clause
  # of the middle query can still reference them
  my $mid_sel = $sq_attrs->{selection_outer};
  if (my $extra_order_sel = $sq_attrs->{order_supplement}) {
    for my $extra_col (sort
      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
      keys %$extra_order_sel
    ) {
      $sq_attrs->{selection_inner} .= sprintf (', %s AS %s',
        $extra_col,
        $extra_order_sel->{$extra_col},
      );
    }
  }

  # and this is order re-alias magic: rewrite the ORDER BY to use the
  # subquery aliases (longest originals first so shorter substrings
  # can not clobber longer ones)
  for my $map ($sq_attrs->{order_supplement}, $sq_attrs->{outer_renames}) {
    for my $col (sort { (length $b) <=> (length $a) } keys %{$map||{}} ) {
      my $re_col = quotemeta ($col);
      $rno_ord =~ s/$re_col/$map->{$col}/;
    }
  }

  # whatever is left of the order_by (only where is processed at this point)
  my $group_having = $self->_parse_rs_attrs($rs_attrs);

  my $qalias = $self->_quote ($rs_attrs->{alias});
  my $idx_name = $self->_quote ('rno__row__index');

  push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset + 1], [ $self->__total_bindtype => $offset + $rows ];

  return <<EOS;

SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $mid_sel, ROW_NUMBER() OVER( $rno_ord ) AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${group_having}
  ) $qalias
) $qalias WHERE $idx_name >= ? AND $idx_name <= ?

EOS

}
627 | ||
# some databases are happy with OVER (), some need OVER (ORDER BY (SELECT (1)) )
# hence overridable; this base implementation supplies no ordering at all
sub _rno_default_order { return undef }
632 | ||
# "SELECT SKIP x FIRST y ..." style dialect. Both values are emitted as
# pre-select binds (SKIP before FIRST, matching their SQL positions).
sub _SkipFirst {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  $sql =~ s/^ \s* SELECT \s+ //ix
    or $self->throw_exception("Unrecognizable SELECT: $sql");

  my $prefix = '';
  if ($offset) {
    push @{$self->{pre_select_bind}}, [ $self->__offset_bindtype => $offset];
    $prefix = 'SKIP ? ';
  }

  push @{$self->{pre_select_bind}}, [ $self->__rows_bindtype => $rows ];
  $prefix .= 'FIRST ? ';

  return join '', 'SELECT ', $prefix, $sql, $self->_parse_rs_attrs ($rs_attrs);
}
655 | ||
# "SELECT FIRST y SKIP x ..." style dialect - the mirror image of
# _SkipFirst, with keywords (and their binds) in the opposite order.
sub _FirstSkip {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  $sql =~ s/^ \s* SELECT \s+ //ix
    or $self->throw_exception("Unrecognizable SELECT: $sql");

  push @{$self->{pre_select_bind}}, [ $self->__rows_bindtype => $rows ];
  my $prefix = 'FIRST ? ';

  if ($offset) {
    push @{$self->{pre_select_bind}}, [ $self->__offset_bindtype => $offset];
    $prefix .= 'SKIP ? ';
  }

  return join '', 'SELECT ', $prefix, $sql, $self->_parse_rs_attrs ($rs_attrs);
}
678 | ||
# Oracle ROWNUM limit dialect. Picks between the fast COUNT_STOPKEY
# double-subquery form (safe only under a provably stable order) and
# the slower BETWEEN form otherwise.
sub _RowNum {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $sq_attrs = $self->_subqueried_limit_attrs ($sql, $rs_attrs);

  my $qalias = $self->_quote ($rs_attrs->{alias});
  my $idx_name = $self->_quote ('rownum__index');
  my $order_group_having = $self->_parse_rs_attrs($rs_attrs);


  # if no offset (e.g. first page) - we can skip one of the subqueries
  if (! $offset) {
    push @{$self->{limit_bind}}, [ $self->__rows_bindtype => $rows ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
) $qalias WHERE ROWNUM <= ?
EOS
  }

  #
  # There are two ways to limit in Oracle, one vastly faster than the other
  # on large resultsets: https://decipherinfosys.wordpress.com/2007/08/09/paging-and-countstopkey-optimization/
  # However Oracle does not preserve stable ROWNUM() values
  # when called twice in the same scope. Therefore unless the resultset is
  # ordered by a unique set of columns, it is not safe to use the faster
  # method, and the slower BETWEEN query is used instead
  #
  # FIXME - this is quite expensive, and does not perform caching of any sort
  # as soon as some of the SQLMaker-inlining work becomes viable consider adding
  # some rudimentary caching support
  if (
    $rs_attrs->{order_by}
      and
    $rs_attrs->{result_source}->storage->_order_by_is_stable(
      @{$rs_attrs}{qw/from order_by where/}
    )
  ) {
    push @{$self->{limit_bind}}, [ $self->__total_bindtype => $offset + $rows ], [ $self->__offset_bindtype => $offset + 1 ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
  ) $qalias WHERE ROWNUM <= ?
) $qalias WHERE $idx_name >= ?
EOS
  }
  else {
    push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset + 1 ], [ $self->__total_bindtype => $offset + $rows ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
  ) $qalias
) $qalias WHERE $idx_name BETWEEN ? AND ?
EOS
  }
}
740 | ||
# used by _Top and _FetchFirst below
#
# Precompute everything a "skimming" limit dialect needs: inner/middle/
# outer selection lists, inner (requested) / middle (reversed) / outer
# (re-requested) order clauses, and the rendered group/having SQL.
# Offset emulation works by selecting rows+offset in the requested
# order, then re-selecting the wanted page off the *end* via a reversed
# middle order - hence the insistence on a stable order criteria.
sub _prep_for_skimming_limit {
  my ( $self, $sql, $rs_attrs ) = @_;

  # get selectors
  my $sq_attrs = $self->_subqueried_limit_attrs ($sql, $rs_attrs);

  my $requested_order = delete $rs_attrs->{order_by};
  $sq_attrs->{order_by_requested} = $self->_order_by ($requested_order);
  $sq_attrs->{grpby_having} = $self->_parse_rs_attrs ($rs_attrs);

  # without an offset things are easy
  if (! $rs_attrs->{offset}) {
    $sq_attrs->{order_by_inner} = $sq_attrs->{order_by_requested};
  }
  else {
    $sq_attrs->{quoted_rs_alias} = $self->_quote ($rs_attrs->{alias});

    # localise as we already have all the bind values we need
    local $self->{order_bind};

    # make up an order unless supplied or sanity check what we are given
    my $inner_order;
    if ($sq_attrs->{order_by_requested}) {
      # a supplied order is only usable if it is provably stable
      $self->throw_exception (
        'Unable to safely perform "skimming type" limit with supplied unstable order criteria'
      ) unless ($rs_attrs->{result_source}->schema->storage->_order_by_is_stable(
        $rs_attrs->{from},
        $requested_order,
        $rs_attrs->{where},
      ));

      $inner_order = $requested_order;
    }
    else {
      # no order supplied - fall back to ordering over the identifying
      # column set (or give up when the source does not have one)
      $inner_order = [ map
        { "$rs_attrs->{alias}.$_" }
        ( @{
          $rs_attrs->{result_source}->_identifying_column_set
            ||
          $self->throw_exception(sprintf(
            'Unable to auto-construct stable order criteria for "skimming type" limit '
          . "dialect based on source '%s'", $rs_attrs->{result_source}->name) );
        } )
      ];
    }

    $sq_attrs->{order_by_inner} = $self->_order_by ($inner_order);

    # build the middle (reversed) order so the wanted page floats to the top
    my @out_chunks;
    for my $ch ($self->_order_by_chunks ($inner_order)) {
      $ch = $ch->[0] if ref $ch eq 'ARRAY';

      ($ch, my $is_desc) = $self->_split_order_chunk($ch);

      # !NOTE! outside chunks come in reverse order ( !$is_desc )
      push @out_chunks, { ($is_desc ? '-asc' : '-desc') => \$ch };
    }

    $sq_attrs->{order_by_middle} = $self->_order_by (\@out_chunks);

    # this is the order supplement magic: order-only columns must be
    # re-selected (aliased) by the inner query so outer layers can sort on them
    $sq_attrs->{selection_middle} = $sq_attrs->{selection_outer};
    if (my $extra_order_sel = $sq_attrs->{order_supplement}) {
      for my $extra_col (sort
        { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
        keys %$extra_order_sel
      ) {
        $sq_attrs->{selection_inner} .= sprintf (', %s AS %s',
          $extra_col,
          $extra_order_sel->{$extra_col},
        );

        $sq_attrs->{selection_middle} .= ', ' . $extra_order_sel->{$extra_col};
      }

      # Whatever order bindvals there are, they will be realiased and
      # reselected, and need to show up at end of the initial inner select
      push @{$self->{select_bind}}, @{$self->{order_bind}};
    }

    # and this is order re-alias magic: rewrite the order clauses to use
    # subquery aliases (longest originals first to avoid partial clobbers)
    for my $map ($sq_attrs->{order_supplement}, $sq_attrs->{outer_renames}) {
      for my $col (sort { (length $b) <=> (length $a) } keys %{$map||{}}) {
        my $re_col = quotemeta ($col);
        $_ =~ s/$re_col/$map->{$col}/
          for ($sq_attrs->{order_by_middle}, $sq_attrs->{order_by_requested});
      }
    }
  }

  $sq_attrs;
}
834 | ||
# "SELECT TOP n" skimming limit dialect. Offset is emulated with up to
# three nested selects: innermost grabs the first rows+offset rows in
# the requested order, the middle re-selects just the wanted page via
# the reversed order, the outermost restores the requested order.
sub _Top {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $lim = $self->_prep_for_skimming_limit($sql, $rs_attrs);

  # innermost select: everything up to the end of the wanted page
  $sql = sprintf ('SELECT TOP %u %s %s %s %s',
    $rows + ($offset||0),
    $offset ? $lim->{selection_inner} : $lim->{selection_original},
    $lim->{query_leftover},
    $lim->{grpby_having},
    $lim->{order_by_inner},
  );

  # middle select (offset only): reversed order, keep just the page
  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
    $rows,
    $lim->{selection_middle},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_middle},
  ) if $offset;

  # outermost select: restore the requested order and selection
  # (skippable when nothing was reordered or re-aliased)
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s',
    $lim->{selection_outer},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_requested},
  ) if $offset and (
    $lim->{order_by_requested} or $lim->{selection_middle} ne $lim->{selection_outer}
  );

  return $sql;
}
867 | ||
# "FETCH FIRST n ROWS ONLY" skimming limit dialect - identical strategy
# to _Top, with the row count trailing the statement instead of leading it.
sub _FetchFirst {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $lim = $self->_prep_for_skimming_limit($sql, $rs_attrs);

  # innermost select: everything up to the end of the wanted page
  $sql = sprintf ('SELECT %s %s %s %s FETCH FIRST %u ROWS ONLY',
    $offset ? $lim->{selection_inner} : $lim->{selection_original},
    $lim->{query_leftover},
    $lim->{grpby_having},
    $lim->{order_by_inner},
    $rows + ($offset||0),
  );

  # middle select (offset only): reversed order, keep just the page
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s FETCH FIRST %u ROWS ONLY',
    $lim->{selection_middle},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_middle},
    $rows,
  ) if $offset;


  # outermost select: restore the requested order and selection
  # (skippable when nothing was reordered or re-aliased)
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s',
    $lim->{selection_outer},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_requested},
  ) if $offset and (
    $lim->{order_by_requested} or $lim->{selection_middle} ne $lim->{selection_outer}
  );

  return $sql;
}
901 | ||
# Last-resort limit emulation for RDBMS with no native limit syntax:
# for each candidate row a correlated COUNT(*) subquery counts how many
# rows precede it under the (mandatory, stable) order, and that count
# is compared against the requested window. Extremely slow by design.
sub _GenericSubQ {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  my $main_rsrc = $rs_attrs->{result_source};

  # Explicitly require an order_by
  # GenSubQ is slow enough as it is, just emulating things
  # like in other cases is not wise - make the user work
  # to shoot their DBA in the foot
  $self->throw_exception (
    'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, '
  . 'main-table-based order criteria.'
  ) unless $rs_attrs->{order_by};

  my $usable_order_colinfo = $main_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion(
    $rs_attrs
  );

  # the correlated COUNT(*) can only compare columns of the main source
  $self->throw_exception(
    'Generic Subquery Limit can not work with order criteria based on sources other than the main one'
  ) if (
    ! keys %{$usable_order_colinfo||{}}
      or
    grep
      { $_->{-source_alias} ne $rs_attrs->{alias} }
      (values %$usable_order_colinfo)
  );

###
###
### we need to know the directions after we figured out the above - reextract *again*
### this is eyebleed - trying to get it to work at first
  my $supplied_order = delete $rs_attrs->{order_by};

  # flatten the order spec to bare (unquoted) chunk strings
  my @order_bits = do {
    local $self->{quote_char};
    local $self->{order_bind};
    map { ref $_ ? $_->[0] : $_ } $self->_order_by_chunks ($supplied_order)
  };

  # truncate to what we'll use
  $#order_bits = ( (keys %$usable_order_colinfo) - 1 );

  # @order_bits likely will come back quoted (due to how the prefetch
  # rewriter operates)
  # Hence supplement the column_info lookup table with quoted versions
  if ($self->quote_char) {
    $usable_order_colinfo->{$self->_quote($_)} = $usable_order_colinfo->{$_}
      for keys %$usable_order_colinfo;
  }

  # calculate the condition
  my $count_tbl_alias = 'rownum__emulation';
  my $main_alias = $rs_attrs->{alias};
  my $main_tbl_name = $main_rsrc->name;

  my (@unqualified_names, @qualified_names, @is_desc, @new_order_by);

  # per order column: record direction, plain and fully-qualified names,
  # and rebuild a normalized fq order spec for the final query
  for my $bit (@order_bits) {

    ($bit, my $is_desc) = $self->_split_order_chunk($bit);

    push @is_desc, $is_desc;
    push @unqualified_names, $usable_order_colinfo->{$bit}{-colname};
    push @qualified_names, $usable_order_colinfo->{$bit}{-fq_colname};

    push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_colinfo->{$bit}{-fq_colname} };
  };

  # Build a lexicographic "precedes" condition over the order columns:
  # condition $i requires equality on columns 0..i-1 and strict
  # inequality (direction-aware) on column $i, with NULL handling for
  # nullable columns.
  my (@where_cond, @skip_colpair_stack);
  for my $i (0 .. $#order_bits) {
    my $ci = $usable_order_colinfo->{$order_bits[$i]};

    my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $main_alias);
    my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } };

    push @skip_colpair_stack, [
      { $main_col => { -ident => $subq_col } },
    ];

    # we can trust the nullability flag because
    # we already used it during _id_col_set resolution
    #
    if ($ci->{is_nullable}) {
      push @{$skip_colpair_stack[-1]}, { $main_col => undef, $subq_col=> undef };

      $cur_cond = [
        {
          ($is_desc[$i] ? $subq_col : $main_col) => { '!=', undef },
          ($is_desc[$i] ? $main_col : $subq_col) => undef,
        },
        {
          $subq_col => { '!=', undef },
          $main_col => { '!=', undef },
          -and => $cur_cond,
        },
      ];
    }

    # NOTE(review): the stray comma between '-and' and => is harmless
    # (it merely inserts an empty list element) but should be cleaned up
    push @where_cond, { '-and', => [ @skip_colpair_stack[0..$i-1], $cur_cond ] };
  }

  # reuse the sqlmaker WHERE, this will not be returning binds
  my $counted_where = do {
    local $self->{where_bind};
    $self->where(\@where_cond);
  };

  # construct the rownum condition by hand
  my $rownum_cond;
  if ($offset) {
    $rownum_cond = 'BETWEEN ? AND ?';
    push @{$self->{limit_bind}},
      [ $self->__offset_bindtype => $offset ],
      [ $self->__total_bindtype => $offset + $rows - 1]
    ;
  }
  else {
    $rownum_cond = '< ?';
    push @{$self->{limit_bind}},
      [ $self->__rows_bindtype => $rows ]
    ;
  }

  # and what we will order by inside
  my $inner_order_sql = do {
    local $self->{order_bind};

    my $s = $self->_order_by (\@new_order_by);

    $self->throw_exception('Inner gensubq order may not contain binds... something went wrong')
      if @{$self->{order_bind}};

    $s;
  };

### resume originally scheduled programming
###
###

  # we need to supply the order for the supplements to be properly calculated
  my $sq_attrs = $self->_subqueried_limit_attrs (
    $sql, { %$rs_attrs, order_by => \@new_order_by }
  );

  my $in_sel = $sq_attrs->{selection_inner};

  # add the order supplement (if any) as this is what will be used for the outer WHERE
  $in_sel .= ", $_" for sort keys %{$sq_attrs->{order_supplement}};

  my $group_having_sql = $self->_parse_rs_attrs($rs_attrs);


  return sprintf ("
SELECT $sq_attrs->{selection_outer}
  FROM (
    SELECT $in_sel $sq_attrs->{query_leftover}${group_having_sql}
  ) %s
WHERE ( SELECT COUNT(*) FROM %s %s $counted_where ) $rownum_cond
$inner_order_sql
", map { $self->_quote ($_) } (
    $rs_attrs->{alias},
    $main_tbl_name,
    $count_tbl_alias,
  ));
}
1068 | ||
1069 | ||
1070 | # !!! THIS IS ALSO HORRIFIC !!! /me ashamed | |
1071 | # | |
1072 | # Generates inner/outer select lists for various limit dialects | |
1073 | # which result in one or more subqueries (e.g. RNO, Top, RowNum) | |
1074 | # Any non-main-table columns need to have their table qualifier | |
1075 | # turned into a column alias (otherwise names in subqueries clash | |
1076 | # and/or lose their source table) | |
1077 | # | |
1078 | # Returns mangled proto-sql, inner/outer strings of SQL QUOTED selectors | |
1079 | # with aliases (to be used in whatever select statement), and an alias | |
1080 | # index hashref of QUOTED SEL => QUOTED ALIAS pairs (to maybe be used | |
1081 | # for string-subst higher up). | |
1082 | # If an order_by is supplied, the inner select needs to bring out columns | |
1083 | # used in implicit (non-selected) orders, and the order condition itself | |
1084 | # needs to be realiased to the proper names in the outer query. Thus we | |
1085 | # also return a hashref (order doesn't matter) of QUOTED EXTRA-SEL => | |
1086 | # QUOTED ALIAS pairs, which is a list of extra selectors that do *not* | |
1087 | # exist in the original select list | |
1088 | sub _subqueried_limit_attrs { | |
1089 | my ($self, $proto_sql, $rs_attrs) = @_; | |
1090 | ||
1091 | $self->throw_exception( | |
1092 | 'Limit dialect implementation usable only in the context of DBIC (missing $rs_attrs)' | |
1093 | ) unless ref ($rs_attrs) eq 'HASH'; | |
1094 | ||
1095 | # mangle the input sql as we will be replacing the selector entirely | |
1096 | unless ( | |
1097 | $rs_attrs->{_selector_sql} | |
1098 | and | |
1099 | $proto_sql =~ s/^ \s* SELECT \s* \Q$rs_attrs->{_selector_sql}//ix | |
1100 | ) { | |
1101 | $self->throw_exception("Unrecognizable SELECT: $proto_sql"); | |
1102 | } | |
1103 | ||
1104 | my ($re_sep, $re_alias) = map { quotemeta $_ } ( $self->{name_sep}, $rs_attrs->{alias} ); | |
1105 | ||
1106 | # correlate select and as, build selection index | |
1107 | my (@sel, $in_sel_index); | |
1108 | for my $i (0 .. $#{$rs_attrs->{select}}) { | |
1109 | ||
1110 | my $s = $rs_attrs->{select}[$i]; | |
1111 | my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef; | |
1112 | ||
1113 | # we throw away the @bind here deliberately | |
1114 | my ($sql_sel) = $self->_recurse_fields ($s); | |
1115 | ||
1116 | push @sel, { | |
1117 | arg => $s, | |
1118 | sql => $sql_sel, | |
1119 | unquoted_sql => do { | |
1120 | local $self->{quote_char}; | |
1121 | ($self->_recurse_fields ($s))[0]; # ignore binds again | |
1122 | }, | |
1123 | as => | |
1124 | $sql_alias | |
1125 | || | |
1126 | $rs_attrs->{as}[$i] | |
1127 | || | |
1128 | $self->throw_exception("Select argument $i ($s) without corresponding 'as'") | |
1129 | , | |
1130 | }; | |
1131 | ||
1132 | # anything with a placeholder in it needs re-selection | |
1133 | $in_sel_index->{$sql_sel}++ unless $sql_sel =~ / (?: ^ | \W ) \? (?: \W | $ ) /x; | |
1134 | ||
1135 | $in_sel_index->{$self->_quote ($sql_alias)}++ if $sql_alias; | |
1136 | ||
1137 | # record unqualified versions too, so we do not have | |
1138 | # to reselect the same column twice (in qualified and | |
1139 | # unqualified form) | |
1140 | if (! ref $s && $sql_sel =~ / $re_sep (.+) $/x) { | |
1141 | $in_sel_index->{$1}++; | |
1142 | } | |
1143 | } | |
1144 | ||
1145 | ||
1146 | # re-alias and remove any name separators from aliases, | |
1147 | # unless we are dealing with the current source alias | |
1148 | # (which will transcend the subqueries as it is necessary | |
1149 | # for possible further chaining) | |
1150 | # same for anything we do not recognize | |
1151 | my ($sel, $renamed); | |
1152 | for my $node (@sel) { | |
1153 | push @{$sel->{original}}, $node->{sql}; | |
1154 | ||
1155 | if ( | |
1156 | ! $in_sel_index->{$node->{sql}} | |
1157 | or | |
1158 | $node->{as} =~ / (?<! ^ $re_alias ) \. /x | |
1159 | or | |
1160 | $node->{unquoted_sql} =~ / (?<! ^ $re_alias ) $re_sep /x | |
1161 | ) { | |
1162 | $node->{as} = $self->_unqualify_colname($node->{as}); | |
1163 | my $quoted_as = $self->_quote($node->{as}); | |
1164 | push @{$sel->{inner}}, sprintf '%s AS %s', $node->{sql}, $quoted_as; | |
1165 | push @{$sel->{outer}}, $quoted_as; | |
1166 | $renamed->{$node->{sql}} = $quoted_as; | |
1167 | } | |
1168 | else { | |
1169 | push @{$sel->{inner}}, $node->{sql}; | |
1170 | push @{$sel->{outer}}, $self->_quote (ref $node->{arg} ? $node->{as} : $node->{arg}); | |
1171 | } | |
1172 | } | |
1173 | ||
1174 | # see if the order gives us anything | |
1175 | my $extra_order_sel; | |
1176 | for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) { | |
1177 | # order with bind | |
1178 | $chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY'; | |
1179 | ($chunk) = $self->_split_order_chunk($chunk); | |
1180 | ||
1181 | next if $in_sel_index->{$chunk}; | |
1182 | ||
1183 | $extra_order_sel->{$chunk} ||= $self->_quote ( | |
1184 | 'ORDER__BY__' . sprintf '%03d', scalar keys %{$extra_order_sel||{}} | |
1185 | ); | |
1186 | } | |
1187 | ||
1188 | return { | |
1189 | query_leftover => $proto_sql, | |
1190 | (map {( "selection_$_" => join (', ', @{$sel->{$_}} ) )} keys %$sel ), | |
1191 | outer_renames => $renamed, | |
1192 | order_supplement => $extra_order_sel, | |
1193 | }; | |
1194 | } | |
1195 | ||
sub _unqualify_colname {
  my ($self, $colname) = @_;

  # Replace each name-separator dot with a double underscore:
  # "me.artistid" becomes the subquery-safe alias "me__artistid"
  return join '__', split /\./, $colname, -1;
}
1201 | ||
1202 | =head1 FURTHER QUESTIONS? | |
1203 | ||
1204 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. | |
1205 | ||
1206 | =head1 COPYRIGHT AND LICENSE | |
1207 | ||
1208 | This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE> | |
1209 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can | |
1210 | redistribute it and/or modify it under the same terms as the | |
1211 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. | |
1212 | ||
1213 | =cut | |
1214 | ||
1215 | 1; |
0 | # because of how loose dep specs are, we need to keep squatting | |
1 | # on the CPAN face - FOREVER. | |
0 | 2 | package DBIx::Class::SQLMaker::LimitDialects; |
1 | 3 | |
2 | 4 | use warnings; |
3 | 5 | use strict; |
4 | 6 | |
5 | use List::Util 'first'; | |
6 | use namespace::clean; | |
7 | ## | |
8 | ## Compat in case someone is using these in the wild... | |
9 | ## | |
7 | 10 | |
# constants are used not only here, but also in comparison tests
# NOTE(review): two generations of these subs are present in this file.
# In plain Perl the later (deprecation-shim) definitions replace the
# earlier constant ones at compile time - confirm the duplication is
# intentional before removing either set.
sub __rows_bindtype () {
  +{ sqlt_datatype => 'integer' }
}
sub __offset_bindtype () {
  +{ sqlt_datatype => 'integer' }
}
sub __total_bindtype () {
  +{ sqlt_datatype => 'integer' }
}

# Shared deprecation shim: determines which __*_bindtype sub was invoked
# (via the caller() stack), emits a loud diagnostic pointing users at the
# new home of these constants, then delegates to the real implementation
# in DBIx::Class::SQLMaker::ClassicExtensions.
my $sigh = sub {
  require DBIx::Class::_Util;
  require DBIx::Class::SQLMaker;

  # name of the constant-sub that called us, with the package part stripped
  my( $meth ) = (caller(1))[3] =~ /([^:]+)$/;

  DBIx::Class::_Util::emit_loud_diag(
    skip_frames => 1,
    msg => "The $meth() constant is now provided by DBIx::Class::SQLMaker::ClassicExtensions: please adjust your code"
  );

  DBIx::Class::SQLMaker::ClassicExtensions->$meth;
};

# each former constant now warns and proxies through the shim above
sub __rows_bindtype { $sigh->() }
sub __offset_bindtype { $sigh->() }
sub __total_bindtype { $sigh->() }
28 | ||
29 | 1; | |
30 | ||
31 | __END__ | |
18 | 32 | |
19 | 33 | =head1 NAME |
20 | 34 | |
21 | DBIx::Class::SQLMaker::LimitDialects - SQL::Abstract::Limit-like functionality for DBIx::Class::SQLMaker | |
35 | DBIx::Class::SQLMaker::LimitDialects - SQL::Abstract::Limit-like functionality in DBIx::Class::SQLMaker | |
22 | 36 | |
23 | 37 | =head1 DESCRIPTION |
24 | 38 | |
25 | This module replicates a lot of the functionality originally found in | |
26 | L<SQL::Abstract::Limit>. While simple limits would work as-is, the more | |
27 | complex dialects that require e.g. subqueries could not be reliably | |
28 | implemented without taking full advantage of the metadata locked within | |
29 | L<DBIx::Class::ResultSource> classes. After reimplementation of close to | |
30 | 80% of the L<SQL::Abstract::Limit> functionality it was deemed more | |
31 | practical to simply make an independent DBIx::Class-specific limit-dialect | |
32 | provider. | |
39 | DBIC's SQLMaker stack replicates and surpasses all of the functionality | |
40 | originally found in L<SQL::Abstract::Limit>. While simple limits would | |
41 | work as-is, the more complex dialects that require e.g. subqueries could | |
42 | not be reliably implemented without taking full advantage of the metadata | |
43 | locked within L<DBIx::Class::ResultSource> classes. After reimplementation | |
44 | of close to 80% of the L<SQL::Abstract::Limit> functionality it was deemed | |
45 | more practical to simply make an independent DBIx::Class-specific | |
46 | limit-dialect provider. | |
33 | 47 | |
34 | 48 | =head1 SQL LIMIT DIALECTS |
35 | 49 | |
46 | 60 | |
47 | 61 | Supported by B<PostgreSQL> and B<SQLite> |
48 | 62 | |
49 | =cut | |
sub _LimitOffset {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  # group/having/order fragments come first, then the limit clause proper
  my $limited_sql = $sql . $self->_parse_rs_attrs($rs_attrs) . ' LIMIT ?';
  push @{ $self->{limit_bind} }, [ $self->__rows_bindtype => $rows ];

  # OFFSET is emitted only when a non-zero offset was requested
  if ($offset) {
    $limited_sql .= ' OFFSET ?';
    push @{ $self->{limit_bind} }, [ $self->__offset_bindtype => $offset ];
  }

  return $limited_sql;
}
60 | ||
61 | 63 | =head2 LimitXY |
62 | 64 | |
63 | 65 | SELECT ... LIMIT $offset, $limit |
64 | 66 | |
65 | 67 | Supported by B<MySQL> and any L<SQL::Statement> based DBD |
66 | ||
67 | =cut | |
sub _LimitXY {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  # MySQL-style "LIMIT $offset, $rows" - the offset part is optional
  my $placeholders = '?';
  if ($offset) {
    $placeholders = '?, ?';
    push @{ $self->{limit_bind} }, [ $self->__offset_bindtype => $offset ];
  }
  push @{ $self->{limit_bind} }, [ $self->__rows_bindtype => $rows ];

  return $sql . $self->_parse_rs_attrs($rs_attrs) . " LIMIT $placeholders";
}
80 | 68 | |
81 | 69 | =head2 RowNumberOver |
82 | 70 | |
90 | 78 | ANSI standard Limit/Offset implementation. Supported by B<DB2> and |
91 | 79 | B<< MSSQL >= 2005 >>. |
92 | 80 | |
93 | =cut | |
# ROW_NUMBER()-based limit emulation (the ANSI windowing approach).
# Wraps the selector-stripped query in two subselects: the innermost runs
# the original selection, the middle one tags each row with
# ROW_NUMBER() OVER( <order> ) AS rno__row__index, and the outermost keeps
# only rows whose index falls between the two placeholders
# ( offset+1 .. offset+rows, both inclusive ).
# Binds are accumulated in $self->{select_bind} / $self->{limit_bind}.
sub _RowNumberOver {
  my ($self, $sql, $rs_attrs, $rows, $offset ) = @_;

  # get selectors, and scan the order_by (if any)
  my $sq_attrs = $self->_subqueried_limit_attrs ( $sql, $rs_attrs );

  # make up an order if none exists
  my $requested_order = (delete $rs_attrs->{order_by}) || $self->_rno_default_order;

  # the order binds (if any) will need to go at the end of the entire inner select
  local $self->{order_bind};
  my $rno_ord = $self->_order_by ($requested_order);
  push @{$self->{select_bind}}, @{$self->{order_bind}};

  # this is the order supplement magic - order criteria not already present
  # in the selection must be re-selected by the inner query, so that the
  # OVER() clause in the middle query can reference them by alias
  my $mid_sel = $sq_attrs->{selection_outer};
  if (my $extra_order_sel = $sq_attrs->{order_supplement}) {
    for my $extra_col (sort
      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
      keys %$extra_order_sel
    ) {
      $sq_attrs->{selection_inner} .= sprintf (', %s AS %s',
        $extra_col,
        $extra_order_sel->{$extra_col},
      );
    }
  }

  # and this is order re-alias magic - rewrite the ORDER BY in terms of the
  # aliases visible inside the subselects (longest names substituted first,
  # so a shorter key never clobbers part of a longer one)
  for my $map ($sq_attrs->{order_supplement}, $sq_attrs->{outer_renames}) {
    for my $col (sort { (length $b) <=> (length $a) } keys %{$map||{}} ) {
      my $re_col = quotemeta ($col);
      $rno_ord =~ s/$re_col/$map->{$col}/;
    }
  }

  # whatever is left of the order_by (only where is processed at this point)
  my $group_having = $self->_parse_rs_attrs($rs_attrs);

  my $qalias = $self->_quote ($rs_attrs->{alias});
  my $idx_name = $self->_quote ('rno__row__index');

  # bind order matches placeholder order in the heredoc below: >= then <=
  push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset + 1], [ $self->__total_bindtype => $offset + $rows ];

  return <<EOS;

SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $mid_sel, ROW_NUMBER() OVER( $rno_ord ) AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${group_having}
  ) $qalias
) $qalias WHERE $idx_name >= ? AND $idx_name <= ?

EOS

}
149 | ||
# some databases are happy with OVER (), some need OVER (ORDER BY (SELECT (1)) )
# Returning undef means "no implied order" here.
# NOTE(review): presumably overridden by RDBMS-specific subclasses that need
# an explicit clause inside OVER() - confirm against the storage drivers.
sub _rno_default_order {
  return undef;
}
154 | ||
155 | 81 | =head2 SkipFirst |
156 | 82 | |
157 | 83 | SELECT SKIP $offset FIRST $limit * FROM ... |
159 | 85 | Supported by B<Informix>, almost like LimitOffset. According to |
160 | 86 | L<SQL::Abstract::Limit> C<... SKIP $offset LIMIT $limit ...> is also supported. |
161 | 87 | |
162 | =cut | |
sub _SkipFirst {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  # Informix wants the SKIP/FIRST specification between the SELECT keyword
  # and the selector list - chop off the leading SELECT first
  $sql =~ s/^ \s* SELECT \s+ //ix
    or $self->throw_exception("Unrecognizable SELECT: $sql");

  # SKIP (the offset) is emitted only when requested; its bind goes in first
  my $skip_clause = '';
  if ($offset) {
    push @{ $self->{pre_select_bind} }, [ $self->__offset_bindtype => $offset ];
    $skip_clause = 'SKIP ? ';
  }

  push @{ $self->{pre_select_bind} }, [ $self->__rows_bindtype => $rows ];

  return 'SELECT '
    . $skip_clause
    . 'FIRST ? '
    . $sql
    . $self->_parse_rs_attrs($rs_attrs)
  ;
}
185 | ||
186 | 88 | =head2 FirstSkip |
187 | 89 | |
188 | 90 | SELECT FIRST $limit SKIP $offset * FROM ... |
189 | 91 | |
190 | 92 | Supported by B<Firebird/Interbase>, reverse of SkipFirst. According to |
191 | 93 | L<SQL::Abstract::Limit> C<... ROWS $limit TO $offset ...> is also supported. |
192 | ||
193 | =cut | |
sub _FirstSkip {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  # Firebird/Interbase syntax mirrors Informix's SkipFirst: FIRST comes
  # before SKIP, directly after the SELECT keyword
  $sql =~ s/^ \s* SELECT \s+ //ix
    or $self->throw_exception("Unrecognizable SELECT: $sql");

  # rows bind always goes in first, matching the FIRST placeholder
  push @{ $self->{pre_select_bind} }, [ $self->__rows_bindtype => $rows ];

  my $skip_clause = '';
  if ($offset) {
    push @{ $self->{pre_select_bind} }, [ $self->__offset_bindtype => $offset ];
    $skip_clause = 'SKIP ? ';
  }

  return 'SELECT '
    . 'FIRST ? '
    . $skip_clause
    . $sql
    . $self->_parse_rs_attrs($rs_attrs)
  ;
}
216 | ||
217 | 94 | |
218 | 95 | =head2 RowNum |
219 | 96 | |
241 | 118 | |
242 | 119 | Supported by B<Oracle>. |
243 | 120 | |
244 | =cut | |
# ROWNUM-based limit emulation for Oracle. Three query shapes:
#  - no offset: a single wrapping subselect with "ROWNUM <= ?"
#  - offset with a provably stable order: the faster nested form
#    ( inner "ROWNUM <= ?", outer "index >= ?" )
#  - otherwise: the slower "BETWEEN ? AND ?" form, which is safe even
#    without a stable row order
sub _RowNum {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $sq_attrs = $self->_subqueried_limit_attrs ($sql, $rs_attrs);

  my $qalias = $self->_quote ($rs_attrs->{alias});
  my $idx_name = $self->_quote ('rownum__index');
  my $order_group_having = $self->_parse_rs_attrs($rs_attrs);


  # if no offset (e.g. first page) - we can skip one of the subqueries
  if (! $offset) {
    push @{$self->{limit_bind}}, [ $self->__rows_bindtype => $rows ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
) $qalias WHERE ROWNUM <= ?
EOS
  }

  #
  # There are two ways to limit in Oracle, one vastly faster than the other
  # on large resultsets: https://decipherinfosys.wordpress.com/2007/08/09/paging-and-countstopkey-optimization/
  # However Oracle does not preserve stable ROWNUM() values
  # when called twice in the same scope. Therefore unless the resultset is
  # ordered by a unique set of columns, it is not safe to use the faster
  # method, and the slower BETWEEN query is used instead
  #
  # FIXME - this is quite expensive, and does not perform caching of any sort
  # as soon as some of the DQ work becomes viable consider switching this
  # over
  if (
    $rs_attrs->{order_by}
      and
    $rs_attrs->{result_source}->storage->_order_by_is_stable(
      @{$rs_attrs}{qw/from order_by where/}
    )
  ) {
    # bind order follows placeholder order: total ( ROWNUM <= ? ) first
    push @{$self->{limit_bind}}, [ $self->__total_bindtype => $offset + $rows ], [ $self->__offset_bindtype => $offset + 1 ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
  ) $qalias WHERE ROWNUM <= ?
) $qalias WHERE $idx_name >= ?
EOS
  }
  else {
    # bind order follows placeholder order: BETWEEN <offset+1> AND <total>
    push @{$self->{limit_bind}}, [ $self->__offset_bindtype => $offset + 1 ], [ $self->__total_bindtype => $offset + $rows ];

    return <<EOS;
SELECT $sq_attrs->{selection_outer} FROM (
  SELECT $sq_attrs->{selection_outer}, ROWNUM AS $idx_name FROM (
    SELECT $sq_attrs->{selection_inner} $sq_attrs->{query_leftover}${order_group_having}
  ) $qalias
) $qalias WHERE $idx_name BETWEEN ? AND ?
EOS
  }
}
306 | ||
# used by _Top and _FetchFirst below
#
# Prepares the selection / order fragments needed by the "skimming" limit
# dialects: they emulate an offset by selecting ($offset + $rows) rows in
# the requested order, re-selecting the last $rows of those in *reverse*
# order, and finally restoring the original order in an outermost select.
# Returns the $sq_attrs hashref from _subqueried_limit_attrs, augmented
# with order_by_requested / order_by_inner / order_by_middle /
# grpby_having / selection_middle / quoted_rs_alias as applicable.
sub _prep_for_skimming_limit {
  my ( $self, $sql, $rs_attrs ) = @_;

  # get selectors
  my $sq_attrs = $self->_subqueried_limit_attrs ($sql, $rs_attrs);

  my $requested_order = delete $rs_attrs->{order_by};
  $sq_attrs->{order_by_requested} = $self->_order_by ($requested_order);
  $sq_attrs->{grpby_having} = $self->_parse_rs_attrs ($rs_attrs);

  # without an offset things are easy
  if (! $rs_attrs->{offset}) {
    $sq_attrs->{order_by_inner} = $sq_attrs->{order_by_requested};
  }
  else {
    $sq_attrs->{quoted_rs_alias} = $self->_quote ($rs_attrs->{alias});

    # localise as we already have all the bind values we need
    local $self->{order_bind};

    # make up an order unless supplied or sanity check what we are given
    my $inner_order;
    if ($sq_attrs->{order_by_requested}) {
      $self->throw_exception (
        'Unable to safely perform "skimming type" limit with supplied unstable order criteria'
      ) unless ($rs_attrs->{result_source}->schema->storage->_order_by_is_stable(
        $rs_attrs->{from},
        $requested_order,
        $rs_attrs->{where},
      ));

      $inner_order = $requested_order;
    }
    else {
      # no order supplied - fall back to an identifying column set of the
      # main source, which is stable by definition
      $inner_order = [ map
        { "$rs_attrs->{alias}.$_" }
        ( @{
          $rs_attrs->{result_source}->_identifying_column_set
            ||
          $self->throw_exception(sprintf(
            'Unable to auto-construct stable order criteria for "skimming type" limit '
          . "dialect based on source '%s'", $rs_attrs->{result_source}->name) );
        } )
      ];
    }

    $sq_attrs->{order_by_inner} = $self->_order_by ($inner_order);

    my @out_chunks;
    for my $ch ($self->_order_by_chunks ($inner_order)) {
      $ch = $ch->[0] if ref $ch eq 'ARRAY';

      ($ch, my $is_desc) = $self->_split_order_chunk($ch);

      # !NOTE! outside chunks come in reverse order ( !$is_desc )
      push @out_chunks, { ($is_desc ? '-asc' : '-desc') => \$ch };
    }

    $sq_attrs->{order_by_middle} = $self->_order_by (\@out_chunks);

    # this is the order supplement magic - re-select any order criteria
    # not already part of the selection, so the middle/outer queries can
    # reference them by alias
    $sq_attrs->{selection_middle} = $sq_attrs->{selection_outer};
    if (my $extra_order_sel = $sq_attrs->{order_supplement}) {
      for my $extra_col (sort
        { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
        keys %$extra_order_sel
      ) {
        $sq_attrs->{selection_inner} .= sprintf (', %s AS %s',
          $extra_col,
          $extra_order_sel->{$extra_col},
        );

        $sq_attrs->{selection_middle} .= ', ' . $extra_order_sel->{$extra_col};
      }

      # Whatever order bindvals there are, they will be realiased and
      # reselected, and need to show up at end of the initial inner select
      push @{$self->{select_bind}}, @{$self->{order_bind}};
    }

    # and this is order re-alias magic - longest names substituted first,
    # so a shorter key never clobbers part of a longer one
    for my $map ($sq_attrs->{order_supplement}, $sq_attrs->{outer_renames}) {
      for my $col (sort { (length $b) <=> (length $a) } keys %{$map||{}}) {
        my $re_col = quotemeta ($col);
        $_ =~ s/$re_col/$map->{$col}/
          for ($sq_attrs->{order_by_middle}, $sq_attrs->{order_by_requested});
      }
    }
  }

  $sq_attrs;
}
400 | ||
401 | 121 | =head2 Top |
402 | 122 | |
403 | 123 | SELECT * FROM |
414 | 134 | |
415 | 135 | Due to its implementation, this limit dialect returns B<incorrect results> |
416 | 136 | when $limit+$offset > total amount of rows in the resultset. |
417 | ||
418 | =cut | |
419 | ||
# "Skimming" limit via SELECT TOP (see _prep_for_skimming_limit):
#  1. inner query selects TOP ($rows + $offset) in the requested order
#  2. when an offset is present, a middle query selects TOP $rows of
#     those in *reverse* order (skimming the tail end of the window)
#  3. a final outer query restores the originally requested order and
#     selection, when needed
sub _Top {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $lim = $self->_prep_for_skimming_limit($sql, $rs_attrs);

  # stage 1 - plain TOP covering the entire window
  $sql = sprintf ('SELECT TOP %u %s %s %s %s',
    $rows + ($offset||0),
    $offset ? $lim->{selection_inner} : $lim->{selection_original},
    $lim->{query_leftover},
    $lim->{grpby_having},
    $lim->{order_by_inner},
  );

  # stage 2 (offset only) - skim the tail of the window in reverse order
  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
    $rows,
    $lim->{selection_middle},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_middle},
  ) if $offset;

  # stage 3 (offset only, and only when order/selection must be restored)
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s',
    $lim->{selection_outer},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_requested},
  ) if $offset and (
    $lim->{order_by_requested} or $lim->{selection_middle} ne $lim->{selection_outer}
  );

  return $sql;
}
452 | 137 | |
453 | 138 | =head2 FetchFirst |
454 | 139 | |
469 | 154 | |
470 | 155 | Due to its implementation, this limit dialect returns B<incorrect results> |
471 | 156 | when $limit+$offset > total amount of rows in the resultset. |
472 | ||
473 | =cut | |
474 | ||
# "Skimming" limit via FETCH FIRST n ROWS ONLY - structurally identical
# to _Top above, but with the row-count clause at the end of each select:
#  1. inner query fetches ($rows + $offset) rows in the requested order
#  2. when an offset is present, a middle query fetches $rows of those
#     in *reverse* order (skimming the tail end of the window)
#  3. a final outer query restores the originally requested order and
#     selection, when needed
sub _FetchFirst {
  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;

  my $lim = $self->_prep_for_skimming_limit($sql, $rs_attrs);

  # stage 1 - fetch the entire window
  $sql = sprintf ('SELECT %s %s %s %s FETCH FIRST %u ROWS ONLY',
    $offset ? $lim->{selection_inner} : $lim->{selection_original},
    $lim->{query_leftover},
    $lim->{grpby_having},
    $lim->{order_by_inner},
    $rows + ($offset||0),
  );

  # stage 2 (offset only) - skim the tail of the window in reverse order
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s FETCH FIRST %u ROWS ONLY',
    $lim->{selection_middle},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_middle},
    $rows,
  ) if $offset;


  # stage 3 (offset only, and only when order/selection must be restored)
  $sql = sprintf ('SELECT %s FROM ( %s ) %s %s',
    $lim->{selection_outer},
    $sql,
    $lim->{quoted_rs_alias},
    $lim->{order_by_requested},
  ) if $offset and (
    $lim->{order_by_requested} or $lim->{selection_middle} ne $lim->{selection_outer}
  );

  return $sql;
}
508 | 157 | |
509 | 158 | =head2 GenericSubQ |
510 | 159 | |
527 | 176 | |
528 | 177 | Currently used by B<Sybase ASE>, due to lack of any other option. |
529 | 178 | |
530 | =cut | |
# Generic subquery-counting limit emulation (currently used by Sybase ASE
# per the POD above). For each candidate row, a correlated COUNT(*)
# subquery against the same table counts how many rows sort strictly
# before it under the user-supplied stable order; only rows whose
# resulting "position" falls within the requested window are kept.
sub _GenericSubQ {
  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;

  my $main_rsrc = $rs_attrs->{result_source};

  # Explicitly require an order_by
  # GenSubQ is slow enough as it is, just emulating things
  # like in other cases is not wise - make the user work
  # to shoot their DBA in the foot
  $self->throw_exception (
    'Generic Subquery Limit does not work on resultsets without an order. Provide a stable, '
  . 'main-table-based order criteria.'
  ) unless $rs_attrs->{order_by};

  my $usable_order_colinfo = $main_rsrc->storage->_extract_colinfo_of_stable_main_source_order_by_portion(
    $rs_attrs
  );

  $self->throw_exception(
    'Generic Subquery Limit can not work with order criteria based on sources other than the main one'
  ) if (
    ! keys %{$usable_order_colinfo||{}}
      or
    grep
      { $_->{-source_alias} ne $rs_attrs->{alias} }
      (values %$usable_order_colinfo)
  );

###
###
### we need to know the directions after we figured out the above - reextract *again*
### this is eyebleed - trying to get it to work at first
  my $supplied_order = delete $rs_attrs->{order_by};

  # unquoted, bind-free order chunks, in the order the user supplied them
  my @order_bits = do {
    local $self->{quote_char};
    local $self->{order_bind};
    map { ref $_ ? $_->[0] : $_ } $self->_order_by_chunks ($supplied_order)
  };

  # truncate to what we'll use
  $#order_bits = ( (keys %$usable_order_colinfo) - 1 );

  # @order_bits likely will come back quoted (due to how the prefetch
  # rewriter operates
  # Hence supplement the column_info lookup table with quoted versions
  if ($self->quote_char) {
    $usable_order_colinfo->{$self->_quote($_)} = $usable_order_colinfo->{$_}
      for keys %$usable_order_colinfo;
  }

  # calculate the condition
  my $count_tbl_alias = 'rownum__emulation';
  my $main_alias = $rs_attrs->{alias};
  my $main_tbl_name = $main_rsrc->name;

  # NOTE(review): @unqualified_names and @qualified_names appear to be
  # write-only below - candidates for removal after confirmation
  my (@unqualified_names, @qualified_names, @is_desc, @new_order_by);

  for my $bit (@order_bits) {

    ($bit, my $is_desc) = $self->_split_order_chunk($bit);

    push @is_desc, $is_desc;
    push @unqualified_names, $usable_order_colinfo->{$bit}{-colname};
    push @qualified_names, $usable_order_colinfo->{$bit}{-fq_colname};

    push @new_order_by, { ($is_desc ? '-desc' : '-asc') => $usable_order_colinfo->{$bit}{-fq_colname} };
  };

  # build the per-column "sorts before" conditions: for column $i the row
  # must tie on all preceding order columns and strictly precede on $i
  my (@where_cond, @skip_colpair_stack);
  for my $i (0 .. $#order_bits) {
    my $ci = $usable_order_colinfo->{$order_bits[$i]};

    my ($subq_col, $main_col) = map { "$_.$ci->{-colname}" } ($count_tbl_alias, $main_alias);
    my $cur_cond = { $subq_col => { ($is_desc[$i] ? '>' : '<') => { -ident => $main_col } } };

    push @skip_colpair_stack, [
      { $main_col => { -ident => $subq_col } },
    ];

    # we can trust the nullability flag because
    # we already used it during _id_col_set resolution
    #
    if ($ci->{is_nullable}) {
      push @{$skip_colpair_stack[-1]}, { $main_col => undef, $subq_col=> undef };

      # NULLs need explicit handling: a NULL sorts before/after non-NULLs
      # depending on direction, and NULL never compares via </>
      $cur_cond = [
        {
          ($is_desc[$i] ? $subq_col : $main_col) => { '!=', undef },
          ($is_desc[$i] ? $main_col : $subq_col) => undef,
        },
        {
          $subq_col => { '!=', undef },
          $main_col => { '!=', undef },
          -and => $cur_cond,
        },
      ];
    }

    push @where_cond, { '-and', => [ @skip_colpair_stack[0..$i-1], $cur_cond ] };
  }

  # reuse the sqlmaker WHERE, this will not be returning binds
  my $counted_where = do {
    local $self->{where_bind};
    $self->where(\@where_cond);
  };

  # construct the rownum condition by hand
  my $rownum_cond;
  if ($offset) {
    $rownum_cond = 'BETWEEN ? AND ?';
    push @{$self->{limit_bind}},
      [ $self->__offset_bindtype => $offset ],
      [ $self->__total_bindtype => $offset + $rows - 1]
    ;
  }
  else {
    $rownum_cond = '< ?';
    push @{$self->{limit_bind}},
      [ $self->__rows_bindtype => $rows ]
    ;
  }

  # and what we will order by inside
  my $inner_order_sql = do {
    local $self->{order_bind};

    my $s = $self->_order_by (\@new_order_by);

    $self->throw_exception('Inner gensubq order may not contain binds... something went wrong')
      if @{$self->{order_bind}};

    $s;
  };

### resume originally scheduled programming
###
###

  # we need to supply the order for the supplements to be properly calculated
  my $sq_attrs = $self->_subqueried_limit_attrs (
    $sql, { %$rs_attrs, order_by => \@new_order_by }
  );

  my $in_sel = $sq_attrs->{selection_inner};

  # add the order supplement (if any) as this is what will be used for the outer WHERE
  $in_sel .= ", $_" for sort keys %{$sq_attrs->{order_supplement}};

  my $group_having_sql = $self->_parse_rs_attrs($rs_attrs);


  return sprintf ("
    SELECT $sq_attrs->{selection_outer}
      FROM (
        SELECT $in_sel $sq_attrs->{query_leftover}${group_having_sql}
      ) %s
    WHERE ( SELECT COUNT(*) FROM %s %s $counted_where ) $rownum_cond
    $inner_order_sql
  ", map { $self->_quote ($_) } (
    $rs_attrs->{alias},
    $main_tbl_name,
    $count_tbl_alias,
  ));
}
697 | ||
698 | ||
699 | # !!! THIS IS ALSO HORRIFIC !!! /me ashamed | |
700 | # | |
701 | # Generates inner/outer select lists for various limit dialects | |
702 | # which result in one or more subqueries (e.g. RNO, Top, RowNum) | |
703 | # Any non-main-table columns need to have their table qualifier | |
704 | # turned into a column alias (otherwise names in subqueries clash | |
705 | # and/or lose their source table) | |
706 | # | |
707 | # Returns mangled proto-sql, inner/outer strings of SQL QUOTED selectors | |
708 | # with aliases (to be used in whatever select statement), and an alias | |
709 | # index hashref of QUOTED SEL => QUOTED ALIAS pairs (to maybe be used | |
710 | # for string-subst higher up). | |
711 | # If an order_by is supplied, the inner select needs to bring out columns | |
712 | # used in implicit (non-selected) orders, and the order condition itself | |
713 | # needs to be realiased to the proper names in the outer query. Thus we | |
714 | # also return a hashref (order doesn't matter) of QUOTED EXTRA-SEL => | |
715 | # QUOTED ALIAS pairs, which is a list of extra selectors that do *not* | |
716 | # exist in the original select list | |
sub _subqueried_limit_attrs {
  my ($self, $proto_sql, $rs_attrs) = @_;

  $self->throw_exception(
    'Limit dialect implementation usable only in the context of DBIC (missing $rs_attrs)'
  ) unless ref ($rs_attrs) eq 'HASH';

  # mangle the input sql as we will be replacing the selector entirely;
  # the leftover (FROM ... onwards) is returned as query_leftover below
  unless (
    $rs_attrs->{_selector_sql}
      and
    $proto_sql =~ s/^ \s* SELECT \s* \Q$rs_attrs->{_selector_sql}//ix
  ) {
    $self->throw_exception("Unrecognizable SELECT: $proto_sql");
  }

  # precompiled regex fragments: the name separator and the current
  # source alias, used repeatedly in the qualification checks below
  my ($re_sep, $re_alias) = map { quotemeta $_ } ( $self->{name_sep}, $rs_attrs->{alias} );

  # correlate select and as, build selection index
  my (@sel, $in_sel_index);
  for my $i (0 .. $#{$rs_attrs->{select}}) {

    my $s = $rs_attrs->{select}[$i];
    my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef;

    # we throw away the @bind here deliberately
    my ($sql_sel) = $self->_recurse_fields ($s);

    push @sel, {
      arg => $s,
      sql => $sql_sel,
      unquoted_sql => do {
        local $self->{quote_char};
        ($self->_recurse_fields ($s))[0]; # ignore binds again
      },
      as =>
        $sql_alias
          ||
        $rs_attrs->{as}[$i]
          ||
        $self->throw_exception("Select argument $i ($s) without corresponding 'as'")
      ,
    };

    # anything with a placeholder in it needs re-selection
    $in_sel_index->{$sql_sel}++ unless $sql_sel =~ / (?: ^ | \W ) \? (?: \W | $ ) /x;

    # explicit -as aliases are addressable under their quoted form too
    $in_sel_index->{$self->_quote ($sql_alias)}++ if $sql_alias;

    # record unqualified versions too, so we do not have
    # to reselect the same column twice (in qualified and
    # unqualified form)
    if (! ref $s && $sql_sel =~ / $re_sep (.+) $/x) {
      $in_sel_index->{$1}++;
    }
  }


  # re-alias and remove any name separators from aliases,
  # unless we are dealing with the current source alias
  # (which will transcend the subqueries as it is necessary
  # for possible further chaining)
  # same for anything we do not recognize
  my ($sel, $renamed);
  for my $node (@sel) {
    push @{$sel->{original}}, $node->{sql};

    # the negative lookbehinds mean: a qualifier is only acceptable when
    # it is exactly the current source alias at the start of the name
    if (
      ! $in_sel_index->{$node->{sql}}
        or
      $node->{as} =~ / (?<! ^ $re_alias ) \. /x
        or
      $node->{unquoted_sql} =~ / (?<! ^ $re_alias ) $re_sep /x
    ) {
      $node->{as} = $self->_unqualify_colname($node->{as});
      my $quoted_as = $self->_quote($node->{as});
      push @{$sel->{inner}}, sprintf '%s AS %s', $node->{sql}, $quoted_as;
      push @{$sel->{outer}}, $quoted_as;
      $renamed->{$node->{sql}} = $quoted_as;
    }
    else {
      push @{$sel->{inner}}, $node->{sql};
      push @{$sel->{outer}}, $self->_quote (ref $node->{arg} ? $node->{as} : $node->{arg});
    }
  }

  # see if the order gives us anything
  my $extra_order_sel;
  for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) {
    # order with bind
    $chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY';
    ($chunk) = $self->_split_order_chunk($chunk);

    next if $in_sel_index->{$chunk};

    # synthesize a positional ORDER__BY__NNN alias for any order criteria
    # not already present in the selection
    $extra_order_sel->{$chunk} ||= $self->_quote (
      'ORDER__BY__' . sprintf '%03d', scalar keys %{$extra_order_sel||{}}
    );
  }

  return {
    query_leftover => $proto_sql,
    (map {( "selection_$_" => join (', ', @{$sel->{$_}} ) )} keys %$sel ),
    outer_renames => $renamed,
    order_supplement => $extra_order_sel,
  };
}
824 | ||
# Convert a fully qualified column name ("rel.col") into a flat
# identifier by replacing every dot with a double underscore
# ("rel__col"), yielding an alias that survives subquery wrapping.
sub _unqualify_colname {
  my ($self, $name) = @_;
  ( my $flat = $name ) =~ s/\./__/g;
  return $flat;
}
830 | ||
831 | 179 | =head1 FURTHER QUESTIONS? |
832 | 180 | |
833 | 181 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. |
838 | 186 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can |
839 | 187 | redistribute it and/or modify it under the same terms as the |
840 | 188 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. |
841 | ||
842 | =cut | |
843 | ||
844 | 1; |
0 | =for comment POD_DERIVED_INDEX_GENERATED | |
1 | The following documentation is automatically generated. Please do not edit | |
2 | this file, but rather the original, inline with DBIx::Class::SQLMaker::OracleJoins | |
3 | at lib/DBIx/Class/SQLMaker/OracleJoins.pm | |
4 | (on the system that originally ran this). | |
5 | If you do edit this file, and don't want your changes to be removed, make | |
6 | sure you change the first line. | |
7 | ||
8 | =cut | |
9 | ||
10 | =head1 NAME | |
11 | ||
12 | DBIx::Class::SQLMaker::OracleJoins - Pre-ANSI Joins-via-Where-Clause Syntax | |
13 | ||
14 | =head1 PURPOSE | |
15 | ||
16 | This module is used with Oracle < 9.0 due to lack of support for standard | |
17 | ANSI join syntax. | |
18 | ||
19 | =head1 SYNOPSIS | |
20 | ||
21 | Not intended for use directly; used as the sql_maker_class for schemas and components. | |
22 | ||
23 | =head1 DESCRIPTION | |
24 | ||
25 | Implements pre-ANSI joins specified in the where clause. Instead of: | |
26 | ||
27 | SELECT x FROM y JOIN z ON y.id = z.id | |
28 | ||
29 | It will write: | |
30 | ||
31 | SELECT x FROM y, z WHERE y.id = z.id | |
32 | ||
33 | It should properly support left joins, and right joins. Full outer joins are | |
34 | not possible due to the fact that Oracle requires the entire query be written | |
35 | to union the results of a left and right join, and by the time this module is | |
36 | called to create the where query and table definition part of the sql query, | |
37 | it's already too late. | |
38 | ||
39 | =head1 METHODS | |
40 | ||
41 | =over 4 | |
42 | ||
43 | =item select | |
44 | ||
45 | Overrides DBIx::Class::SQLMaker's select() method, which calls _oracle_joins() | |
46 | to modify the column and table list before calling next::method(). | |
47 | ||
48 | =back | |
49 | ||
50 | =head1 BUGS | |
51 | ||
52 | Does not support full outer joins (however neither really does DBIC itself) | |
53 | ||
54 | =head1 SEE ALSO | |
55 | ||
56 | =over 4 | |
57 | ||
58 | =item L<DBIx::Class::Storage::DBI::Oracle::WhereJoins> - Storage class using this | |
59 | ||
60 | =item L<DBIx::Class::SQLMaker> - Parent module | |
61 | ||
62 | =item L<DBIx::Class> - Duh | |
63 | ||
64 | =back | |
65 | ||
66 | =head1 INHERITED METHODS | |
67 | ||
68 | =over 4 | |
69 | ||
70 | =item L<SQL::Abstract> | |
71 | ||
72 | L<is_literal_value|SQL::Abstract/is_literal_value>, L<is_plain_value|SQL::Abstract/is_plain_value> | |
73 | ||
74 | =back | |
75 | ||
76 | =head1 FURTHER QUESTIONS? | |
77 | ||
78 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. | |
79 | ||
80 | =head1 COPYRIGHT AND LICENSE | |
81 | ||
82 | This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE> | |
83 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can | |
84 | redistribute it and/or modify it under the same terms as the | |
85 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. | |
86 |
2 | 2 | use strict; |
3 | 3 | use warnings; |
4 | 4 | |
5 | use base qw( | |
6 | DBIx::Class::SQLMaker::ClassicExtensions | |
7 | SQL::Abstract::Classic | |
8 | ); | |
9 | ||
10 | # NOTE THE LACK OF mro SPECIFICATION | |
11 | # This is deliberate to ensure things will continue to work | |
12 | # with ( usually ) untagged custom darkpan subclasses | |
13 | ||
14 | 1; | |
15 | ||
16 | __END__ | |
17 | ||
5 | 18 | =head1 NAME |
6 | 19 | |
7 | DBIx::Class::SQLMaker - An SQL::Abstract-based SQL maker class | |
20 | DBIx::Class::SQLMaker - An SQL::Abstract::Classic-like SQL maker class | |
8 | 21 | |
9 | 22 | =head1 DESCRIPTION |
10 | 23 | |
11 | This module is a subclass of L<SQL::Abstract> and includes a number of | |
12 | DBIC-specific workarounds, not yet suitable for inclusion into the | |
13 | L<SQL::Abstract> core. It also provides all (and more than) the functionality | |
14 | of L<SQL::Abstract::Limit>, see L<DBIx::Class::SQLMaker::LimitDialects> for | |
15 | more info. | |
24 | This module serves as a mere "nexus class" providing | |
25 | L<SQL::Abstract::Classic>-like functionality to L<DBIx::Class> itself, and | |
26 | to a number of database-engine-specific subclasses. This indirection is | |
27 | explicitly maintained in order to allow swapping out the core of SQL | |
28 | generation within DBIC on per-C<$schema> basis without major architectural | |
29 | changes. It is guaranteed by design and tests that this fast-switching | |
30 | will continue being maintained indefinitely. | |
16 | 31 | |
17 | Currently the enhancements to L<SQL::Abstract> are: | |
32 | =head2 Implementation switching | |
33 | ||
34 | See L<DBIx::Class::Storage::DBI/connect_call_rebase_sqlmaker> | |
35 | ||
36 | =head1 ROADMAP | |
37 | ||
38 | Some maintainer musings on the current state of SQL generation within DBIC as | |
39 | of October 2019 | |
40 | ||
41 | =head2 Folding of most (or all) of L<SQL::Abstract::Classic (SQLAC) | |
42 | |SQL::Abstract::Classic> into DBIC. | |
43 | ||
44 | The rise of complex prefetch use, and the general streamlining of result | |
45 | parsing within DBIC ended up pushing the actual SQL generation to the forefront | |
46 | of many casual performance profiles. While the idea behind the SQLAC-like API | |
47 | is sound, the actual implementation is terribly inefficient (once again bumping | |
48 | into the ridiculously high overhead of perl function calls). | |
49 | ||
50 | Given that SQLAC has a B<very> distinct life on its own, and will hopefully | |
51 | continue to be used within an order of magnitude more projects compared to | |
52 | DBIC, it is prudent to B<not> disturb the current call chains within SQLAC | |
53 | itself. Instead in the future an effort will be undertaken to seek a more | |
54 | thorough decoupling of DBIC SQL generation from reliance on SQLAC, possibly | |
55 | to a point where B<< in the future DBIC may no longer depend on | |
56 | L<SQL::Abstract::Classic> >> at all. | |
57 | ||
58 | B<The L<SQL::Abstract::Classic> library itself will continue being maintained> | |
59 | although it is not likely to gain many extra features, notably it will B<NOT> | |
60 | add further dialect support, at least not within the preexisting | |
61 | C<SQL::Abstract::Classic> namespace. | |
62 | ||
63 | Such streamlining work (if undertaken) will take into consideration the | |
64 | following constraints: | |
18 | 65 | |
19 | 66 | =over |
20 | 67 | |
21 | =item * Support for C<JOIN> statements (via extended C<table/from> support) | |
68 | =item Main API compatibility | |
22 | 69 | |
23 | =item * Support of functions in C<SELECT> lists | |
70 | The object returned by C<< $schema->storage->sqlmaker >> needs to be able to | |
71 | satisfy most of the basic tests found in the current-at-the-time SQLAC dist. | |
72 | While things like L<case|SQL::Abstract::Classic/case> or | |
73 | L<logic|SQL::Abstract::Classic/logic> or even worse | |
74 | L<convert|SQL::Abstract::Classic/convert> will definitely remain | |
75 | unsupported, the rest of the tests should pass (within reason). | |
24 | 76 | |
25 | =item * C<GROUP BY>/C<HAVING> support (via extensions to the order_by parameter) | |
77 | =item Ability to replace SQL::Abstract::Classic with a derivative module | |
26 | 78 | |
27 | =item * Support of C<...FOR UPDATE> type of select statement modifiers | |
79 | During the initial work on L<Data::Query>, which later was slated to occupy | |
80 | the preexisting namespace of L<SQL::Abstract>, the test suite of DBIC turned | |
81 | out to be an invaluable asset to iron out hard-to-reason-about corner cases. | |
82 | In addition the test suite is much more vast and intricate than the tests of | |
83 | SQLAC itself. This state of affairs is way too valuable to sacrifice in order | |
84 | to gain faster SQL generation. Thus the | |
85 | L<SQLMaker rebase|DBIx::Class::Storage::DBI/connect_call_rebase_sqlmaker> | |
86 | functionality introduced in DBIC v0.082850 along with extra CI configurations | |
87 | will continue to ensure that DBIC can be used with an off-the-CPAN SQLAC and | |
88 | derivatives, and that it continues to flawlessly run its entire test suite. | |
89 | While this will undoubtedly complicate the future implementation of a better | |
90 | performing SQL generator, it will preserve both the usability of the test suite | |
91 | for external projects and will keep L<SQL::Abstract::Classic> from regressions | |
92 | in the future. | |
28 | 93 | |
29 | 94 | =back |
30 | 95 | |
31 | =cut | |
96 | Aside from these constraints it is becoming more and more practical to simply | |
97 | stop using SQLAC in day-to-day production deployments of DBIC. The flexibility | |
98 | of the internals is simply not worth the performance cost. | |
32 | 99 | |
33 | use base qw/ | |
34 | DBIx::Class::SQLMaker::LimitDialects | |
35 | SQL::Abstract | |
36 | DBIx::Class | |
37 | /; | |
38 | use mro 'c3'; | |
100 | =head2 Relationship to L<SQL::Abstract> and what formerly was known as L<Data::Query (DQ)|Data::Query> | |
39 | 101 | |
40 | use Sub::Name 'subname'; | |
41 | use DBIx::Class::Carp; | |
42 | use namespace::clean; | |
102 | When initial work on DQ was taking place, the tools in L<::Storage::DBIHacks | |
103 | |https://github.com/Perl5/DBIx-Class/blob/master/lib/DBIx/Class/Storage/DBIHacks.pm> | |
104 | were only beginning to take shape, and it wasn't clear how important they will | |
105 | become further down the road. In fact the I<regexing all over the place> was | |
106 | considered an ugly stop-gap, and even a couple of highly entertaining talks | |
107 | were given to that effect. As the use-cases of DBIC were progressing, and | |
108 | evidence for the importance of supporting arbitrary SQL was mounting, it became | |
109 | clearer that DBIC itself would not really benefit in any significant way from | |
110 | tighter integration with DQ, but on the contrary is likely to lose L<crucial | |
111 | functionality|https://github.com/Perl5/DBIx-Class/blob/7ef1a09ec4/lib/DBIx/Class/Storage/DBIHacks.pm#L373-L396> | |
112 | while the corners of the brand new DQ/SQLA codebase are sanded off. | |
43 | 113 | |
44 | __PACKAGE__->mk_group_accessors (simple => qw/quote_char name_sep limit_dialect/); | |
114 | The current stance on DBIC/SQLA integration is that it would mainly benefit | |
115 | SQLA by having access to the very extensive "early adopter" test suite, in the | |
116 | same manner as early DBIC benefitted tremendously from usurping the Class::DBI | |
117 | test suite. As far as the DBIC user-base - there are no immediate large-scale | |
118 | upsides to deep SQLA integration, neither in terms of API nor in performance. | |
119 | As such it is unlikely that DBIC will switch back to using L<SQL::Abstract> in | |
120 | its core any time soon, if ever. | |
45 | 121 | |
# Boolean predicate: is an identifier quote character configured on
# this SQL maker instance?
sub _quoting_enabled {
  my $self = shift;
  my $qc = $self->{quote_char};
  return ( defined $qc and length $qc ) ? 1 : 0;
}
49 | ||
# Return a normalized ( $left, $right ) pair of quote characters:
# a single configured character is duplicated, an arrayref is
# flattened, and undef entries become empty strings.
sub _quote_chars {
  my $self = $_[0];

  # legacy callers test truthiness via !!$sm->_quote_chars in scalar
  # context - preserve the "empty when unconfigured" contract for them
  if ( !wantarray and ( ! defined $self->{quote_char} or ! length $self->{quote_char} ) ) {
    return ();
  }

  my @pair = ref $self->{quote_char}
    ? @{ $self->{quote_char} }
    : ( $self->{quote_char} ) x 2
  ;

  return map { defined $_ ? $_ : '' } @pair;
}
61 | ||
# FIXME when we bring in the storage weaklink, check its schema
# weaklink and channel through $schema->throw_exception
sub throw_exception {
  my ($self, $msg) = @_;
  DBIx::Class::Exception->throw($msg);
}
65 | ||
BEGIN {
  # reinstall the belch()/puke() functions of SQL::Abstract with custom versions
  # that use DBIx::Class::Carp/DBIx::Class::Exception instead of plain Carp
  #
  # subname() keeps the replacements properly identified in stack traces,
  # and caller(1) reports the SQLA function that invoked belch/puke rather
  # than the shim itself
  no warnings qw/redefine/;

  *SQL::Abstract::belch = subname 'SQL::Abstract::belch' => sub (@) {
    my($func) = (caller(1))[3];
    carp "[$func] Warning: ", @_;
  };

  *SQL::Abstract::puke = subname 'SQL::Abstract::puke' => sub (@) {
    my($func) = (caller(1))[3];
    __PACKAGE__->throw_exception("[$func] Fatal: " . join ('', @_));
  };
}
81 | ||
# the "oh noes offset/top without limit" constant
# limited to 31 bits for sanity (and consistency,
# since it may be handed to the like of sprintf %u)
#
# Also *some* builds of SQLite fail the test
# some_column BETWEEN ? AND ?: 1, 4294967295
# with the proper integer bind attrs
#
# Implemented as a method, since ::Storage::DBI also
# refers to it (i.e. for the case of software_limit or
# as the value to abuse with MSSQL ordered subqueries)
#
# Returns 2**31 - 1 (the empty prototype makes it constant-foldable)
sub __max_int () { 0x7FFFFFFF };
94 | ||
# we no longer need to check this - DBIC has ways of dealing with it
# specifically ::Storage::DBI::_resolve_bindattrs()
# (permanently short-circuits the SQLA sanity check to "always ok")
sub _assert_bindval_matches_bindtype () { 1 };
98 | ||
# poor man's de-qualifier
# When {_dequalify_idents} is set, strip any leading "alias." off a
# plain (non-ref) identifier before quoting: the m// in list context
# supplies its capture (the final dot-free segment) as the argument
# handed to the parent implementation
sub _quote {
  $_[0]->next::method( ( $_[0]{_dequalify_idents} and ! ref $_[1] )
    ? $_[1] =~ / ([^\.]+) $ /x
    : $_[1]
  );
}
106 | ||
# Deprecation shim for the old -nest operator: warn once per callsite,
# then delegate to the parent implementation unchanged
sub _where_op_NEST {
  carp_unique ("-nest in search conditions is deprecated, you most probably wanted:\n"
    .q|{..., -and => [ \%cond0, \@cond1, \'cond2', \[ 'cond3', [ col => bind ] ], etc. ], ... }|
  );

  shift->next::method(@_);
}
114 | ||
# Handle limit-dialect selection
#
# Entry point for SELECT generation. Beyond the classic SQLA triplet
# ($table, $fields, $where) it takes $rs_attrs (the resultset attribute
# hash, see _parse_rs_attrs), $limit and $offset. Returns ($sql, @bind)
# in list context, $sql alone in scalar context.
sub select {
  my ($self, $table, $fields, $where, $rs_attrs, $limit, $offset) = @_;


  # render the selection list first - its binds must precede all others
  ($fields, @{$self->{select_bind}}) = $self->_recurse_fields($fields);

  if (defined $offset) {
    $self->throw_exception('A supplied offset must be a non-negative integer')
      if ( $offset =~ /\D/ or $offset < 0 );
  }
  $offset ||= 0;

  if (defined $limit) {
    $self->throw_exception('A supplied limit must be a positive integer')
      if ( $limit =~ /\D/ or $limit <= 0 );
  }
  elsif ($offset) {
    # an offset without a limit still requires a LIMIT clause in most
    # dialects - substitute the "effectively unlimited" sentinel
    $limit = $self->__max_int;
  }


  my ($sql, @bind);
  if ($limit) {
    # this is legacy code-flow from SQLA::Limit, it is not set in stone

    # generate the unlimited SQL first, then post-process it below
    ($sql, @bind) = $self->next::method ($table, $fields, $where);

    my $limiter;

    if( $limiter = $self->can ('emulate_limit') ) {
      carp_unique(
        'Support for the legacy emulate_limit() mechanism inherited from '
      . 'SQL::Abstract::Limit has been deprecated, and will be removed when '
      . 'DBIC transitions to Data::Query. If your code uses this type of '
      . 'limit specification please file an RT and provide the source of '
      . 'your emulate_limit() implementation, so an acceptable upgrade-path '
      . 'can be devised'
      );
    }
    else {
      # resolve the configured dialect to its _<dialect> implementation
      my $dialect = $self->limit_dialect
        or $self->throw_exception( "Unable to generate SQL-limit - no limit dialect specified on $self" );

      $limiter = $self->can ("_$dialect")
        or $self->throw_exception(__PACKAGE__ . " does not implement the requested dialect '$dialect'");
    }

    $sql = $self->$limiter (
      $sql,
      { %{$rs_attrs||{}}, _selector_sql => $fields },
      $limit,
      $offset
    );
  }
  else {
    ($sql, @bind) = $self->next::method ($table, $fields, $where, $rs_attrs);
  }

  push @{$self->{where_bind}}, @bind;

  # this *must* be called, otherwise extra binds will remain in the sql-maker
  my @all_bind = $self->_assemble_binds;

  $sql .= $self->_lock_select ($rs_attrs->{for})
    if $rs_attrs->{for};

  return wantarray ? ($sql, @all_bind) : $sql;
}
184 | ||
# Drain the per-clause bind accumulators in SQL clause order and return
# them as one flat list. The delete() is deliberate: leftover binds
# must not leak into the next generated statement.
sub _assemble_binds {
  my $self = shift;

  my @collected;
  for my $clause (qw( pre_select select from where group having order limit )) {
    my $stash = delete $self->{"${clause}_bind"};
    push @collected, @$stash if $stash;
  }
  return @collected;
}
189 | ||
# Map the recognized rs-attribute {for} values to their SQL clauses
my %lock_clause_of = (
  update => 'FOR UPDATE',
  shared => 'FOR SHARE',
);

# Return the locking suffix (including its leading space) for a
# SELECT ... FOR ... modifier. A scalar-ref $type is an escape hatch
# producing a literal "FOR <text>"; anything unrecognized throws.
sub _lock_select {
  my ($self, $type) = @_;

  my $clause;
  if (ref $type eq 'SCALAR') {
    $clause = "FOR $$type";
  }
  else {
    $clause = $lock_clause_of{$type}
      || $self->throw_exception( "Unknown SELECT .. FOR type '$type' requested" );
  }

  return " $clause";
}
207 | ||
# Handle default inserts
sub insert {
# optimized due to hotttnesss
#  my ($self, $table, $data, $options) = @_;

  # SQLA will emit INSERT INTO $table ( ) VALUES ( )
  # which is sadly understood only by MySQL. Change default behavior here,
  # until SQLA2 comes with proper dialect support
  # (a missing or empty $data hash means "insert nothing but defaults")
  if (! $_[2] or (ref $_[2] eq 'HASH' and !keys %{$_[2]} ) ) {
    my @bind;
    my $sql = sprintf(
      'INSERT INTO %s DEFAULT VALUES', $_[0]->_quote($_[1])
    );

    # honor a RETURNING request even on the default-values shortcut
    if ( ($_[3]||{})->{returning} ) {
      my $s;
      ($s, @bind) = $_[0]->_insert_returning ($_[3]);
      $sql .= $s;
    }

    return ($sql, @bind);
  }

  next::method(@_);
}
233 | ||
# Recursively render a {select}-style field specification to SQL.
# Accepted forms: plain name (quoted), \'literal sql', an arrayref of
# further specs (comma-joined), \[ $sql, @bind ] literal-with-binds,
# and the single-pair { func => $args, -as => $alias } function form.
# Returns ( $sql, @bind ).
sub _recurse_fields {
  my ($self, $fields) = @_;
  my $ref = ref $fields;
  return $self->_quote($fields) unless $ref;
  return $$fields if $ref eq 'SCALAR';

  if ($ref eq 'ARRAY') {
    my (@select, @bind);
    for my $field (@$fields) {
      my ($select, @new_bind) = $self->_recurse_fields($field);
      push @select, $select;
      push @bind, @new_bind;
    }
    return (join(', ', @select), @bind);
  }
  elsif ($ref eq 'HASH') {
    my %hash = %$fields;  # shallow copy

    my $as = delete $hash{-as};   # if supplied

    my ($func, $rhs, @toomany) = %hash;

    # there should be only one pair
    if (@toomany) {
      $self->throw_exception( "Malformed select argument - too many keys in hash: " . join (',', keys %$fields ) );
    }

    # multi-column DISTINCT is ambiguous - point the user at group_by
    if (lc ($func) eq 'distinct' && ref $rhs eq 'ARRAY' && @$rhs > 1) {
      $self->throw_exception (
        'The select => { distinct => ... } syntax is not supported for multiple columns.'
       .' Instead please use { group_by => [ qw/' . (join ' ', @$rhs) . '/ ] }'
       .' or { select => [ qw/' . (join ' ', @$rhs) . '/ ], distinct => 1 }'
      );
    }

    my ($rhs_sql, @rhs_bind) = $self->_recurse_fields($rhs);
    my $select = sprintf ('%s( %s )%s',
      $self->_sqlcase($func),
      $rhs_sql,
      $as
        ? sprintf (' %s %s', $self->_sqlcase('as'), $self->_quote ($as) )
        : ''
    );

    return ($select, @rhs_bind);
  }
  elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) {
    return @{$$fields};
  }
  else {
    $self->throw_exception( $ref . qq{ unexpected in _recurse_fields()} );
  }
}
287 | ||
288 | ||
# this used to be a part of _order_by but is broken out for clarity.
# What we have been doing forever is hijacking the $order arg of
# SQLA::select to pass in arbitrary pieces of data (first the group_by,
# then pretty much the entire resultset attr-hash, as more and more
# things in the SQLA space need to have more info about the $rs they
# create SQL for. The alternative would be to keep expanding the
# signature of _select with more and more positional parameters, which
# is just gross. All hail SQLA2!
#
# Renders the trailing GROUP BY / HAVING / ORDER BY clauses out of a
# resultset attribute hash, pushing any binds onto the respective
# {group_bind} / {having_bind} stashes. Returns the SQL fragment
# (possibly the empty string).
sub _parse_rs_attrs {
  my ($self, $arg) = @_;

  my $sql = '';

  if ($arg->{group_by}) {
    # _recurse_fields returns a non-empty list on success
    if ( my ($group_sql, @group_bind) = $self->_recurse_fields($arg->{group_by}) ) {
      $sql .= $self->_sqlcase(' group by ') . $group_sql;
      push @{$self->{group_bind}}, @group_bind;
    }
  }

  if (defined $arg->{having}) {
    my ($frag, @bind) = $self->_recurse_where($arg->{having});
    push(@{$self->{having_bind}}, @bind);
    $sql .= $self->_sqlcase(' having ') . $frag;
  }

  if (defined $arg->{order_by}) {
    $sql .= $self->_order_by ($arg->{order_by});
  }

  return $sql;
}
321 | ||
# Decide whether the 4th positional SQLA argument is a plain order_by
# spec or the (ab)used resultset attr-hash described above
# _parse_rs_attrs, and dispatch accordingly.
sub _order_by {
  my ($self, $arg) = @_;

  # check that we are not called in legacy mode (order_by as 4th argument)
  # a hashref without any -asc/-desc keys is taken to be an rs_attrs hash
  if (ref $arg eq 'HASH' and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
    return $self->_parse_rs_attrs ($arg);
  }
  else {
    my ($sql, @bind) = $self->next::method($arg);
    push @{$self->{order_bind}}, @bind;
    return $sql;
  }
}
335 | ||
# Split an ORDER BY chunk into ( $expression, $is_descending ).
# The whole modifier group in the substitution is optional, so the
# match always succeeds - which guarantees $1 is freshly reset rather
# than inherited from an earlier, unrelated match.
sub _split_order_chunk {
  my ($self, $chunk) = @_;

  $chunk =~ s/ (?: \s+ (ASC|DESC) )? \s* $//ix;

  my $is_desc = ( $1 and uc($1) eq 'DESC' ) ? 1 : 0;

  return ( $chunk, $is_desc );
}
347 | ||
sub _table {
# optimized due to hotttnesss
#  my ($self, $from) = @_;
  # ARRAY/HASH forms are DBIC join specs handled by _recurse_from;
  # \[ $sql, @bind ] is literal SQL whose binds land in {from_bind};
  # anything else (e.g. a plain table name) goes to the parent class
  if (my $ref = ref $_[1] ) {
    if ($ref eq 'ARRAY') {
      return $_[0]->_recurse_from(@{$_[1]});
    }
    elsif ($ref eq 'HASH') {
      return $_[0]->_recurse_from($_[1]);
    }
    elsif ($ref eq 'REF' && ref ${$_[1]} eq 'ARRAY') {
      my ($sql, @bind) = @{ ${$_[1]} };
      push @{$_[0]->{from_bind}}, @bind;
      return $sql
    }
  }
  return $_[0]->next::method ($_[1]);
}
366 | ||
# Render the "<TYPE> JOIN " prefix for a join spec, falling back to
# the maker-wide {_default_jointype} when the spec carries none.
sub _generate_join_clause {
  my ($self, $join_type) = @_;

  $join_type = $self->{_default_jointype}
    if ! defined $join_type;

  return sprintf ('%s JOIN ',
    $join_type ? $self->_sqlcase($join_type) : ''
  );
}
377 | ||
# Flatten a DBIC {from} datastructure (first source, then join specs)
# into a single FROM-clause SQL string.
sub _recurse_from {
  my ($self, @from_spec) = @_;
  my @blocks = $self->_gen_from_blocks(@from_spec);
  return join ' ', @blocks;
}
382 | ||
# Turn a {from} list ( $first_source, @join_specs ) into a list of SQL
# chunks: the initial table expression followed by one
# "<type> JOIN <rhs> ON <condition>" block per join spec.
sub _gen_from_blocks {
  my ($self, $from, @joins) = @_;

  my @fchunks = $self->_from_chunk_to_sql($from);

  for (@joins) {
    my ($to, $on) = @$_;

    # check whether a join type exists
    my $to_jt = ref($to) eq 'ARRAY' ? $to->[0] : $to;
    my $join_type;
    if (ref($to_jt) eq 'HASH' and defined($to_jt->{-join_type})) {
      $join_type = $to_jt->{-join_type};
      $join_type =~ s/^\s+ | \s+$//xg;
    }

    my @j = $self->_generate_join_clause( $join_type );

    # an ARRAY rhs is a nested join group - recurse and parenthesize
    if (ref $to eq 'ARRAY') {
      push(@j, '(', $self->_recurse_from(@$to), ')');
    }
    else {
      push(@j, $self->_from_chunk_to_sql($to));
    }

    my ($sql, @bind) = $self->_join_condition($on);
    push(@j, ' ON ', $sql);
    push @{$self->{from_bind}}, @bind;

    push @fchunks, join '', @j;
  }

  return @fchunks;
}
417 | ||
# Render a single element of a {from} specification to SQL.
# Accepted forms:
#   plain string              - quoted identifier
#   \'literal sql'            - passed through verbatim
#   \[ $sql, @bind ]          - literal sql, binds pushed onto {from_bind}
#   { alias => $table_spec }  - rendered as "<table> <alias>"; keys
#                               starting with '-' are metadata, ignored
sub _from_chunk_to_sql {
  my ($self, $fromspec) = @_;

  return join (' ', do {
    if (! ref $fromspec) {
      $self->_quote($fromspec);
    }
    elsif (ref $fromspec eq 'SCALAR') {
      $$fromspec;
    }
    elsif (ref $fromspec eq 'REF' and ref $$fromspec eq 'ARRAY') {
      push @{$self->{from_bind}}, @{$$fromspec}[1..$#$$fromspec];
      $$fromspec->[0];
    }
    elsif (ref $fromspec eq 'HASH') {
      # a map over a single k/v pair: $as gets the key, $table the value
      my ($as, $table, $toomuch) = ( map
        { $_ => $fromspec->{$_} }
        ( grep { $_ !~ /^\-/ } keys %$fromspec )
      );

      # FIX: corrected typo in the exception text ('exra' -> 'extra')
      $self->throw_exception( "Only one table/as pair expected in from-spec but an extra '$toomuch' key present" )
        if defined $toomuch;

      ($self->_from_chunk_to_sql($table), $self->_quote($as) );
    }
    else {
      $self->throw_exception('Unsupported from refkind: ' . ref $fromspec );
    }
  });
}
448 | ||
# Render an ON condition to ( $sql, @bind ), with two layers of
# backcompat massaging before delegating to _recurse_where.
sub _join_condition {
  my ($self, $cond) = @_;

  # Backcompat for the old days when a plain hashref
  # { 't1.col1' => 't2.col2' } meant ON t1.col1 = t2.col2
  if (
    ref $cond eq 'HASH'
      and
    keys %$cond == 1
      and
    (keys %$cond)[0] =~ /\./
      and
    ! ref ( (values %$cond)[0] )
  ) {
    carp_unique(
      "ResultSet {from} structures with conditions not conforming to the "
    . "SQL::Abstract syntax are deprecated: you either need to stop abusing "
    . "{from} altogether, or express the condition properly using the "
    . "{ -ident => ... } operator"
    );
    # rewrite the rhs as an identifier so it is not treated as a bind
    $cond = { keys %$cond => { -ident => values %$cond } }
  }
  elsif ( ref $cond eq 'ARRAY' ) {
    # do our own ORing so that the hashref-shim above is invoked
    my @parts;
    my @binds;
    foreach my $c (@$cond) {
      my ($sql, @bind) = $self->_join_condition($c);
      push @binds, @bind;
      push @parts, $sql;
    }
    return join(' OR ', @parts), @binds;
  }

  return $self->_recurse_where($cond);
}
485 | ||
# This is hideously ugly, but SQLA does not understand multicol IN expressions
# FIXME TEMPORARY - DQ should have native syntax for this
# moved here to raise API questions
#
# !!! EXPERIMENTAL API !!! WILL CHANGE !!!
#
# Builds "( c1, c2, ... ) IN ( <rhs> )". Both sides are normalized to
# a \[ $sql, @bind ] literal, parenthesized unless they already are,
# then joined with ' IN ' (binds preserved in lhs-then-rhs order).
sub _where_op_multicolumn_in {
  my ($self, $lhs, $rhs) = @_;

  # lhs: a column name or a list of column-ish things, each of which
  # may itself be a plain name, \'literal' or \[ $sql, @bind ]
  if (! ref $lhs or ref $lhs eq 'ARRAY') {
    my (@sql, @bind);
    for (ref $lhs ? @$lhs : $lhs) {
      if (! ref $_) {
        push @sql, $self->_quote($_);
      }
      elsif (ref $_ eq 'SCALAR') {
        push @sql, $$_;
      }
      elsif (ref $_ eq 'REF' and ref $$_ eq 'ARRAY') {
        my ($s, @b) = @$$_;
        push @sql, $s;
        push @bind, @b;
      }
      else {
        $self->throw_exception("ARRAY of @{[ ref $_ ]}es unsupported for multicolumn IN lhs...");
      }
    }
    $lhs = \[ join(', ', @sql), @bind];
  }
  elsif (ref $lhs eq 'SCALAR') {
    $lhs = \[ $$lhs ];
  }
  elsif (ref $lhs eq 'REF' and ref $$lhs eq 'ARRAY' ) {
    # noop - already in the normalized \[ $sql, @bind ] form
  }
  else {
    $self->throw_exception( ref($lhs) . "es unsupported for multicolumn IN lhs...");
  }

  # is this proper...?
  $rhs = \[ $self->_recurse_where($rhs) ];

  # wrap either side in parens unless it is already fully parenthesized
  for ($lhs, $rhs) {
    $$_->[0] = "( $$_->[0] )"
      unless $$_->[0] =~ /^ \s* \( .* \) \s* $/xs;
  }

  \[ join( ' IN ', shift @$$lhs, shift @$$rhs ), @$$lhs, @$$rhs ];
}
122 | Accordingly the DBIC development effort will in the foreseeable future ignore | |
123 | the existence of the new-guts SQLA, and will continue optimizing the | |
124 | preexisting SQLAC-based solution, potentially "organically growing" its own | |
125 | compatible implementation. Also, as described higher up, the ability to plug a | |
126 | separate SQLAC-compatible class providing the necessary surface API will remain | |
127 | possible, and will be protected at all costs in order to continue providing | |
128 | SQLA and friends access to the test cases of DBIC. | |
534 | 129 | |
535 | 130 | =head1 FURTHER QUESTIONS? |
536 | 131 | |
542 | 137 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can |
543 | 138 | redistribute it and/or modify it under the same terms as the |
544 | 139 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. |
545 | ||
546 | =cut | |
547 | ||
548 | 1; |
0 | =for comment POD_DERIVED_INDEX_GENERATED | |
1 | The following documentation is automatically generated. Please do not edit | |
2 | this file, but rather the original, inline with DBIx::Class::SQLMaker | |
3 | at lib/DBIx/Class/SQLMaker.pm | |
4 | (on the system that originally ran this). | |
5 | If you do edit this file, and don't want your changes to be removed, make | |
6 | sure you change the first line. | |
7 | ||
8 | =cut | |
9 | ||
10 | =head1 NAME | |
11 | ||
12 | DBIx::Class::SQLMaker - An SQL::Abstract-based SQL maker class | |
13 | ||
14 | =head1 DESCRIPTION | |
15 | ||
16 | This module is a subclass of L<SQL::Abstract> and includes a number of | |
17 | DBIC-specific workarounds, not yet suitable for inclusion into the | |
18 | L<SQL::Abstract> core. It also provides all (and more than) the functionality | |
19 | of L<SQL::Abstract::Limit>, see L<DBIx::Class::SQLMaker::LimitDialects> for | |
20 | more info. | |
21 | ||
22 | Currently the enhancements to L<SQL::Abstract> are: | |
23 | ||
24 | =over 4 | |
25 | ||
26 | =item * Support for C<JOIN> statements (via extended C<table/from> support) | |
27 | ||
28 | =item * Support of functions in C<SELECT> lists | |
29 | ||
30 | =item * C<GROUP BY>/C<HAVING> support (via extensions to the order_by parameter) | |
31 | ||
32 | =item * Support of C<...FOR UPDATE> type of select statement modifiers | |
33 | ||
34 | =back | |
35 | ||
36 | =head1 INHERITED METHODS | |
37 | ||
38 | =over 4 | |
39 | ||
40 | =item L<SQL::Abstract> | |
41 | ||
42 | L<is_literal_value|SQL::Abstract/is_literal_value>, L<is_plain_value|SQL::Abstract/is_plain_value> | |
43 | ||
44 | =back | |
45 | ||
46 | =head1 FURTHER QUESTIONS? | |
47 | ||
48 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. | |
49 | ||
50 | =head1 COPYRIGHT AND LICENSE | |
51 | ||
52 | This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE> | |
53 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can | |
54 | redistribute it and/or modify it under the same terms as the | |
55 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. | |
56 |
5 | 5 | use mro 'c3'; |
6 | 6 | |
7 | 7 | use DBI (); |
8 | use List::Util 'first'; | |
9 | use namespace::clean; | |
10 | 8 | |
11 | 9 | __PACKAGE__->sql_limit_dialect ('Top'); |
12 | 10 | __PACKAGE__->sql_maker_class('DBIx::Class::SQLMaker::ACCESS'); |
65 | 63 | my $columns_info = $source->columns_info; |
66 | 64 | |
67 | 65 | if (keys %$to_insert == 0) { |
68 | my $autoinc_col = first { | |
66 | my ($autoinc_col) = grep { | |
69 | 67 | $columns_info->{$_}{is_auto_increment} |
70 | 68 | } keys %$columns_info; |
71 | 69 |
6 | 6 | |
7 | 7 | use Try::Tiny; |
8 | 8 | use Scalar::Util qw(refaddr weaken); |
9 | use List::Util 'shuffle'; | |
10 | 9 | use DBIx::Class::_Util 'detected_reinvoked_destructor'; |
11 | 10 | use namespace::clean; |
12 | 11 | |
178 | 177 | |
179 | 178 | (undef, $sth) = $self->storage->_select( @{$self->{args}} ); |
180 | 179 | |
181 | return ( | |
180 | ( | |
182 | 181 | DBIx::Class::_ENV_::SHUFFLE_UNORDERED_RESULTSETS |
183 | 182 | and |
184 | 183 | ! $self->{attrs}{order_by} |
184 | and | |
185 | require List::Util | |
185 | 186 | ) |
186 | ? shuffle @{$sth->fetchall_arrayref} | |
187 | ? List::Util::shuffle( @{$sth->fetchall_arrayref} ) | |
187 | 188 | : @{$sth->fetchall_arrayref} |
188 | 189 | ; |
189 | 190 | } |
4 | 4 | |
5 | 5 | use base qw/DBIx::Class::Storage::DBI/; |
6 | 6 | use mro 'c3'; |
7 | use Try::Tiny; | |
8 | use namespace::clean; | |
9 | 7 | |
10 | 8 | __PACKAGE__->datetime_parser_type('DateTime::Format::DB2'); |
11 | 9 | __PACKAGE__->sql_quote_char ('"'); |
3 | 3 | use warnings; |
4 | 4 | use base qw/DBIx::Class::Storage::DBI/; |
5 | 5 | use mro 'c3'; |
6 | use List::Util 'first'; | |
7 | use namespace::clean; | |
8 | 6 | |
9 | 7 | =head1 NAME |
10 | 8 | |
79 | 77 | $generator = uc $generator unless $quoted; |
80 | 78 | |
81 | 79 | return $generator |
82 | if first { | |
80 | if grep { | |
83 | 81 | $self->sql_maker->quote_char ? ($_ eq $col) : (uc($_) eq uc($col)) |
84 | 82 | } @trig_cols; |
85 | 83 | } |
3 | 3 | use warnings; |
4 | 4 | use base 'DBIx::Class::Storage::DBI'; |
5 | 5 | use mro 'c3'; |
6 | ||
7 | use namespace::clean; | |
8 | 6 | |
9 | 7 | =head1 NAME |
10 | 8 |
9 | 9 | use mro 'c3'; |
10 | 10 | |
11 | 11 | use Try::Tiny; |
12 | use List::Util 'first'; | |
13 | 12 | use namespace::clean; |
14 | 13 | |
15 | 14 | __PACKAGE__->mk_group_accessors(simple => qw/ |
4 | 4 | |
5 | 5 | use base 'DBIx::Class::Storage::DBI'; |
6 | 6 | use mro 'c3'; |
7 | ||
8 | use DBIx::Class::SQLMaker::LimitDialects; | |
9 | use List::Util qw/first/; | |
10 | ||
11 | use namespace::clean; | |
12 | 7 | |
13 | 8 | =head1 NAME |
14 | 9 |
7 | 7 | use Scope::Guard (); |
8 | 8 | use Context::Preserve 'preserve_context'; |
9 | 9 | use Try::Tiny; |
10 | use List::Util 'first'; | |
11 | 10 | use namespace::clean; |
12 | 11 | |
13 | 12 | __PACKAGE__->sql_limit_dialect ('RowNum'); |
284 | 283 | my ($self, $sql, $bind) = @_[0,2,3]; |
285 | 284 | |
286 | 285 | # Turn off sth caching for multi-part LOBs. See _prep_for_execute below |
287 | local $self->{disable_sth_caching} = 1 if first { | |
286 | local $self->{disable_sth_caching} = 1 if grep { | |
288 | 287 | ($_->[0]{_ora_lob_autosplit_part}||0) |
289 | 288 | > |
290 | 289 | (__cache_queries_with_max_lob_parts - 1) |
650 | 649 | |
651 | 650 | my $alias = $self->next::method(@_); |
652 | 651 | |
653 | # we need to shorten here in addition to the shortening in SQLA itself, | |
652 | # we need to shorten here in addition to the shortening in SQLMaker itself, | |
654 | 653 | # since the final relnames are crucial for the join optimizer |
655 | 654 | return $self->sql_maker->_shorten_identifier($alias); |
656 | 655 | } |
4 | 4 | |
5 | 5 | use base qw/DBIx::Class::Storage::DBI/; |
6 | 6 | use mro 'c3'; |
7 | use Try::Tiny; | |
8 | use namespace::clean; | |
9 | 7 | |
10 | 8 | sub _rebless { |
11 | 9 | my ($self) = @_; |
18 | 18 | Given a pool (L<DBIx::Class::Storage::DBI::Replicated::Pool>) of replicated |
19 | 19 | database's (L<DBIx::Class::Storage::DBI::Replicated::Replicant>), defines a |
20 | 20 | method by which query load can be spread out across each replicant in the pool. |
21 | ||
22 | This Balancer uses L<List::Util> keyword 'shuffle' to randomly pick an active | |
23 | replicant from the associated pool. This may or may not be random enough for | |
24 | you, patches welcome. | |
25 | 21 | |
26 | 22 | =head1 ATTRIBUTES |
27 | 23 |
1 | 1 | |
2 | 2 | use Moose; |
3 | 3 | use DBIx::Class::Storage::DBI::Replicated::Replicant; |
4 | use List::Util 'sum'; | |
5 | 4 | use Scalar::Util 'reftype'; |
6 | 5 | use DBI (); |
7 | 6 | use MooseX::Types::Moose qw/Num Int ClassName HashRef/; |
321 | 320 | =cut |
322 | 321 | |
323 | 322 | sub connected_replicants { |
324 | my $self = shift @_; | |
325 | return sum( map { | |
326 | $_->connected ? 1:0 | |
327 | } $self->all_replicants ); | |
323 | return scalar grep | |
324 | { $_->connected } | |
325 | shift->all_replicants | |
326 | ; | |
328 | 327 | } |
329 | 328 | |
330 | 329 | =head2 active_replicants |
13 | 13 | use MooseX::Types::Moose qw/ClassName HashRef Object/; |
14 | 14 | use Scalar::Util 'reftype'; |
15 | 15 | use Hash::Merge; |
16 | use List::Util qw/min max reduce/; | |
16 | use List::Util (); | |
17 | 17 | use Context::Preserve 'preserve_context'; |
18 | 18 | use Try::Tiny; |
19 | 19 | |
329 | 329 | _arm_global_destructor |
330 | 330 | _verify_pid |
331 | 331 | |
332 | _seems_connected | |
333 | _ping | |
334 | ||
332 | 335 | get_use_dbms_capability |
333 | 336 | set_use_dbms_capability |
334 | 337 | get_dbms_capability |
941 | 944 | sub lag_behind_master { |
942 | 945 | my $self = shift; |
943 | 946 | |
944 | return max map $_->lag_behind_master, $self->replicants; | |
947 | return List::Util::max( map { $_->lag_behind_master } $self->replicants ); | |
945 | 948 | } |
946 | 949 | |
947 | 950 | =head2 is_replicating |
968 | 971 | $_->connect_call_datetime_setup for $self->all_storages; |
969 | 972 | } |
970 | 973 | |
974 | =head2 connect_call_rebase_sqlmaker | |
975 | ||
976 | calls L<DBIx::Class::Storage::DBI/connect_call_rebase_sqlmaker> for all storages | |
977 | ||
978 | =cut | |
979 | ||
980 | sub connect_call_rebase_sqlmaker { | |
981 | my( $self, $target_base ) = @_; | |
982 | $_->connect_call_rebase_sqlmaker( $target_base ) for $self->all_storages; | |
983 | } | |
984 | ||
971 | 985 | sub _populate_dbh { |
972 | 986 | my $self = shift; |
973 | 987 | $_->_populate_dbh for $self->all_storages; |
1026 | 1040 | sub disconnect_call_do_sql { |
1027 | 1041 | my $self = shift; |
1028 | 1042 | $_->disconnect_call_do_sql(@_) for $self->all_storages; |
1029 | } | |
1030 | ||
1031 | sub _seems_connected { | |
1032 | my $self = shift; | |
1033 | ||
1034 | return min map $_->_seems_connected, $self->all_storages; | |
1035 | } | |
1036 | ||
1037 | sub _ping { | |
1038 | my $self = shift; | |
1039 | ||
1040 | return min map $_->_ping, $self->all_storages; | |
1041 | 1043 | } |
1042 | 1044 | |
1043 | 1045 | # not using the normalized_version, because we want to preserve |
3 | 3 | use warnings; |
4 | 4 | use base qw/DBIx::Class::Storage::DBI::UniqueIdentifier/; |
5 | 5 | use mro 'c3'; |
6 | use List::Util 'first'; | |
7 | 6 | use Try::Tiny; |
8 | 7 | use namespace::clean; |
9 | 8 | |
48 | 47 | |
49 | 48 | my $values = $self->next::method(@_); |
50 | 49 | |
51 | my $identity_col = | |
52 | first { $colinfo->{$_}{is_auto_increment} } keys %$colinfo; | |
50 | my ($identity_col) = | |
51 | grep { $colinfo->{$_}{is_auto_increment} } keys %$colinfo; | |
53 | 52 | |
54 | 53 | # user might have an identity PK without is_auto_increment |
55 | 54 | # |
5 | 5 | use base qw/DBIx::Class::Storage::DBI/; |
6 | 6 | use mro 'c3'; |
7 | 7 | |
8 | use SQL::Abstract 'is_plain_value'; | |
8 | use SQL::Abstract::Util 'is_plain_value'; | |
9 | 9 | use DBIx::Class::_Util qw(modver_gt_or_eq sigwarn_silencer); |
10 | 10 | use DBIx::Class::Carp; |
11 | 11 | use Try::Tiny; |
15 | 15 | __PACKAGE__->sql_limit_dialect ('LimitOffset'); |
16 | 16 | __PACKAGE__->sql_quote_char ('"'); |
17 | 17 | __PACKAGE__->datetime_parser_type ('DateTime::Format::SQLite'); |
18 | ||
19 | sub _determine_supports_multicolumn_in { | |
20 | ( shift->_server_info->{normalized_dbms_version} < '3.014' ) | |
21 | ? 0 | |
22 | : 1 | |
23 | } | |
24 | ||
18 | 25 | |
19 | 26 | =head1 NAME |
20 | 27 |
7 | 7 | DBIx::Class::Storage::DBI::Sybase::ASE |
8 | 8 | /; |
9 | 9 | use mro 'c3'; |
10 | use List::Util 'first'; | |
11 | 10 | use Scalar::Util 'looks_like_number'; |
12 | 11 | use namespace::clean; |
13 | 12 | |
41 | 40 | |
42 | 41 | return $self->next::method(@_) if not defined $value or not defined $type; |
43 | 42 | |
44 | if (my $key = first { $type =~ /$_/i } keys %noquote) { | |
43 | if (my ($key) = grep { $type =~ /$_/i } keys %noquote) { | |
45 | 44 | return 1 if $noquote{$key}->($value); |
46 | 45 | } |
47 | 46 | elsif ($self->is_datatype_numeric($type) && $number->($value)) { |
10 | 10 | use mro 'c3'; |
11 | 11 | use DBIx::Class::Carp; |
12 | 12 | use Scalar::Util qw/blessed weaken/; |
13 | use List::Util 'first'; | |
14 | 13 | use Sub::Name(); |
15 | 14 | use Data::Dumper::Concise 'Dumper'; |
16 | 15 | use Try::Tiny; |
473 | 472 | if (keys %$fields) { |
474 | 473 | |
475 | 474 | # Now set the identity update flags for the actual update |
476 | local $self->{_autoinc_supplied_for_op} = (first | |
475 | local $self->{_autoinc_supplied_for_op} = grep | |
477 | 476 | { $_->{is_auto_increment} } |
478 | 477 | values %{ $source->columns_info([ keys %$fields ]) } |
479 | ) ? 1 : 0; | |
478 | ; | |
480 | 479 | |
481 | 480 | my $next = $self->next::can; |
482 | 481 | my $args = \@_; |
491 | 490 | } |
492 | 491 | else { |
493 | 492 | # Set the identity update flags for the actual update |
494 | local $self->{_autoinc_supplied_for_op} = (first | |
493 | local $self->{_autoinc_supplied_for_op} = grep | |
495 | 494 | { $_->{is_auto_increment} } |
496 | 495 | values %{ $source->columns_info([ keys %$fields ]) } |
497 | ) ? 1 : 0; | |
496 | ; | |
498 | 497 | |
499 | 498 | return $self->next::method(@_); |
500 | 499 | } |
506 | 505 | |
507 | 506 | my $columns_info = $source->columns_info; |
508 | 507 | |
509 | my $identity_col = | |
510 | first { $columns_info->{$_}{is_auto_increment} } | |
508 | my ($identity_col) = | |
509 | grep { $columns_info->{$_}{is_auto_increment} } | |
511 | 510 | keys %$columns_info; |
512 | 511 | |
513 | 512 | # FIXME - this is duplication from DBI.pm. When refactored towards |
514 | 513 | # the LobWriter this can be folded back where it belongs. |
515 | local $self->{_autoinc_supplied_for_op} = | |
516 | (first { $_ eq $identity_col } @$cols) | |
517 | ? 1 | |
518 | : 0 | |
519 | ; | |
514 | local $self->{_autoinc_supplied_for_op} | |
515 | = grep { $_ eq $identity_col } @$cols; | |
520 | 516 | |
521 | 517 | my $use_bulk_api = |
522 | 518 | $self->_bulk_storage && |
579 | 575 | my @source_columns = $source->columns; |
580 | 576 | |
581 | 577 | # bcp identity index is 1-based |
582 | my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns); | |
578 | my ($identity_idx) = grep { $source_columns[$_] eq $identity_col } (0..$#source_columns); | |
583 | 579 | $identity_idx = defined $identity_idx ? $identity_idx + 1 : 0; |
584 | 580 | |
585 | 581 | my @new_data; |
628 | 624 | |
629 | 625 | ## FIXME - once this is done - address the FIXME on finish() below |
630 | 626 | ## XXX get this to work instead of our own $sth |
631 | ## will require SQLA or *Hacks changes for ordered columns | |
627 | ## will require SQLMaker or *Hacks changes for ordered columns | |
632 | 628 | # $bulk->next::method($source, \@source_columns, \@new_data, { |
633 | 629 | # syb_bcp_attribs => { |
634 | 630 | # identity_flag => $self->_autoinc_supplied_for_op ? 1 : 0, |
77 | 77 | local $dbh->{RaiseError} = 1; |
78 | 78 | local $dbh->{PrintError} = 0; |
79 | 79 | |
80 | # FIXME if the main connection goes stale, does opening another for this statement | |
81 | # really determine anything? | |
82 | ||
83 | if ($dbh->{syb_no_child_con}) { | |
84 | return try { | |
85 | $self->_connect->do('select 1'); | |
86 | 1; | |
87 | } | |
88 | catch { | |
89 | 0; | |
90 | }; | |
91 | } | |
92 | ||
93 | return try { | |
94 | $dbh->do('select 1'); | |
95 | 1; | |
96 | } | |
97 | catch { | |
98 | 0; | |
99 | }; | |
80 | ( try { $dbh->do('select 1'); 1 } ) | |
81 | ? 1 | |
82 | : 0 | |
83 | ; | |
100 | 84 | } |
101 | 85 | |
102 | 86 | sub _set_max_connect { |
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base qw/DBIx::Class::Storage::DBI/; |
6 | ||
7 | use namespace::clean; | |
8 | 6 | |
9 | 7 | __PACKAGE__->sql_maker_class('DBIx::Class::SQLMaker::MySQL'); |
10 | 8 | __PACKAGE__->sql_limit_dialect ('LimitXY'); |
43 | 41 | return $self->next::method(@_) if ( $_[0] eq 'select' or $_[0] eq 'insert' ); |
44 | 42 | |
45 | 43 | |
46 | # FIXME FIXME FIXME - this is a terrible, gross, incomplete hack | |
47 | # it should be trivial for mst to port this to DQ (and a good | |
48 | # exercise as well, since we do not yet have such wide tree walking | |
49 | # in place). For the time being this will work in limited cases, | |
50 | # mainly complex update/delete, which is really all we want it for | |
51 | # currently (allows us to fix some bugs without breaking MySQL in | |
52 | # the process, and is also crucial for Shadow to be usable) | |
44 | # FIXME FIXME FIXME - this is a terrible, gross, incomplete, MySQL-specific | |
45 | # hack but it works rather well for the limited amount of actual use cases | |
46 | # which can not be done in any other way on MySQL. This allows us to fix | |
47 | # some bugs without breaking MySQL support in the process and is also | |
48 | # crucial for more complex things like Shadow to be usable | |
49 | # | |
50 | # This code is just a pre-analyzer, working in tandem with ::SQLMaker::MySQL, | |
51 | # where the possibly-set value of {_modification_target_referenced_re} is | |
52 | # used to demarcate which part of the final SQL to double-wrap in a subquery. | |
53 | # | |
54 | # This is covered extensively by "offline" tests, so that competing SQLMaker | |
55 | # implementations could benefit from the existing tests just as well. | |
53 | 56 | |
54 | 57 | # extract the source name, construct modification indicator re |
55 | 58 | my $sm = $self->sql_maker; |
8 | 8 | |
9 | 9 | use DBIx::Class::Carp; |
10 | 10 | use Scalar::Util qw/refaddr weaken reftype blessed/; |
11 | use List::Util qw/first/; | |
12 | 11 | use Context::Preserve 'preserve_context'; |
13 | 12 | use Try::Tiny; |
14 | use SQL::Abstract qw(is_plain_value is_literal_value); | |
15 | use DBIx::Class::_Util qw(quote_sub perlstring serialize detected_reinvoked_destructor); | |
13 | use SQL::Abstract::Util qw(is_plain_value is_literal_value); | |
14 | use DBIx::Class::_Util qw(quote_sub perlstring serialize detected_reinvoked_destructor sigwarn_silencer); | |
16 | 15 | use namespace::clean; |
17 | 16 | |
18 | 17 | # default cursor class, overridable in connect_info attributes |
1060 | 1059 | } |
1061 | 1060 | |
1062 | 1061 | sub _run_connection_actions { |
1062 | # there are pathological cases in the CI where this can loop | |
1063 | # did not investigate in depth, but in either case this makes | |
1064 | # sense to guard like this | |
1065 | return if $_[0]->{_running_connections_actions}; | |
1066 | ||
1067 | local $_[0]->{_running_connections_actions} = 1; | |
1063 | 1068 | |
1064 | 1069 | $_[0]->_do_connection_actions(connect_call_ => $_) for ( |
1065 | 1070 | ( $_[0]->on_connect_call || () ), |
1066 | 1071 | $_[0]->_parse_connect_do ('on_connect_do'), |
1067 | 1072 | ); |
1073 | ||
1074 | my $sqlac_like; | |
1075 | if( | |
1076 | DBIx::Class::_ENV_::DEVREL | |
1077 | and | |
1078 | $ENV{DBICDEVREL_SWAPOUT_SQLAC_WITH} | |
1079 | and | |
1080 | ( $sqlac_like ) = $ENV{DBICDEVREL_SWAPOUT_SQLAC_WITH} =~ /(.+)/ | |
1081 | and | |
1082 | # delay calling ->sql_maker as long as we can | |
1083 | # ensure_class_loaded returns undef or throws | |
1084 | ( Class::C3::Componentised->ensure_class_loaded( $sqlac_like ), 1 ) | |
1085 | and | |
1086 | ( ref $_[0]->sql_maker ) !~ /__REBASED__/ | |
1087 | ) { | |
1088 | ||
1089 | require DBIx::Class::SQLMaker::ClassicExtensions; | |
1090 | require SQL::Abstract::Classic; | |
1091 | ||
1092 | Class::C3::Componentised->inject_base( | |
1093 | 'DBICDevRel::SQLAC::SwapOut', | |
1094 | 'DBIx::Class::SQLMaker::ClassicExtensions', | |
1095 | $sqlac_like, | |
1096 | 'SQL::Abstract::Classic', | |
1097 | ); | |
1098 | ||
1099 | $_[0]->_do_connection_actions(connect_call_ => [[ rebase_sqlmaker => 'DBICDevRel::SQLAC::SwapOut' ]]); | |
1100 | } | |
1068 | 1101 | } |
1069 | 1102 | |
1070 | 1103 | |
1187 | 1220 | |
1188 | 1221 | $drv = "DBD::$drv" if $drv; |
1189 | 1222 | |
1190 | my $res = { | |
1191 | DBIC_DSN => $self->_dbi_connect_info->[0], | |
1192 | DBI_VER => DBI->VERSION, | |
1193 | DBIC_VER => DBIx::Class->VERSION, | |
1194 | DBIC_DRIVER => ref $self, | |
1195 | $drv ? ( | |
1196 | DBD => $drv, | |
1197 | DBD_VER => try { $drv->VERSION }, | |
1198 | ) : (), | |
1223 | my $res = do { | |
1224 | local $SIG{__WARN__} = sigwarn_silencer(qr/Argument .+? isn't numeric in subroutine entry/); | |
1225 | { | |
1226 | DBIC_DSN => $self->_dbi_connect_info->[0], | |
1227 | DBI_VER => DBI->VERSION, | |
1228 | DBIC_VER => DBIx::Class->VERSION, | |
1229 | DBIC_DRIVER => ref $self, | |
1230 | $drv ? ( | |
1231 | DBD => $drv, | |
1232 | DBD_VER => try { $drv->VERSION }, | |
1233 | ) : (), | |
1234 | } | |
1199 | 1235 | }; |
1200 | 1236 | |
1201 | 1237 | # try to grab data even if we never managed to connect |
1472 | 1508 | return $self; |
1473 | 1509 | } |
1474 | 1510 | |
1511 | =head2 connect_call_rebase_sqlmaker | |
1512 | ||
1513 | This on-connect call takes as a single argument the name of a class to "rebase" | |
1514 | the SQLMaker inheritance hierarchy upon. For this to work properly the target | |
1515 | class B<MUST> inherit from L<DBIx::Class::SQLMaker::ClassicExtensions> and | |
1516 | L<SQL::Abstract::Classic> as shown below. | |
1517 | ||
1518 | This infrastructure is provided to aid recent activity around experimental new | |
1519 | approaches to SQL generation within DBIx::Class. You can (and are encouraged to) | |
1520 | mix and match old and new within the same codebase as follows: | |
1521 | ||
1522 | package DBIx::Class::Awesomer::SQLMaker; | |
1523 | # you MUST inherit in this order to get the composition right | |
1524 | # you are free to override-without-next::method any part you need | |
1525 | use base qw( | |
1526 | DBIx::Class::SQLMaker::ClassicExtensions | |
1527 | << OPTIONAL::AWESOME::Class::Implementing::ExtraRainbowSauce >> | |
1528 | SQL::Abstract::Classic | |
1529 | ); | |
1530 | << your new code goes here >> | |
1531 | ||
1532 | ||
1533 | ... and then ... | |
1534 | ||
1535 | ||
1536 | my $experimental_schema = $original_schema->connect( | |
1537 | sub { | |
1538 | $original_schema->storage->dbh | |
1539 | }, | |
1540 | { | |
1541 | # the nested arrayref is important, as per | |
1542 | # https://metacpan.org/pod/DBIx::Class::Storage::DBI#on_connect_call | |
1543 | on_connect_call => [ [ rebase_sqlmaker => 'DBIx::Class::Awesomer::SQLMaker' ] ], | |
1544 | }, | |
1545 | ); | |
1546 | ||
1547 | =cut | |
1548 | ||
1549 | sub connect_call_rebase_sqlmaker { | |
1550 | my( $self, $requested_base_class ) = @_; | |
1551 | ||
1552 | $self->throw_exception( | |
1553 | "The on_connect callee 'rebase_sqlmaker' expects a single plain string argument: the name of the target base class" | |
1554 | ) if ( | |
1555 | @_ != 2 | |
1556 | or | |
1557 | ! length( $requested_base_class ) | |
1558 | ); | |
1559 | ||
1560 | my $old_class = ref( $self->sql_maker ); | |
1561 | ||
1562 | # nothing to do! | |
1563 | return if $old_class->isa( $requested_base_class ); | |
1564 | ||
1565 | my $synthetic_class = "${old_class}__REBASED_ON__${requested_base_class}"; | |
1566 | ||
1567 | { | |
1568 | no strict 'refs'; | |
1569 | ||
1570 | # skip if we already made that class | |
1571 | unless( @{"${synthetic_class}::ISA"} ) { | |
1572 | ||
1573 | $self->ensure_class_loaded( $requested_base_class ); | |
1574 | ||
1575 | for my $base (qw( | |
1576 | DBIx::Class::SQLMaker::ClassicExtensions | |
1577 | SQL::Abstract::Classic | |
1578 | )) { | |
1579 | ||
1580 | $self->throw_exception( | |
1581 | "The 'rebase_sqlmaker' target class '$requested_base_class' is not inheriting from '$base', this can not work" | |
1582 | ) unless $requested_base_class->isa( $base ); | |
1583 | } | |
1584 | ||
1585 | $self->inject_base( $synthetic_class, $old_class, $requested_base_class ); | |
1586 | ||
1587 | Class::C3->reinitialize | |
1588 | if DBIx::Class::_ENV_::OLD_MRO; | |
1589 | } | |
1590 | } | |
1591 | ||
1592 | # force re-build on next access for this particular $storage instance | |
1593 | $self->sql_maker_class( $synthetic_class ); | |
1594 | $self->_sql_maker( undef ); | |
1595 | } | |
1596 | ||
1475 | 1597 | sub _connect { |
1476 | 1598 | my $self = shift; |
1477 | 1599 | |
1686 | 1808 | and |
1687 | 1809 | $op eq 'select' |
1688 | 1810 | and |
1689 | first { | |
1811 | grep { | |
1690 | 1812 | length ref $_->[1] |
1691 | 1813 | and |
1692 | 1814 | blessed($_->[1]) |
1936 | 2058 | # they can be fused once again with the final return |
1937 | 2059 | $to_insert = { %$to_insert, %$prefetched_values }; |
1938 | 2060 | |
1939 | # FIXME - we seem to assume undef values as non-supplied. This is wrong. | |
1940 | # Investigate what does it take to s/defined/exists/ | |
1941 | 2061 | my %pcols = map { $_ => 1 } $source->primary_columns; |
2062 | ||
1942 | 2063 | my (%retrieve_cols, $autoinc_supplied, $retrieve_autoinc_col); |
2064 | ||
1943 | 2065 | for my $col ($source->columns) { |
2066 | ||
2067 | # first autoinc wins - this is why ->columns() in-order iteration is important | |
2068 | # | |
2069 | # FIXME - there ought to be a sanity-check for multiple is_auto_increment settings | |
2070 | # or something... | |
2071 | # | |
1944 | 2072 | if ($col_infos->{$col}{is_auto_increment}) { |
2073 | ||
2074 | # FIXME - we seem to assume undef values as non-supplied. | |
2075 | # This is wrong. | |
2076 | # Investigate what does it take to s/defined/exists/ | |
2077 | # ( fails t/cdbi/copy.t among other things ) | |
1945 | 2078 | $autoinc_supplied ||= 1 if defined $to_insert->{$col}; |
2079 | ||
1946 | 2080 | $retrieve_autoinc_col ||= $col unless $autoinc_supplied; |
1947 | 2081 | } |
1948 | 2082 | |
1949 | 2083 | # nothing to retrieve when explicit values are supplied |
1950 | 2084 | next if ( |
1951 | defined $to_insert->{$col} and ! is_literal_value($to_insert->{$col}) | |
2085 | # FIXME - we seem to assume undef values as non-supplied. | |
2086 | # This is wrong. | |
2087 | # Investigate what does it take to s/defined/exists/ | |
2088 | # ( fails t/cdbi/copy.t among other things ) | |
2089 | defined $to_insert->{$col} | |
2090 | and | |
2091 | ( | |
2092 | # not a ref - cheaper to check before a call to is_literal_value() | |
2093 | ! length ref $to_insert->{$col} | |
2094 | or | |
2095 | # not a literal we *MAY* need to pull out ( see check below ) | |
2096 | ! is_literal_value( $to_insert->{$col} ) | |
2097 | ) | |
1952 | 2098 | ); |
1953 | 2099 | |
1954 | 2100 | # the 'scalar keys' is a trick to preserve the ->columns declaration order |
1958 | 2104 | $col_infos->{$col}{retrieve_on_insert} |
1959 | 2105 | ); |
1960 | 2106 | }; |
2107 | ||
2108 | # corner case of a non-supplied PK which is *not* declared as autoinc | |
2109 | if ( | |
2110 | ! $autoinc_supplied | |
2111 | and | |
2112 | ! defined $retrieve_autoinc_col | |
2113 | and | |
2114 | # FIXME - first come-first serve, suboptimal... | |
2115 | ($retrieve_autoinc_col) = ( grep | |
2116 | { | |
2117 | $pcols{$_} | |
2118 | and | |
2119 | ! $col_infos->{$_}{retrieve_on_insert} | |
2120 | and | |
2121 | ! defined $col_infos->{$_}{is_auto_increment} | |
2122 | } | |
2123 | sort | |
2124 | { $retrieve_cols{$a} <=> $retrieve_cols{$b} } | |
2125 | keys %retrieve_cols | |
2126 | ) | |
2127 | ) { | |
2128 | carp_unique( | |
2129 | "Missing value for primary key column '$retrieve_autoinc_col' on " | |
2130 | . "@{[ $source->source_name ]} - perhaps you forgot to set its " | |
2131 | . "'is_auto_increment' attribute during add_columns()? Treating " | |
2132 | . "'$retrieve_autoinc_col' implicitly as an autoinc, and attempting " | |
2133 | . 'value retrieval' | |
2134 | ); | |
2135 | } | |
1961 | 2136 | |
1962 | 2137 | local $self->{_autoinc_supplied_for_op} = $autoinc_supplied; |
1963 | 2138 | local $self->{_perform_autoinc_retrieval} = $retrieve_autoinc_col; |
2087 | 2262 | # because a user-supplied literal/bind (or something else specific to a |
2088 | 2263 | # resultsource and/or storage driver) can inject extra binds along the |
2089 | 2264 | # way, so one can't rely on "shift positions" ordering at all. Also we |
2090 | # can't just hand SQLA a set of some known "values" (e.g. hashrefs that | |
2265 | # can't just hand SQLMaker a set of some known "values" (e.g. hashrefs that | |
2091 | 2266 | # can be later matched up by address), because we want to supply a real |
2092 | 2267 | # value on which perhaps e.g. datatype checks will be performed |
2093 | 2268 | my ($proto_data, $serialized_bind_type_by_col_idx); |
2435 | 2610 | # however currently we *may* pass the same $orig_attrs |
2436 | 2611 | # with different ident/select/where |
2437 | 2612 | # the whole interface needs to be rethought, since it |
2438 | # was centered around the flawed SQLA API. We can do | |
2613 | # was centered around the flawed SQLMaker API. We can do | |
2439 | 2614 | # soooooo much better now. But that is also another |
2440 | 2615 | # battle... |
2441 | 2616 | #return ( |
1 | 1 | DBIx::Class::Storage::DBIHacks; |
2 | 2 | |
3 | 3 | # |
4 | # This module contains code that should never have seen the light of day, | |
5 | # does not belong in the Storage, or is otherwise unfit for public | |
6 | # display. The arrival of SQLA2 should immediately obsolete 90% of this | |
4 | # This module contains code supporting a battery of special cases and tests for | |
5 | # many corner cases pushing the envelope of what DBIC can do. When work on | |
6 | # these utilities began in mid 2009 (51a296b402c) it wasn't immediately obvious | |
7 | # that these pieces, despite their misleading on-first-sight flakiness, will | |
8 | # become part of the generic query rewriting machinery of DBIC, allowing it to | |
9 | # both generate and process queries representing incredibly complex sets with | |
10 | # reasonable efficiency. | |
11 | # | |
12 | # Now (end of 2019), more than 10 years later the routines in this class have | |
13 | # stabilized enough, and are meticulously covered with tests, to a point where | |
14 | # an effort to formalize them into user-facing APIs might be worthwhile. | |
15 | # | |
16 | # An implementor working on publicizing and/or replacing the routines with a | |
17 | # more modern SQL generation framework should keep in mind that pretty much all | |
18 | # existing tests are constructed on the basis of real-world code used in | |
19 | # production somewhere. | |
20 | # | |
21 | # Please hack on this responsibly ;) | |
7 | 22 | # |
8 | 23 | |
9 | 24 | use strict; |
12 | 27 | use base 'DBIx::Class::Storage'; |
13 | 28 | use mro 'c3'; |
14 | 29 | |
15 | use List::Util 'first'; | |
16 | 30 | use Scalar::Util 'blessed'; |
17 | 31 | use DBIx::Class::_Util qw(UNRESOLVABLE_CONDITION serialize); |
18 | use SQL::Abstract qw(is_plain_value is_literal_value); | |
32 | use SQL::Abstract::Util qw(is_plain_value is_literal_value); | |
19 | 33 | use DBIx::Class::Carp; |
20 | 34 | use namespace::clean; |
21 | 35 | |
312 | 326 | ) { |
313 | 327 | push @outer_from, $j |
314 | 328 | } |
315 | elsif (first { $_->{$alias} } @outer_nonselecting_chains ) { | |
329 | elsif (grep { $_->{$alias} } @outer_nonselecting_chains ) { | |
316 | 330 | push @outer_from, $j; |
317 | 331 | $may_need_outer_group_by ||= $outer_aliastypes->{multiplying}{$alias} ? 1 : 0; |
318 | 332 | } |
329 | 343 | }); |
330 | 344 | } |
331 | 345 | |
332 | # This is totally horrific - the {where} ends up in both the inner and outer query | |
333 | # Unfortunately not much can be done until SQLA2 introspection arrives, and even | |
334 | # then if where conditions apply to the *right* side of the prefetch, you may have | |
335 | # to both filter the inner select (e.g. to apply a limit) and then have to re-filter | |
336 | # the outer select to exclude joins you didn't want in the first place | |
346 | # FIXME: The {where} ends up in both the inner and outer query, i.e. *twice* | |
347 | # | |
348 | # This is rather horrific, and while we currently *do* have enough | |
349 | # introspection tooling available to attempt a stab at properly deciding | |
350 | # whether or not to include the where condition on the outside, the | |
351 | # machinery is still too slow to apply it here. | |
352 | # Thus for the time being we do not attempt any sanitation of the where | |
353 | # clause and just pass it through on both sides of the subquery. This *will* | |
354 | # be addressed at a later stage, most likely after folding the SQL generator | |
355 | # into SQLMaker proper | |
337 | 356 | # |
338 | 357 | # OTOH it can be seen as a plus: <ash> (notes that this query would make a DBA cry ;) |
358 | # | |
339 | 359 | return $outer_attrs; |
340 | 360 | } |
341 | 361 | |
342 | 362 | # |
343 | # I KNOW THIS SUCKS! GET SQLA2 OUT THE DOOR SO THIS CAN DIE! | |
344 | # | |
345 | # Due to a lack of SQLA2 we fall back to crude scans of all the | |
346 | # select/where/order/group attributes, in order to determine what | |
347 | # aliases are needed to fulfill the query. This information is used | |
348 | # throughout the code to prune unnecessary JOINs from the queries | |
349 | # in an attempt to reduce the execution time. | |
350 | # Although the method is pretty horrific, the worst thing that can | |
351 | # happen is for it to fail due to some scalar SQL, which in turn will | |
352 | # result in a vocal exception. | |
363 | # This is probably the ickiest, yet most relied upon part of the codebase: | |
364 | # this is the place where we take arbitrary SQL input and break it into its | |
365 | # constituent parts, making sure we know which *sources* are used in what | |
366 | # *capacity* ( selecting / restricting / grouping / ordering / joining, etc. ) | |
367 | # Although the method is pretty horrific, the worst thing that can happen is | |
368 | # for a classification failure, which in turn will result in a vocal exception, | |
369 | # and will lead to a relatively prompt fix. | |
370 | # The code has been slowly improving and is covered with a formidable battery | |
371 | # of tests, so can be considered "reliably stable" at this point (Oct 2015). | |
372 | # | |
373 | # A note to implementors attempting to "replace" this - keep in mind that while | |
374 | # there are multiple optimization avenues, the actual "scan literal elements" | |
375 | # part *MAY NEVER BE REMOVED*, even if in the future it is limited to only AST | |
376 | # nodes that are deemed opaque (i.e. contain literal expressions). The use and | |
377 | # comprehension of blackbox literals is at this point firmly a user-facing API, | |
378 | # and is one of *the* reasons DBIC remains as flexible as it is. | |
379 | # | |
380 | # In other words, when working on this keep in mind that the following is both | |
381 | # a widespread and *encouraged* way of using DBIC in the wild when push comes | |
382 | # to shove: | |
383 | # | |
384 | # $rs->search( {}, { | |
385 | # select => \[ $random, @stuff], | |
386 | # from => \[ $random, @stuff ], | |
387 | # where => \[ $random, @stuff ], | |
388 | # group_by => \[ $random, @stuff ], | |
389 | # order_by => \[ $random, @stuff ], | |
390 | # } ) | |
391 | # | |
392 | # Various incarnations of the above are reflected in many of the tests. If one | |
393 | # gets to fail, or if a user complains: you get to fix it. A stance amounting | |
394 | # to "this is crazy, nobody does that" is not acceptable going forward. | |
395 | # | |
353 | 396 | sub _resolve_aliastypes_from_select_args { |
354 | 397 | my ( $self, $attrs ) = @_; |
355 | 398 | |
386 | 429 | # get a column to source/alias map (including unambiguous unqualified ones) |
387 | 430 | my $colinfo = $self->_resolve_column_info ($attrs->{from}); |
388 | 431 | |
389 | # set up a botched SQLA | |
432 | # set up a botched SQLMaker | |
390 | 433 | my $sql_maker = $self->sql_maker; |
391 | 434 | |
392 | 435 | # these are throw away results, do not pollute the bind stack |
979 | 1022 | ]) ? $colinfos_to_return : (); |
980 | 1023 | } |
981 | 1024 | |
982 | # Attempts to flatten a passed in SQLA condition as much as possible towards | |
1025 | # Attempts to flatten a passed in SQLAC condition as much as possible towards | |
983 | 1026 | # a plain hashref, *without* altering its semantics. Required by |
984 | 1027 | # create/populate being able to extract definitive conditions from preexisting |
985 | 1028 | # resultset {where} stacks |
986 | 1029 | # |
987 | 1030 | # FIXME - while relatively robust, this is still imperfect, one of the first |
988 | # things to tackle with DQ | |
1031 | # things to tackle when we get access to a formalized AST. Note that this code | |
1032 | # is covered by a *ridiculous* amount of tests, so starting with porting this | |
1033 | # code would be a rather good exercise | |
989 | 1034 | sub _collapse_cond { |
990 | 1035 | my ($self, $where, $where_is_anded_array) = @_; |
991 | 1036 | |
1005 | 1050 | if (ref $chunk eq 'HASH') { |
1006 | 1051 | for (sort keys %$chunk) { |
1007 | 1052 | |
1008 | # Match SQLA 1.79 behavior | |
1053 | # Match SQLAC 1.79 behavior | |
1009 | 1054 | if ($_ eq '') { |
1010 | 1055 | is_literal_value($chunk->{$_}) |
1011 | 1056 | ? carp 'Hash-pairs consisting of an empty string with a literal are deprecated, use -and => [ $literal ] instead' |
1022 | 1067 | } |
1023 | 1068 | elsif ( ! length ref $chunk) { |
1024 | 1069 | |
1025 | # Match SQLA 1.79 behavior | |
1070 | # Match SQLAC 1.79 behavior | |
1026 | 1071 | $self->throw_exception("Supplying an empty left hand side argument is not supported in array-pairs") |
1027 | 1072 | if $where_is_anded_array and (! defined $chunk or $chunk eq ''); |
1028 | 1073 | |
1078 | 1123 | |
1079 | 1124 | for (my $i = 0; $i <= $#$where; $i++ ) { |
1080 | 1125 | |
1081 | # Match SQLA 1.79 behavior | |
1126 | # Match SQLAC 1.79 behavior | |
1082 | 1127 | $self->throw_exception( |
1083 | 1128 | "Supplying an empty left hand side argument is not supported in array-pairs" |
1084 | 1129 | ) if (! defined $where->[$i] or ! length $where->[$i]); |
1232 | 1277 | } |
1233 | 1278 | else { |
1234 | 1279 | if (ref $rhs eq 'HASH' and ! keys %$rhs) { |
1235 | # FIXME - SQLA seems to be doing... nothing...? | |
1280 | # FIXME - SQLAC seems to be doing... nothing...? | |
1236 | 1281 | } |
1237 | 1282 | # normalize top level -ident, for saner extract_fixed_condition_columns code |
1238 | 1283 | elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-ident}) { |
1239 | 1284 | push @conds, { $lhs => { '=', $rhs } }; |
1240 | 1285 | } |
1241 | elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{-value} and is_plain_value $rhs->{-value}) { | |
1286 | # can't simply use is_plain_value result, as we need to | |
1287 | # preserve the -value marker where necessary (non-blessed ref) | |
1288 | elsif ( | |
1289 | ref $rhs eq 'HASH' | |
1290 | and | |
1291 | keys %$rhs == 1 | |
1292 | and | |
1293 | exists $rhs->{-value} | |
1294 | and | |
1295 | ( | |
1296 | ! length ref( $rhs->{-value} ) | |
1297 | or | |
1298 | ( | |
1299 | defined( blessed $rhs->{-value} ) | |
1300 | and | |
1301 | is_plain_value $rhs->{-value} | |
1302 | ) | |
1303 | ) | |
1304 | ) { | |
1242 | 1305 | push @conds, { $lhs => $rhs->{-value} }; |
1243 | 1306 | } |
1244 | 1307 | elsif (ref $rhs eq 'HASH' and keys %$rhs == 1 and exists $rhs->{'='}) { |
1303 | 1366 | } |
1304 | 1367 | } |
1305 | 1368 | # unroll func + { -value => ... } |
1369 | # can't simply use is_plain_value result, as we need to | |
1370 | # preserve the -value marker where necessary (non-blessed ref) | |
1306 | 1371 | elsif ( |
1307 | 1372 | ref $rhs eq 'HASH' |
1308 | 1373 | and |
1309 | 1374 | ( my ($subop) = keys %$rhs ) == 1 |
1310 | 1375 | and |
1311 | length ref ((values %$rhs)[0]) | |
1376 | ref( (values %$rhs)[0] ) eq 'HASH' | |
1312 | 1377 | and |
1313 | my $vref = is_plain_value( (values %$rhs)[0] ) | |
1378 | keys %{ (values %$rhs)[0] } == 1 | |
1379 | and | |
1380 | exists( (values %$rhs)[0]->{-value} ) | |
1381 | and | |
1382 | ( | |
1383 | ! length ref( (values %$rhs)[0]->{-value} ) | |
1384 | or | |
1385 | ( | |
1386 | defined( blessed( (values %$rhs)[0]->{-value} ) ) | |
1387 | and | |
1388 | is_plain_value( (values %$rhs)[0]->{-value} ) | |
1389 | ) | |
1390 | ) | |
1314 | 1391 | ) { |
1315 | push @conds, { $lhs => { $subop => $$vref } } | |
1392 | push @conds, { $lhs => { $subop => (values %$rhs)[0]->{-value} } }; | |
1316 | 1393 | } |
1317 | 1394 | else { |
1318 | 1395 | push @conds, { $lhs => $rhs }; |
0 | package DBIx::Class::Storage::Debug::PrettyTrace; | |
1 | ||
2 | use strict; | |
3 | use warnings; | |
4 | ||
5 | use base 'DBIx::Class::Storage::Statistics'; | |
6 | ||
7 | use SQL::Abstract::Tree; | |
8 | ||
9 | __PACKAGE__->mk_group_accessors( simple => '_sqlat' ); | |
10 | __PACKAGE__->mk_group_accessors( simple => '_clear_line_str' ); | |
11 | __PACKAGE__->mk_group_accessors( simple => '_executing_str' ); | |
12 | __PACKAGE__->mk_group_accessors( simple => '_show_progress' ); | |
13 | __PACKAGE__->mk_group_accessors( simple => '_last_sql' ); | |
14 | __PACKAGE__->mk_group_accessors( simple => 'squash_repeats' ); | |
15 | ||
16 | sub new { | |
17 | my $class = shift; | |
18 | my $args = shift; | |
19 | ||
20 | my $clear_line = $args->{clear_line} || "\r\x1b[J"; | |
21 | my $executing = $args->{executing} || ( | |
22 | eval { require Term::ANSIColor } ? do { | |
23 | my $c = \&Term::ANSIColor::color; | |
24 | $c->('blink white on_black') . 'EXECUTING...' . $c->('reset'); | |
25 | } : 'EXECUTING...' | |
26 | ); | |
27 | my $show_progress = $args->{show_progress}; | |
28 | ||
29 | my $squash_repeats = $args->{squash_repeats}; | |
30 | my $sqlat = SQL::Abstract::Tree->new($args); | |
31 | my $self = $class->next::method(@_); | |
32 | $self->_clear_line_str($clear_line); | |
33 | $self->_executing_str($executing); | |
34 | $self->_show_progress($show_progress); | |
35 | ||
36 | $self->squash_repeats($squash_repeats); | |
37 | ||
38 | $self->_sqlat($sqlat); | |
39 | $self->_last_sql(''); | |
40 | ||
41 | return $self | |
42 | } | |
43 | ||
44 | sub print { | |
45 | my $self = shift; | |
46 | my $string = shift; | |
47 | my $bindargs = shift || []; | |
48 | ||
49 | my ($lw, $lr); | |
50 | ($lw, $string, $lr) = $string =~ /^(\s*)(.+?)(\s*)$/s; | |
51 | ||
52 | local $self->_sqlat->{fill_in_placeholders} = 0 if defined $bindargs | |
53 | && defined $bindargs->[0] && $bindargs->[0] eq q('__BULK_INSERT__'); | |
54 | ||
55 | my $use_placeholders = !!$self->_sqlat->fill_in_placeholders; | |
56 | ||
57 | my $sqlat = $self->_sqlat; | |
58 | my $formatted; | |
59 | if ($self->squash_repeats && $self->_last_sql eq $string) { | |
60 | my ( $l, $r ) = @{ $sqlat->placeholder_surround }; | |
61 | $formatted = '... : ' . join(', ', map "$l$_$r", @$bindargs) | |
62 | } else { | |
63 | $self->_last_sql($string); | |
64 | $formatted = $sqlat->format($string, $bindargs); | |
65 | $formatted = "$formatted : " . join ', ', @{$bindargs} | |
66 | unless $use_placeholders; | |
67 | } | |
68 | ||
69 | $self->next::method("$lw$formatted$lr", @_); | |
70 | } | |
71 | ||
72 | sub query_start { | |
73 | my ($self, $string, @bind) = @_; | |
74 | ||
75 | if (defined $self->callback) { | |
76 | $string =~ m/^(\w+)/; | |
77 | $self->callback->($1, "$string: ".join(', ', @bind)."\n"); | |
78 | return; | |
79 | } | |
80 | ||
81 | $string =~ s/\s+$//; | |
82 | ||
83 | $self->print("$string\n", \@bind); | |
84 | ||
85 | $self->debugfh->print($self->_executing_str) if $self->_show_progress | |
86 | } | |
87 | ||
88 | sub query_end { | |
89 | $_[0]->debugfh->print($_[0]->_clear_line_str) if $_[0]->_show_progress | |
90 | } | |
91 | ||
92 | 1; | |
93 | ||
94 | =pod | |
95 | ||
96 | =head1 NAME | |
97 | ||
98 | DBIx::Class::Storage::Debug::PrettyTrace - Pretty Tracing DebugObj | |
99 | ||
100 | =head1 SYNOPSIS | |
101 | ||
102 | DBIC_TRACE_PROFILE=~/dbic.json perl -Ilib ./foo.pl | |
103 | ||
104 | Where dbic.json contains: | |
105 | ||
106 | { | |
107 | "profile":"console", | |
108 | "show_progress":1, | |
109 | "squash_repeats":1 | |
110 | } | |
111 | ||
112 | =head1 METHODS | |
113 | ||
114 | =head2 new | |
115 | ||
116 | my $pp = DBIx::Class::Storage::Debug::PrettyTrace->new({ | |
117 | show_progress => 1, # tries its best to make it clear that a SQL | |
118 | # statement is still running | |
119 | executing => '...', # the string that is added to the end of SQL | |
120 | # if show_progress is on. You probably don't | |
121 | # need to set this | |
122 | clear_line => '<CR><ESC>[J', # the string used to erase the string added | |
123 | # to SQL if show_progress is on. Again, the | |
124 | # default is probably good enough. | |
125 | ||
126 | squash_repeats => 1, # set to true to make repeated SQL queries | |
127 | # be elided and only show the new bind params | |
128 | # any other args are passed through directly to SQL::Abstract::Tree | |
129 | }); | |
130 | ||
131 | =head1 FURTHER QUESTIONS? | |
132 | ||
133 | Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>. | |
134 | ||
135 | =head1 COPYRIGHT AND LICENSE | |
136 | ||
137 | This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE> | |
138 | by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can | |
139 | redistribute it and/or modify it under the same terms as the | |
140 | L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>. | |
141 | ||
142 | =cut |
471 | 471 | |
472 | 472 | $self->{debugobj} ||= do { |
473 | 473 | if (my $profile = $ENV{DBIC_TRACE_PROFILE}) { |
474 | require DBIx::Class::Storage::Debug::PrettyPrint; | |
474 | require DBIx::Class::Storage::Debug::PrettyTrace; | |
475 | 475 | my @pp_args; |
476 | 476 | |
477 | 477 | if ($profile =~ /^\.?\//) { |
496 | 496 | # *without* throwing an exception |
497 | 497 | # This is a rather serious problem in the debug codepath |
498 | 498 | # Insulate the condition here with a try{} until a review of |
499 | # DBIx::Class::Storage::Debug::PrettyPrint takes place | |
499 | # DBIx::Class::Storage::Debug::PrettyTrace takes place | |
500 | 500 | # we do rethrow the error unconditionally, the only reason |
501 | 501 | # to try{} is to preserve the precise state of $@ (down |
502 | 502 | # to the scalar (if there is one) address level) |
504 | 504 | # Yes I am aware this is fragile and TxnScopeGuard needs |
505 | 505 | # a better fix. This is another yak to shave... :( |
506 | 506 | try { |
507 | DBIx::Class::Storage::Debug::PrettyPrint->new(@pp_args); | |
507 | DBIx::Class::Storage::Debug::PrettyTrace->new(@pp_args); | |
508 | 508 | } catch { |
509 | 509 | $self->throw_exception($_); |
510 | 510 | } |
631 | 631 | |
632 | 632 | =head2 DBIC_TRACE_PROFILE |
633 | 633 | |
634 | If C<DBIC_TRACE_PROFILE> is set, L<DBIx::Class::Storage::Debug::PrettyPrint> | |
634 | If C<DBIC_TRACE_PROFILE> is set, L<DBIx::Class::Storage::Debug::PrettyTrace> | |
635 | 635 | will be used to format the output from C<DBIC_TRACE>. The value it |
636 | 636 | is set to is the C<profile> that it will be used. If the value is a |
637 | 637 | filename the file is read with L<Config::Any> and the results are |
645 | 645 | =head1 SEE ALSO |
646 | 646 | |
647 | 647 | L<DBIx::Class::Storage::DBI> - reference storage implementation using |
648 | SQL::Abstract and DBI. | |
648 | DBI and a subclass of SQL::Abstract::Classic ( or similar ) | |
649 | 649 | |
650 | 650 | =head1 FURTHER QUESTIONS? |
651 | 651 |
63 | 63 | use Carp 'croak'; |
64 | 64 | use Storable 'nfreeze'; |
65 | 65 | use Scalar::Util qw(weaken blessed reftype refaddr); |
66 | use List::Util qw(first); | |
67 | 66 | use Sub::Quote qw(qsub quote_sub); |
68 | 67 | |
69 | 68 | use base 'Exporter'; |
71 | 70 | sigwarn_silencer modver_gt_or_eq modver_gt_or_eq_and_lt |
72 | 71 | fail_on_internal_wantarray fail_on_internal_call |
73 | 72 | refdesc refcount hrefaddr |
74 | scope_guard is_exception detected_reinvoked_destructor | |
73 | scope_guard is_exception detected_reinvoked_destructor emit_loud_diag | |
75 | 74 | quote_sub qsub perlstring serialize |
76 | 75 | UNRESOLVABLE_CONDITION |
77 | 76 | ); |
116 | 115 | local $Storable::canonical = 1; |
117 | 116 | nfreeze($_[0]); |
118 | 117 | } |
118 | ||
119 | ||
120 | my $seen_loud_screams; | |
121 | sub emit_loud_diag { | |
122 | my $args = { ref $_[0] eq 'HASH' ? %{$_[0]} : @_ }; | |
123 | ||
124 | unless ( defined $args->{msg} and length $args->{msg} ) { | |
125 | emit_loud_diag( | |
126 | msg => "No 'msg' value supplied to emit_loud_diag()" | |
127 | ); | |
128 | exit 70; | |
129 | } | |
130 | ||
131 | my $msg = "\n" . join( ': ', | |
132 | ( $0 eq '-e' ? () : $0 ), | |
133 | $args->{msg} | |
134 | ); | |
135 | ||
136 | # when we die - we usually want to keep doing it | |
137 | $args->{emit_dups} = !!$args->{confess} | |
138 | unless exists $args->{emit_dups}; | |
139 | ||
140 | local $Carp::CarpLevel = | |
141 | ( $args->{skip_frames} || 0 ) | |
142 | + | |
143 | $Carp::CarpLevel | |
144 | + | |
145 | # hide our own frame | |
146 | 1 | |
147 | ; | |
148 | ||
149 | my $longmess = Carp::longmess(); | |
150 | ||
151 | # different object references will thwart deduplication without this | |
152 | ( my $key = "${msg}\n${longmess}" ) =~ s/\b0x[0-9a-f]+\b/0x.../gi; | |
153 | ||
154 | return $seen_loud_screams->{$key} if | |
155 | $seen_loud_screams->{$key}++ | |
156 | and | |
157 | ! $args->{emit_dups} | |
158 | ; | |
159 | ||
160 | $msg .= $longmess | |
161 | unless $msg =~ /\n\z/; | |
162 | ||
163 | print STDERR "$msg\n" | |
164 | or | |
165 | print STDOUT "\n!!!STDERR ISN'T WRITABLE!!!:$msg\n"; | |
166 | ||
167 | return $seen_loud_screams->{$key} | |
168 | unless $args->{confess}; | |
169 | ||
170 | # increment *again*, because... Carp. | |
171 | $Carp::CarpLevel++; | |
172 | # not $msg - Carp will reapply the longmess on its own | |
173 | Carp::confess($args->{msg}); | |
174 | } | |
175 | ||
119 | 176 | |
120 | 177 | sub scope_guard (&) { |
121 | 178 | croak 'Calling scope_guard() in void context makes no sense' |
10 | 10 | # $VERSION declaration must stay up here, ahead of any other package |
11 | 11 | # declarations, as to not confuse various modules attempting to determine |
12 | 12 | # this ones version, whether that be s.c.o. or Module::Metadata, etc |
13 | $VERSION = '0.082841'; | |
13 | $VERSION = '0.082842'; | |
14 | ||
15 | { | |
16 | package | |
17 | DBIx::Class::_ENV_; | |
18 | ||
19 | require constant; | |
20 | constant->import( DEVREL => ( ($DBIx::Class::VERSION =~ /_/) ? 1 : 0 ) ); | |
21 | } | |
14 | 22 | |
15 | 23 | $VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases |
16 | 24 | |
110 | 118 | |
111 | 119 | =over |
112 | 120 | |
113 | =item * IRC: irc.perl.org#dbix-class | |
114 | ||
115 | =for html | |
116 | <a href="https://chat.mibbit.com/#dbix-class@irc.perl.org">(click for instant chatroom login)</a> | |
117 | ||
118 | =item * Mailing list: L<http://lists.scsys.co.uk/mailman/listinfo/dbix-class> | |
119 | ||
120 | =item * RT Bug Tracker: L<https://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class> | |
121 | ||
122 | =item * Twitter: L<https://www.twitter.com/dbix_class> | |
123 | ||
124 | =item * Web Site: L<http://www.dbix-class.org/> | |
121 | =item * RT Bug Tracker: L<https://rt.cpan.org/Public/Dist/Display.html?Name=DBIx-Class> | |
122 | ||
123 | =item * Email: L<mailto:bug-DBIx-Class@rt.cpan.org> | |
124 | ||
125 | =item * Twitter: L<https://twitter.com/intent/tweet?text=%40ribasushi%20%23DBIC> | |
125 | 126 | |
126 | 127 | =back |
127 | 128 | |
196 | 197 | # Create a result set to search for artists. |
197 | 198 | # This does not query the DB. |
198 | 199 | my $johns_rs = $schema->resultset('Artist')->search( |
199 | # Build your WHERE using an SQL::Abstract structure: | |
200 | # Build your WHERE using an SQL::Abstract::Classic-compatible structure: | |
200 | 201 | { name => { like => 'John%' } } |
201 | 202 | ); |
202 | 203 | |
288 | 289 | |
289 | 290 | =item * Current git repository: L<https://github.com/Perl5/DBIx-Class> |
290 | 291 | |
291 | =item * Travis-CI log: L<https://travis-ci.org/Perl5/DBIx-Class/branches> | |
292 | =item * Travis-CI log: L<https://travis-ci.com/github/Perl5/DBIx-Class/branches> | |
292 | 293 | |
293 | 294 | =back |
294 | 295 |
23 | 23 | |
24 | 24 | =over |
25 | 25 | |
26 | =item * IRC: irc.perl.org#dbix-class | |
27 | ||
28 | =for html | |
29 | <a href="https://chat.mibbit.com/#dbix-class@irc.perl.org">(click for instant chatroom login)</a> | |
30 | ||
31 | =item * Mailing list: L<http://lists.scsys.co.uk/mailman/listinfo/dbix-class> | |
32 | ||
33 | =item * RT Bug Tracker: L<https://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class> | |
34 | ||
35 | =item * Twitter: L<https://www.twitter.com/dbix_class> | |
36 | ||
37 | =item * Web Site: L<http://www.dbix-class.org/> | |
26 | =item * RT Bug Tracker: L<https://rt.cpan.org/Public/Dist/Display.html?Name=DBIx-Class> | |
27 | ||
28 | =item * Email: L<mailto:bug-DBIx-Class@rt.cpan.org> | |
29 | ||
30 | =item * Twitter: L<https://twitter.com/intent/tweet?text=%40ribasushi%20%23DBIC> | |
38 | 31 | |
39 | 32 | =back |
40 | 33 | |
109 | 102 | # Create a result set to search for artists. |
110 | 103 | # This does not query the DB. |
111 | 104 | my $johns_rs = $schema->resultset('Artist')->search( |
112 | # Build your WHERE using an SQL::Abstract structure: | |
105 | # Build your WHERE using an SQL::Abstract::Classic-compatible structure: | |
113 | 106 | { name => { like => 'John%' } } |
114 | 107 | ); |
115 | 108 | |
201 | 194 | |
202 | 195 | =item * Current git repository: L<https://github.com/Perl5/DBIx-Class> |
203 | 196 | |
204 | =item * Travis-CI log: L<https://travis-ci.org/Perl5/DBIx-Class/branches> | |
197 | =item * Travis-CI log: L<https://travis-ci.com/github/Perl5/DBIx-Class/branches> | |
205 | 198 | |
206 | 199 | =back |
207 | 200 | |
215 | 208 | the seemingly most insignificant questions and suggestions have been shown |
216 | 209 | to catalyze monumental improvements in consistency, accuracy and performance. |
217 | 210 | |
218 | List of the awesome contributors who made DBIC v0.082841 possible | |
211 | List of the awesome contributors who made DBIC v0.082842 possible | |
219 | 212 | |
220 | 213 | =encoding utf8 |
221 | 214 | |
225 | 218 | |
226 | 219 | B<acca>: Alexander Kuznetsov <acca@cpan.org> |
227 | 220 | |
221 | B<acme>: Leon Brocard <acme@astray.com> | |
222 | ||
228 | 223 | B<aherzog>: Adam Herzog <adam@herzogdesigns.com> |
229 | 224 | |
230 | 225 | Alexander Keusch <cpan@keusch.at> |
626 | 621 | B<wintermute>: Toby Corkindale <tjc@cpan.org> |
627 | 622 | |
628 | 623 | B<wreis>: Wallace Reis <wreis@cpan.org> |
624 | ||
625 | x86-64 <x86mail@gmail.com> | |
629 | 626 | |
630 | 627 | B<xenoterracide>: Caleb Cushing <xenoterracide@gmail.com> |
631 | 628 |
38 | 38 | |
39 | 39 | # misc resources |
40 | 40 | abstract_from 'lib/DBIx/Class.pm'; |
41 | resources 'IRC' => 'irc://irc.perl.org/#dbix-class'; | |
42 | 41 | resources 'repository' => 'https://github.com/Perl5/DBIx-Class'; |
43 | resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class'; | |
44 | resources 'bugtracker' => 'http://rt.cpan.org/NoAuth/Bugs.html?Dist=DBIx-Class'; | |
42 | resources 'bugtracker' => 'https://rt.cpan.org/Public/Dist/Display.html?Name=DBIx-Class'; | |
45 | 43 | |
46 | 44 | # nothing determined at runtime, except for possibly SQLT dep |
47 | 45 | # (see the check around DBICTEST_SQLT_DEPLOY in Makefile.PL) |
59 | 57 | DBIx::Class::Storage::BlockRunner |
60 | 58 | DBIx::Class::Carp |
61 | 59 | DBIx::Class::_Util |
62 | DBIx::Class::ResultSet::Pager | |
63 | 60 | /); |
64 | 61 | |
65 | 62 | # keep the Makefile.PL eval happy |
2 | 2 | use strict; |
3 | 3 | use warnings; |
4 | 4 | |
5 | use Module::Runtime 'use_module'; | |
6 | 5 | use SQL::Translator; |
7 | 6 | use Path::Class 'file'; |
8 | 7 | use Getopt::Long; |
26 | 25 | if @{$args->{'deploy-to'}||[]} > 1; |
27 | 26 | |
28 | 27 | local $ENV{DBI_DSN}; |
29 | my $schema = use_module( $args->{'schema-class'}[0] )->connect( | |
28 | eval "require $args->{'schema-class'}[0]" || die $@; | |
29 | my $schema = $args->{'schema-class'}[0]->connect( | |
30 | 30 | $args->{'deploy-to'} |
31 | 31 | ? ( "DBI:SQLite:$args->{'deploy-to'}[0]", undef, undef, { on_connect_do => "PRAGMA synchronous = OFF" } ) |
32 | 32 | : () |
0 | #!/bin/bash | |
1 | ||
2 | set -e | |
3 | ||
4 | [[ -e Makefile.PL ]] || ( echo "Not in the right dir" && exit 1 ) | |
5 | ||
6 | clear | |
7 | echo | |
8 | ||
9 | export TRAVIS=true | |
10 | export TRAVIS_REPO_SLUG="x/DBIx-Class" | |
11 | export DBI_DSN="dbi:ODBC:server=NonexistentServerAddress" | |
12 | export DBI_DRIVER="ADO" | |
13 | ||
14 | toggle_booleans=( \ | |
15 | $( grep -ohP '\bDBIC_[0-9_A-Z]+' -r lib/ --exclude-dir Optional | sort -u | grep -vP '^(DBIC_TRACE(_PROFILE)?|DBIC_.+_DEBUG)$' ) \ | |
16 | DBIC_SHUFFLE_UNORDERED_RESULTSETS \ | |
17 | DBICTEST_ASSERT_NO_SPURIOUS_EXCEPTION_ACTION \ | |
18 | DBICTEST_RUN_ALL_TESTS \ | |
19 | DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER \ | |
20 | ) | |
21 | ||
22 | for var in "${toggle_booleans[@]}" | |
23 | do | |
24 | if [[ -z "${!var}" ]] ; then | |
25 | export $var=1 | |
26 | echo -n "$var " | |
27 | fi | |
28 | done | |
29 | echo -e "\n\n^^ variables above **automatically** set to '1'" | |
30 | ||
31 | provecmd="nice prove -QlrswTj10" | |
32 | ||
33 | echo -e " | |
34 | Executing \`$provecmd $@\` via $(which perl) within the following environment: | |
35 | ||
36 | $(env | grep -P 'TEST|HARNESS|MAKE|TRAVIS|PERL|DBIC|PATH|SHELL' | LC_ALL=C sort | cat -v) | |
37 | " | |
38 | ||
39 | $provecmd "$@" |
0 | #!/usr/bin/env perl | |
1 | ||
2 | use warnings; | |
3 | use strict; | |
4 | ||
5 | use HTTP::Tiny; | |
6 | use JSON::PP; | |
7 | ||
8 | ( my $build_id = $ARGV[0]||'' ) =~ /^[0-9]+$/ | |
9 | or die "Expecting a numeric build id as argument\n"; | |
10 | ||
11 | my $base_url = "http://api.travis-ci.com/build/$build_id?include=build.jobs"; | |
12 | print "Retrieving $base_url\n"; | |
13 | ||
14 | my $resp = ( | |
15 | my $ua = HTTP::Tiny->new( default_headers => { 'Travis-API-Version' => 3 } ) | |
16 | )->get( $base_url ); | |
17 | ||
18 | die "Unable to retrieve $resp->{url}: $resp->{status}\n$resp->{content}\n\n" | |
19 | unless $resp->{success}; | |
20 | ||
21 | my @jobs = ( map | |
22 | { ( ($_->{id}||'') =~ /^([0-9]+)$/ ) ? [ $1 => $_->{number} ] : () } | |
23 | @{( eval { decode_json( $resp->{content} )->{jobs} } || [] )} | |
24 | ) or die "Unable to find any jobs:\n$resp->{content}\n\n"; | |
25 | ||
26 | my $dir = "TravisCI_build_$build_id"; | |
27 | ||
28 | mkdir $dir | |
29 | unless -d $dir; | |
30 | ||
31 | for my $job (@jobs) { | |
32 | my $log_url = "http://api.travis-ci.com/v3/job/$job->[0]/log.txt"; | |
33 | my $dest_fn = "$dir/job_$job->[1].$job->[0].log.gz"; | |
34 | ||
35 | print "Retrieving $log_url into $dest_fn\n"; | |
36 | ||
37 | $resp = $ua->mirror( $log_url, $dest_fn, { | |
38 | headers => { 'Accept-Encoding' => 'gzip' } | |
39 | }); | |
40 | warn "Error retrieving $resp->{url}: $resp->{status}\n$resp->{content}\n\n" | |
41 | unless $resp->{success}; | |
42 | } |
1 | 1 | use warnings; |
2 | 2 | |
3 | 3 | use Test::More; |
4 | use Test::Warn; | |
4 | 5 | use lib qw(t/lib); |
6 | ||
5 | 7 | use DBICTest; |
6 | 8 | my $schema = DBICTest->init_schema(); |
7 | ||
8 | plan tests => 19; | |
9 | 9 | |
10 | 10 | # select from a class with resultset_attributes |
11 | 11 | my $resultset = $schema->resultset('BooksInLibrary'); |
18 | 18 | |
19 | 19 | # and inserts? |
20 | 20 | my $see_spot; |
21 | $see_spot = eval { $owner->books->find_or_create({ title => "See Spot Run" }) }; | |
22 | if ($@) { print $@ } | |
23 | ok(!$@, 'find_or_create on resultset with attribute for non-existent entry did not throw'); | |
21 | $see_spot = eval { | |
22 | warnings_exist { | |
23 | $owner->books->find_or_create({ title => "See Spot Run" }) | |
24 | } qr/Missing value for primary key column 'id' on BooksInLibrary - perhaps you forgot to set its 'is_auto_increment'/; | |
25 | }; | |
26 | is ($@, '', 'find_or_create on resultset with attribute for non-existent entry did not throw'); | |
24 | 27 | ok(defined $see_spot, 'successfully did insert on resultset with attribute for non-existent entry'); |
25 | 28 | |
26 | 29 | my $see_spot_rs = $owner->books->search({ title => "See Spot Run" }); |
81 | 84 | ok( !$@, 'many_to_many set_$rel(\@objects) did not throw'); |
82 | 85 | is($pointy_objects->count, $pointy_count, 'many_to_many set_$rel($hash) count correct'); |
83 | 86 | is($round_objects->count, $round_count, 'many_to_many set_$rel($hash) other rel count correct'); |
87 | ||
88 | done_testing; |
303 | 303 | # Only do this when we do have the bits to look inside CVs properly, |
304 | 304 | # without it we are liable to pick up object defaults that are locked |
305 | 305 | # in method closures |
306 | if (DBICTest::Util::LeakTracer::CV_TRACING) { | |
306 | # | |
307 | # Some elaborate SQLAC-replacements leak, do not worry about it for now | |
308 | if ( | |
309 | DBICTest::Util::LeakTracer::CV_TRACING | |
310 | and | |
311 | ! $ENV{DBICTEST_SWAPOUT_SQLAC_WITH} | |
312 | ) { | |
307 | 313 | visit_refs( |
308 | 314 | refs => [ $base_collection ], |
309 | 315 | action => sub { |
454 | 460 | delete $weak_registry->{$addr} |
455 | 461 | unless $cleared->{bheos_pptiehinthashfieldhash}++; |
456 | 462 | } |
463 | elsif ($names =~ /^B::Hooks::EndOfScope::PP::HintHash::__GraveyardTransport/m) { | |
464 | # a workaround for perl-level double free: these "leak" by design | |
465 | delete $weak_registry->{$addr}; | |
466 | } | |
457 | 467 | elsif ($names =~ /^DateTime::TimeZone::UTC/m) { |
458 | 468 | # DT is going through a refactor it seems - let it leak zones for now |
459 | 469 | delete $weak_registry->{$addr}; |
3 | 3 | my ($initial_inc_contents, $expected_dbic_deps, $require_sites); |
4 | 4 | BEGIN { |
5 | 5 | # these envvars *will* bring in more stuff than the baseline |
6 | delete @ENV{qw(DBICTEST_SQLT_DEPLOY DBIC_TRACE)}; | |
6 | delete @ENV{qw( | |
7 | DBICTEST_SWAPOUT_SQLAC_WITH | |
8 | DBICTEST_SQLT_DEPLOY | |
9 | DBIC_TRACE | |
10 | )}; | |
7 | 11 | |
8 | 12 | # make sure extras do not load even when this is set |
9 | 13 | $ENV{PERL_STRICTURES_EXTRA} = 1; |
71 | 75 | if $ENV{PERL5OPT}; |
72 | 76 | |
73 | 77 | plan skip_all => 'Dependency load patterns are radically different before perl 5.10' |
74 | if $] < 5.010; | |
78 | if "$]" < 5.010; | |
79 | ||
80 | # these envvars *will* bring in more stuff than the baseline | |
81 | delete @ENV{qw( | |
82 | DBIC_TRACE | |
83 | DBIC_SHUFFLE_UNORDERED_RESULTSETS | |
84 | DBICTEST_SQLT_DEPLOY | |
85 | DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER | |
86 | DBICTEST_VIA_REPLICATED | |
87 | DBICTEST_DEBUG_CONCURRENCY_LOCKS | |
88 | )}; | |
89 | ||
90 | $ENV{DBICTEST_ANFANG_DEFANG} = 1; | |
91 | ||
92 | # make sure extras do not load even when this is set | |
93 | $ENV{PERL_STRICTURES_EXTRA} = 1; | |
75 | 94 | |
76 | 95 | # add what we loaded so far |
77 | 96 | for (keys %INC) { |
112 | 131 | |
113 | 132 | Hash::Merge |
114 | 133 | Scalar::Util |
115 | List::Util | |
116 | 134 | Storable |
117 | 135 | |
118 | 136 | Class::Accessor::Grouped |
119 | 137 | Class::C3::Componentised |
120 | SQL::Abstract | |
138 | SQL::Abstract::Util | |
121 | 139 | )); |
122 | 140 | |
123 | 141 | require DBICTest::Schema; |
143 | 161 | { |
144 | 162 | register_lazy_loadable_requires(qw( |
145 | 163 | DBI |
164 | SQL::Abstract::Classic | |
146 | 165 | )); |
147 | 166 | |
148 | 167 | my $s = DBICTest::Schema->connect('dbi:SQLite::memory:'); |
158 | 177 | my $art = $s->resultset('Artist')->create({ name => \[ '?' => 'foo'], rank => 42 }); |
159 | 178 | $art->discard_changes; |
160 | 179 | $art->update({ rank => 69, name => 'foo' }); |
180 | $s->resultset('Artist')->all; | |
161 | 181 | assert_no_missing_expected_requires(); |
162 | 182 | } |
163 | 183 | |
165 | 185 | { |
166 | 186 | local $ENV{DBICTEST_SQLITE_REVERSE_DEFAULT_ORDER}; |
167 | 187 | { |
168 | # in general we do not want DBICTest to load before sqla, but it is | |
188 | # in general we do not want DBICTest to load before sqlac, but it is | |
169 | 189 | # ok to cheat here |
170 | local $INC{'SQL/Abstract.pm'}; | |
190 | local $INC{'SQL/Abstract/Classic.pm'}; | |
171 | 191 | require DBICTest; |
172 | 192 | } |
173 | 193 | my $s = DBICTest->init_schema; |
103 | 103 | 'DBIx::Class::Admin::Descriptive', |
104 | 104 | 'DBIx::Class::Admin::Usage', |
105 | 105 | |
106 | # this subclass is expected to inherit whatever crap comes | |
107 | # from the parent | |
108 | 'DBIx::Class::ResultSet::Pager', | |
109 | ||
110 | 106 | # utility classes, not part of the inheritance chain |
111 | 107 | 'DBIx::Class::ResultSource::RowParser::Util', |
112 | 108 | 'DBIx::Class::_Util', |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use Test::More; | |
4 | use lib qw(t/lib); | |
5 | use DBICTest; | |
6 | use Storable qw/dclone/; | |
7 | ||
8 | my $schema = DBICTest->init_schema(); | |
9 | ||
10 | is ($schema->resultset("CD")->count, 5, 'Initial count sanity check'); | |
11 | ||
12 | my $qcnt; | |
13 | $schema->storage->debugcb(sub { $qcnt++ }); | |
14 | $schema->storage->debug (1); | |
15 | ||
16 | my $rs = $schema->resultset("CD"); | |
17 | ||
18 | # first page | |
19 | $qcnt = 0; | |
20 | my $it = $rs->search( | |
21 | {}, | |
22 | { order_by => 'title', | |
23 | rows => 3, | |
24 | page => 1 } | |
25 | ); | |
26 | my $pager = $it->pager; | |
27 | is ($qcnt, 0, 'No queries on rs/pager creation'); | |
28 | ||
29 | is ($pager->entries_per_page, 3, 'Pager created with correct entries_per_page'); | |
30 | ok ($pager->current_page(-1), 'Set nonexistent page'); | |
31 | is ($pager->current_page, 1, 'Page set behaves correctly'); | |
32 | ok ($pager->current_page(2), 'Set 2nd page'); | |
33 | ||
34 | is ($qcnt, 0, 'No queries on total_count-independent methods'); | |
35 | ||
36 | is( $it->pager->entries_on_this_page, 2, "entries_on_this_page ok for page 2" ); | |
37 | ||
38 | is ($qcnt, 1, 'Count fired to get pager page entries'); | |
39 | ||
40 | $qcnt = 0; | |
41 | is ($pager->previous_page, 1, 'Correct previous_page'); | |
42 | is ($pager->next_page, undef, 'No more pages'); | |
43 | is ($qcnt, 0, 'No more counts - amount of entries cached in pager'); | |
44 | ||
45 | is( $it->count, 3, "count on paged rs ok" ); | |
46 | is ($qcnt, 1, 'An $rs->count still fires properly'); | |
47 | ||
48 | is( $it->next->title, "Caterwaulin' Blues", "iterator->next ok" ); | |
49 | ||
50 | $it->next; | |
51 | $it->next; | |
52 | ||
53 | is( $it->next, undef, "next past end of page ok" ); | |
54 | ||
55 | ||
56 | # second page, testing with array | |
57 | my @page2 = $rs->search( | |
58 | {}, | |
59 | { order_by => 'title', | |
60 | rows => 3, | |
61 | page => 2 } | |
62 | ); | |
63 | ||
64 | is( $page2[0]->title, "Generic Manufactured Singles", "second page first title ok" ); | |
65 | ||
66 | # page a standard resultset | |
67 | $it = $rs->search( | |
68 | {}, | |
69 | { order_by => 'title', | |
70 | rows => 3 } | |
71 | ); | |
72 | my $page = $it->page(2); | |
73 | ||
74 | is( $page->count, 2, "standard resultset paged rs count ok" ); | |
75 | ||
76 | is( $page->next->title, "Generic Manufactured Singles", "second page of standard resultset ok" ); | |
77 | ||
78 | ||
79 | # test software-based limit paging | |
80 | $it = $rs->search( | |
81 | {}, | |
82 | { order_by => 'title', | |
83 | rows => 3, | |
84 | page => 2, | |
85 | software_limit => 1 } | |
86 | ); | |
87 | is( $it->pager->entries_on_this_page, 2, "software entries_on_this_page ok" ); | |
88 | ||
89 | is( $it->pager->previous_page, 1, "software previous_page ok" ); | |
90 | ||
91 | is( $it->count, 2, "software count on paged rs ok" ); | |
92 | ||
93 | is( $it->next->title, "Generic Manufactured Singles", "software iterator->next ok" ); | |
94 | ||
95 | # test paging with chained searches | |
96 | $it = $rs->search( | |
97 | {}, | |
98 | { rows => 2, | |
99 | page => 2 } | |
100 | )->search( undef, { order_by => 'title' } ); | |
101 | ||
102 | is( $it->count, 2, "chained searches paging ok" ); | |
103 | ||
104 | # test page with offset | |
105 | $it = $rs->search({}, { | |
106 | rows => 2, | |
107 | page => 2, | |
108 | offset => 1, | |
109 | order_by => 'cdid' | |
110 | }); | |
111 | ||
112 | my $row = $rs->search({}, { | |
113 | order_by => 'cdid', | |
114 | offset => 3, | |
115 | rows => 1 | |
116 | })->single; | |
117 | ||
118 | is($row->cdid, $it->first->cdid, 'page with offset'); | |
119 | ||
120 | ||
121 | # test pager on non-title page behavior | |
122 | $qcnt = 0; | |
123 | $it = $rs->search({}, { rows => 3 })->page (2); | |
124 | ok ($it->pager); | |
125 | is ($qcnt, 0, 'No count on past-first-page pager instantiation'); | |
126 | ||
127 | is ($it->pager->current_page, 2, 'Page set properby by $rs'); | |
128 | is( $it->pager->total_entries, 5, 'total_entries correct' ); | |
129 | ||
130 | $rs->create ({ artist => 1, title => 'MOAR!', year => 2010 }); | |
131 | is( $it->count, 3, 'Dynamic count on filling up page' ); | |
132 | $rs->create ({ artist => 1, title => 'MOAR!!!', year => 2011 }); | |
133 | is( $it->count, 3, 'Count still correct (does not overflow' ); | |
134 | ||
135 | $qcnt = 0; | |
136 | is( $it->pager->total_entries, 5, 'total_entries properly cached at old value' ); | |
137 | is ($qcnt, 0, 'No queries'); | |
138 | ||
139 | # test fresh pager with explicit total count assignment | |
140 | $qcnt = 0; | |
141 | $pager = $rs->search({}, { rows => 4 })->page (2)->pager; | |
142 | $pager->total_entries (13); | |
143 | ||
144 | is ($pager->current_page, 2, 'Correct start page'); | |
145 | is ($pager->next_page, 3, 'One more page'); | |
146 | is ($pager->last_page, 4, 'And one more page'); | |
147 | is ($pager->previous_page, 1, 'One page in front'); | |
148 | ||
149 | is ($qcnt, 0, 'No queries with explicitly sey total count'); | |
150 | ||
151 | # test cached resultsets | |
152 | my $init_cnt = $rs->count; | |
153 | ||
154 | $it = $rs->search({}, { rows => 3, cache => 1 })->page(2); | |
155 | is ($it->count, 3, '3 rows'); | |
156 | is (scalar $it->all, 3, '3 objects'); | |
157 | ||
158 | isa_ok($it->pager,'Data::Page','Get a pager back ok'); | |
159 | is($it->pager->total_entries,7); | |
160 | is($it->pager->current_page,2); | |
161 | is($it->pager->entries_on_this_page,3); | |
162 | ||
163 | $it = $it->page(3); | |
164 | is ($it->count, 1, 'One row'); | |
165 | is (scalar $it->all, 1, 'One object'); | |
166 | ||
167 | isa_ok($it->pager,'Data::Page','Get a pager back ok'); | |
168 | is($it->pager->total_entries,7); | |
169 | is($it->pager->current_page,3); | |
170 | is($it->pager->entries_on_this_page,1); | |
171 | ||
172 | ||
173 | $it->delete; | |
174 | is ($rs->count, $init_cnt - 1, 'One row deleted as expected'); | |
175 | ||
176 | is ($it->count, 1, 'One row (cached)'); | |
177 | is (scalar $it->all, 1, 'One object (cached)'); | |
178 | ||
179 | # test fresh rs creation with modified defaults | |
180 | my $p = sub { $schema->resultset('CD')->page(1)->pager->entries_per_page; }; | |
181 | ||
182 | is($p->(), 10, 'default rows is 10'); | |
183 | ||
184 | $schema->default_resultset_attributes({ rows => 5 }); | |
185 | ||
186 | is($p->(), 5, 'default rows is 5'); | |
187 | ||
188 | # does serialization work (preserve laziness, while preserving state if exits) | |
189 | $qcnt = 0; | |
190 | $it = $rs->search( | |
191 | {}, | |
192 | { order_by => 'title', | |
193 | rows => 5, | |
194 | page => 2 } | |
195 | ); | |
196 | $pager = $it->pager; | |
197 | is ($qcnt, 0, 'No queries on rs/pager creation'); | |
198 | ||
199 | $it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) }; | |
200 | is ($qcnt, 0, 'No queries on rs/pager freeze/thaw'); | |
201 | ||
202 | is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2" ); | |
203 | ||
204 | is ($qcnt, 1, 'Count fired to get pager page entries'); | |
205 | ||
206 | $rs->create({ title => 'bah', artist => 1, year => 2011 }); | |
207 | ||
208 | $qcnt = 0; | |
209 | $it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) }; | |
210 | is ($qcnt, 0, 'No queries on rs/pager freeze/thaw'); | |
211 | ||
212 | is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2, even though underlying count changed" ); | |
213 | ||
214 | is ($qcnt, 0, 'No count fired on pre-existing total count'); | |
215 | ||
216 | done_testing; |
8 | 8 | use DBIx::Class::Optional::Dependencies (); |
9 | 9 | use lib qw(t/lib); |
10 | 10 | use DBICTest; |
11 | use SQL::Abstract 'is_literal_value'; | |
12 | use DBIx::Class::_Util 'is_exception'; | |
11 | use SQL::Abstract::Util 'is_literal_value'; | |
12 | use DBIx::Class::_Util qw( is_exception sigwarn_silencer ); | |
13 | 13 | |
14 | 14 | plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_pg') |
15 | 15 | unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_pg'); |
210 | 210 | __PACKAGE__->column_info_from_storage(1); |
211 | 211 | __PACKAGE__->set_primary_key('id'); |
212 | 212 | |
213 | # FIXME - for some reason column_info_from_storage does not properly find | |
214 | # the is_auto_increment setting... | |
215 | __PACKAGE__->column_info('id')->{is_auto_increment} = 1; | |
213 | 216 | } |
214 | 217 | SKIP: { |
215 | 218 | skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002; |
443 | 446 | $schema->source('CD')->name('dbic_t_schema.cd'); |
444 | 447 | $schema->source('Track')->name('dbic_t_schema.track'); |
445 | 448 | lives_ok { |
449 | ||
450 | # workaround for PG 9.5+, fix pending in mainline | |
451 | local $SIG{__WARN__} = sigwarn_silencer( qr/SET CONSTRAINTS can only be used in transaction blocks/ ); | |
452 | ||
446 | 453 | $schema->storage->with_deferred_fk_checks(sub { |
447 | 454 | $schema->resultset('Track')->create({ |
448 | 455 | trackid => 999, cd => 999, position => 1, title => 'deferred FK track' |
467 | 474 | # but it also should not warn |
468 | 475 | warnings_like { |
469 | 476 | |
470 | # workaround for PG 9.5, fix pending in mainline | |
471 | local $schema->storage->_dbh->{PrintWarn} = 0; | |
477 | # workaround for PG 9.5+, fix pending in mainline | |
478 | local $SIG{__WARN__} = sigwarn_silencer( qr/SET CONSTRAINTS can only be used in transaction blocks/ ); | |
472 | 479 | |
473 | 480 | eval { |
474 | 481 | $schema->storage->with_deferred_fk_checks(sub { |
280 | 280 | } |
281 | 281 | |
282 | 282 | # rel name over 30 char limit with user condition |
283 | # This requires walking the SQLA data structure. | |
283 | # This requires walking the WHERE data structure. | |
284 | 284 | { |
285 | 285 | $query = $schema->resultset('Artist')->search({ |
286 | 286 | 'cds_very_very_very_long_relationship_name.title' => 'EP C' |
497 | 497 | 'updated money value to NULL round-trip'; |
498 | 498 | } |
499 | 499 | } |
500 | ||
501 | # Test leakage of PK on implicit retrieval | |
502 | { | |
503 | ||
504 | my $next_owner = $schema->resultset('Owners')->get_column('id')->max + 1; | |
505 | my $next_book = $schema->resultset('BooksInLibrary')->get_column('id')->max + 1; | |
506 | ||
507 | cmp_ok( | |
508 | $next_owner, | |
509 | '!=', | |
510 | $next_book, | |
511 | 'Preexisting auto-inc PKs staggered' | |
512 | ); | |
513 | ||
514 | my $yet_another_owner = $schema->resultset('Owners')->create({ name => 'YAO' }); | |
515 | my $yet_another_book; | |
516 | warnings_exist { | |
517 | $yet_another_book = $yet_another_owner->create_related( books => { title => 'YAB' }) | |
518 | } qr/Missing value for primary key column 'id' on BooksInLibrary - perhaps you forgot to set its 'is_auto_increment'/; | |
519 | ||
520 | is( | |
521 | $yet_another_owner->id, | |
522 | $next_owner, | |
523 | 'Expected Owner id' | |
524 | ); | |
525 | ||
526 | is( | |
527 | $yet_another_book->id, | |
528 | $next_book, | |
529 | 'Expected Book id' | |
530 | ); | |
531 | } | |
500 | 532 | } |
501 | 533 | } |
502 | 534 |
40 | 40 | |
41 | 41 | my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/; |
42 | 42 | next unless $dsn; |
43 | ||
44 | ||
45 | # FIXME - work around https://github.com/google/sanitizers/issues/934 | |
46 | $prefix eq 'DBICTEST_FIREBIRD_ODBC' | |
47 | and | |
48 | $Config::Config{config_args} =~ m{fsanitize\=address} | |
49 | and | |
50 | skip( "ODBC Firebird driver doesn't yet work with ASAN: https://github.com/google/sanitizers/issues/934", 1 ); | |
51 | ||
43 | 52 | |
44 | 53 | skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1) |
45 | 54 | unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix}); |
14 | 14 | |
15 | 15 | __PACKAGE__->set_table('Movies'); |
16 | 16 | __PACKAGE__->columns(All => qw(id title)); |
17 | ||
18 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
19 | # (and the warning that goes with it) | |
20 | # This is the same behavior as it was pre 0.082900 | |
21 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
17 | 22 | |
18 | 23 | sub create_sql { |
19 | 24 | return qq{ |
12 | 12 | __PACKAGE__->columns(TEMP => qw/ nonpersistent /); |
13 | 13 | __PACKAGE__->add_constructor(salary_between => 'salary >= ? AND salary <= ?'); |
14 | 14 | |
15 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
16 | # (and the warning that goes with it) | |
17 | # This is the same behavior as it was pre 0.082900 | |
18 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
19 | ||
15 | 20 | sub mutator_name_for { "set_$_[1]" } |
16 | 21 | |
17 | 22 | sub create_sql { |
12 | 12 | __PACKAGE__->has_a( actor => 'Actor' ); |
13 | 13 | __PACKAGE__->has_a( alias => 'Actor' ); |
14 | 14 | |
15 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
16 | # (and the warning that goes with it) | |
17 | # This is the same behavior as it was pre 0.082900 | |
18 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
19 | ||
15 | 20 | sub create_sql { |
16 | 21 | return qq{ |
17 | 22 | id INTEGER PRIMARY KEY, |
17 | 17 | Class::DBI::Column->new('columnb' => {mutator => 'columnb_as_write'}), |
18 | 18 | )); |
19 | 19 | |
20 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
21 | # (and the warning that goes with it) | |
22 | # This is the same behavior as it was pre 0.082900 | |
23 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
24 | ||
20 | 25 | sub create_sql { |
21 | 26 | return qq{ |
22 | 27 | id INTEGER PRIMARY KEY, |
10 | 10 | __PACKAGE__->columns('Essential', qw( Title )); |
11 | 11 | __PACKAGE__->columns('Directors', qw( Director CoDirector )); |
12 | 12 | __PACKAGE__->columns('Other', qw( Rating NumExplodingSheep HasVomit )); |
13 | ||
14 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
15 | # (and the warning that goes with it) | |
16 | # This is the same behavior as it was pre 0.082900 | |
17 | __PACKAGE__->column_info('title')->{is_auto_increment} = 0; | |
13 | 18 | |
14 | 19 | sub create_sql { |
15 | 20 | return qq{ |
17 | 17 | __PACKAGE__->has_a( |
18 | 18 | update_datetime => 'MyDateStamp', |
19 | 19 | ); |
20 | ||
21 | ||
22 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
23 | # (and the warning that goes with it) | |
24 | # This is the same behavior as it was pre 0.082900 | |
25 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
20 | 26 | |
21 | 27 | sub create_sql { |
22 | 28 | # SQLite doesn't support Datetime datatypes. |
16 | 16 | deflate => 'mysql_datetime' |
17 | 17 | ); |
18 | 18 | |
19 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
20 | # (and the warning that goes with it) | |
21 | # This is the same behavior as it was pre 0.082900 | |
22 | __PACKAGE__->column_info('id')->{is_auto_increment} = 0; | |
23 | ||
19 | 24 | __PACKAGE__->add_trigger(before_create => \&set_dts); |
20 | 25 | __PACKAGE__->add_trigger(before_update => \&set_dts); |
21 | 26 |
14 | 14 | inflate => sub { Date::Simple->new(shift) }, |
15 | 15 | deflate => 'format', |
16 | 16 | ); |
17 | ||
18 | # Disables the implicit autoinc-on-non-supplied-pk behavior | |
19 | # (and the warning that goes with it) | |
20 | # This is the same behavior as it was pre 0.082900 | |
21 | __PACKAGE__->column_info('myid')->{is_auto_increment} = 0; | |
22 | ||
17 | 23 | #__PACKAGE__->find_column('tdate')->placeholder("IF(1, CURDATE(), ?)"); |
18 | 24 | |
19 | 25 | sub create_sql { |
4 | 4 | |
5 | 5 | use Test::More; |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 7 | |
9 | 8 | my ($ROWS, $OFFSET) = ( |
10 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
11 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
10 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
12 | 11 | ); |
13 | 12 | |
14 | 13 | my $schema = DBICTest->init_schema(); |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use Test::More; | |
4 | use DBIx::Class::Storage::Debug::PrettyTrace; | |
5 | ||
6 | my $cap; | |
7 | open my $fh, '>', \$cap; | |
8 | ||
9 | my $pp = DBIx::Class::Storage::Debug::PrettyTrace->new({ | |
10 | profile => 'none', | |
11 | fill_in_placeholders => 1, | |
12 | placeholder_surround => [qw(' ')], | |
13 | show_progress => 0, | |
14 | }); | |
15 | ||
16 | $pp->debugfh($fh); | |
17 | ||
18 | $pp->query_start('INSERT INTO self_ref_alias (alias, self_ref) VALUES ( ?, ? )', qw('__BULK_INSERT__' '1')); | |
19 | is( | |
20 | $cap, | |
21 | qq{INSERT INTO self_ref_alias( alias, self_ref ) VALUES( ?, ? ) : '__BULK_INSERT__', '1'\n}, | |
22 | 'SQL Logged' | |
23 | ); | |
24 | ||
25 | done_testing; |
0 | use strict; | |
1 | use warnings; | |
2 | no warnings 'once'; | |
3 | ||
4 | use Test::More; | |
5 | use Test::Exception; | |
6 | use Try::Tiny; | |
7 | use File::Spec; | |
8 | use lib qw(t/lib); | |
9 | use DBICTest; | |
10 | use Path::Class qw/file/; | |
11 | ||
12 | # something deep in Path::Class - mainline ditched it altogether | |
13 | plan skip_all => "Test is finicky under -T before 5.10" | |
14 | if "$]" < 5.010 and ${^TAINT}; | |
15 | ||
16 | BEGIN { delete @ENV{qw(DBIC_TRACE DBIC_TRACE_PROFILE DBICTEST_SQLITE_USE_FILE)} } | |
17 | ||
18 | my $schema = DBICTest->init_schema(); | |
19 | ||
20 | my $lfn = file("t/var/sql-$$.log"); | |
21 | unlink $lfn or die $! | |
22 | if -e $lfn; | |
23 | ||
24 | # make sure we are testing the vanilla debugger and not ::PrettyTrace | |
25 | require DBIx::Class::Storage::Statistics; | |
26 | $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new); | |
27 | ||
28 | ok ( $schema->storage->debug(1), 'debug' ); | |
29 | $schema->storage->debugfh($lfn->openw); | |
30 | $schema->storage->debugfh->autoflush(1); | |
31 | $schema->resultset('CD')->count; | |
32 | ||
33 | my @loglines = $lfn->slurp; | |
34 | is (@loglines, 1, 'one line of log'); | |
35 | like($loglines[0], qr/^SELECT COUNT/, 'File log via debugfh success'); | |
36 | ||
37 | $schema->storage->debugfh(undef); | |
38 | ||
39 | { | |
40 | local $ENV{DBIC_TRACE} = "=$lfn"; | |
41 | unlink $lfn; | |
42 | ||
43 | $schema->resultset('CD')->count; | |
44 | ||
45 | my $schema2 = DBICTest->init_schema(no_deploy => 1); | |
46 | $schema2->storage->_do_query('SELECT 1'); # _do_query() logs via standard mechanisms | |
47 | ||
48 | my @loglines = $lfn->slurp; | |
49 | is(@loglines, 2, '2 lines of log'); | |
50 | like($loglines[0], qr/^SELECT COUNT/, 'Env log from schema1 success'); | |
51 | like($loglines[1], qr/^SELECT 1:/, 'Env log from schema2 success'); | |
52 | ||
53 | $schema->storage->debugobj->debugfh(undef) | |
54 | } | |
55 | ||
56 | END { | |
57 | unlink $lfn if $lfn; | |
58 | } | |
59 | ||
60 | open(STDERRCOPY, '>&STDERR'); | |
61 | ||
62 | my $exception_line_number; | |
63 | # STDERR will be closed, no T::B diag in blocks | |
64 | my $exception = try { | |
65 | close(STDERR); | |
66 | $exception_line_number = __LINE__ + 1; # important for test, do not reformat | |
67 | $schema->resultset('CD')->search({})->count; | |
68 | } catch { | |
69 | $_ | |
70 | } finally { | |
71 | # restore STDERR | |
72 | open(STDERR, '>&STDERRCOPY'); | |
73 | }; | |
74 | ||
75 | ok $exception =~ / | |
76 | \QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)\E | |
77 | .+ | |
78 | \Qat @{[__FILE__]} line $exception_line_number\E$ | |
79 | /xms | |
80 | or diag "Unexpected exception text:\n\n$exception\n"; | |
81 | ||
82 | my @warnings; | |
83 | $exception = try { | |
84 | local $SIG{__WARN__} = sub { push @warnings, @_ if $_[0] =~ /character/i }; | |
85 | close STDERR; | |
86 | open(STDERR, '>', File::Spec->devnull) or die $!; | |
87 | $schema->resultset('CD')->search({ title => "\x{1f4a9}" })->count; | |
88 | ''; | |
89 | } catch { | |
90 | $_; | |
91 | } finally { | |
92 | # restore STDERR | |
93 | close STDERR; | |
94 | open(STDERR, '>&STDERRCOPY'); | |
95 | }; | |
96 | ||
97 | die "How did that fail... $exception" | |
98 | if $exception; | |
99 | ||
100 | is_deeply(\@warnings, [], 'No warnings with unicode on STDERR'); | |
101 | ||
102 | # test debugcb and debugobj protocol | |
103 | { | |
104 | my $rs = $schema->resultset('CD')->search( { | |
105 | artist => 1, | |
106 | cdid => { -between => [ 1, 3 ] }, | |
107 | title => { '!=' => \[ '?', undef ] } | |
108 | }); | |
109 | ||
110 | my $sql_trace = 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( artist = ? AND ( cdid BETWEEN ? AND ? ) AND title != ? ) )'; | |
111 | my @bind_trace = qw( '1' '1' '3' NULL ); # quotes are in fact part of the trace </facepalm> | |
112 | ||
113 | ||
114 | my @args; | |
115 | $schema->storage->debugcb(sub { push @args, @_ } ); | |
116 | ||
117 | $rs->all; | |
118 | ||
119 | is_deeply( \@args, [ | |
120 | "SELECT", | |
121 | sprintf( "%s: %s\n", $sql_trace, join ', ', @bind_trace ), | |
122 | ]); | |
123 | ||
124 | { | |
125 | package DBICTest::DebugObj; | |
126 | our @ISA = 'DBIx::Class::Storage::Statistics'; | |
127 | ||
128 | sub query_start { | |
129 | my $self = shift; | |
130 | ( $self->{_traced_sql}, @{$self->{_traced_bind}} ) = @_; | |
131 | } | |
132 | } | |
133 | ||
134 | my $do = $schema->storage->debugobj(DBICTest::DebugObj->new); | |
135 | ||
136 | $rs->all; | |
137 | ||
138 | is( $do->{_traced_sql}, $sql_trace ); | |
139 | ||
140 | is_deeply ( $do->{_traced_bind}, \@bind_trace ); | |
141 | } | |
142 | ||
143 | # recreate test as seen in DBIx::Class::QueryLog | |
144 | # the rationale is that if someone uses a non-IO::Handle object | |
145 | # on CPAN, many are *bound* to use one on darkpan. Thus this | |
146 | # test to ensure there is no future silent breakage | |
147 | { | |
148 | my $output = ""; | |
149 | ||
150 | { | |
151 | package DBICTest::_Printable; | |
152 | ||
153 | sub print { | |
154 | my ($self, @args) = @_; | |
155 | $output .= join('', @args); | |
156 | } | |
157 | } | |
158 | ||
159 | $schema->storage->debugobj(undef); | |
160 | $schema->storage->debug(1); | |
161 | $schema->storage->debugfh( bless {}, "DBICTest::_Printable" ); | |
162 | $schema->storage->txn_do( sub { $schema->resultset('Artist')->count } ); | |
163 | ||
164 | like ( | |
165 | $output, | |
166 | qr/ | |
167 | \A | |
168 | ^ \QBEGIN WORK\E \s*? | |
169 | ^ \QSELECT COUNT( * ) FROM artist me:\E \s*? | |
170 | ^ \QCOMMIT\E \s*? | |
171 | \z | |
172 | /xm | |
173 | ); | |
174 | ||
175 | $schema->storage->debug(0); | |
176 | $schema->storage->debugfh(undef); | |
177 | } | |
178 | ||
179 | done_testing; |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use Test::More; | |
4 | ||
5 | use DBIx::Class::Storage::Debug::PrettyTrace; | |
6 | ||
7 | my $cap; | |
8 | open my $fh, '>', \$cap; | |
9 | ||
10 | my $pp = DBIx::Class::Storage::Debug::PrettyTrace->new({ | |
11 | profile => 'none', | |
12 | squash_repeats => 1, | |
13 | fill_in_placeholders => 1, | |
14 | placeholder_surround => ['', ''], | |
15 | show_progress => 0, | |
16 | }); | |
17 | ||
18 | $pp->debugfh($fh); | |
19 | ||
20 | $pp->query_start('SELECT * FROM frew WHERE id = ?', q('1')); | |
21 | is( | |
22 | $cap, | |
23 | qq(SELECT * FROM frew WHERE id = '1'\n), | |
24 | 'SQL Logged' | |
25 | ); | |
26 | ||
27 | open $fh, '>', \$cap; | |
28 | $pp->query_start('SELECT * FROM frew WHERE id = ?', q('2')); | |
29 | is( | |
30 | $cap, | |
31 | qq(... : '2'\n), | |
32 | 'Repeated SQL ellided' | |
33 | ); | |
34 | ||
35 | open $fh, '>', \$cap; | |
36 | $pp->query_start('SELECT * FROM frew WHERE id = ?', q('3')); | |
37 | is( | |
38 | $cap, | |
39 | qq(... : '3'\n), | |
40 | 'Repeated SQL ellided' | |
41 | ); | |
42 | ||
43 | open $fh, '>', \$cap; | |
44 | $pp->query_start('SELECT * FROM frew WHERE id = ?', q('4')); | |
45 | is( | |
46 | $cap, | |
47 | qq(... : '4'\n), | |
48 | 'Repeated SQL ellided' | |
49 | ); | |
50 | ||
51 | open $fh, '>', \$cap; | |
52 | $pp->query_start('SELECT * FROM bar WHERE id = ?', q('4')); | |
53 | is( | |
54 | $cap, | |
55 | qq(SELECT * FROM bar WHERE id = '4'\n), | |
56 | 'New SQL Logged' | |
57 | ); | |
58 | ||
59 | open $fh, '>', \$cap; | |
60 | $pp->query_start('SELECT * FROM frew WHERE id = ?', q('1')); | |
61 | is( | |
62 | $cap, | |
63 | qq(SELECT * FROM frew WHERE id = '1'\n), | |
64 | 'New SQL Logged' | |
65 | ); | |
66 | ||
67 | done_testing; |
0 | use strict; | |
1 | use warnings; | |
2 | use lib qw(t/lib); | |
3 | use DBICTest; | |
4 | use Test::More; | |
5 | ||
6 | BEGIN { | |
7 | require DBIx::Class; | |
8 | plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_prettydebug') | |
9 | unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_prettydebug'); | |
10 | } | |
11 | ||
12 | BEGIN { delete @ENV{qw(DBIC_TRACE_PROFILE)} } | |
13 | ||
14 | { | |
15 | my $schema = DBICTest->init_schema; | |
16 | ||
17 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Statistics'); | |
18 | } | |
19 | ||
20 | { | |
21 | local $ENV{DBIC_TRACE_PROFILE} = 'console'; | |
22 | ||
23 | my $schema = DBICTest->init_schema; | |
24 | ||
25 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Debug::PrettyTrace');; | |
26 | is($schema->storage->debugobj->_sqlat->indent_string, ' ', 'indent string set correctly from console profile'); | |
27 | } | |
28 | ||
29 | { | |
30 | local $ENV{DBIC_TRACE_PROFILE} = './t/lib/awesome.json'; | |
31 | ||
32 | my $schema = DBICTest->init_schema; | |
33 | ||
34 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Debug::PrettyTrace');; | |
35 | is($schema->storage->debugobj->_sqlat->indent_string, 'frioux', 'indent string set correctly from file-based profile'); | |
36 | } | |
37 | ||
38 | done_testing; |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use Test::More; | |
4 | use DBIx::Class::Storage::Debug::PrettyTrace; | |
5 | ||
6 | my $cap; | |
7 | open my $fh, '>', \$cap; | |
8 | ||
9 | my $pp = DBIx::Class::Storage::Debug::PrettyTrace->new({ | |
10 | show_progress => 1, | |
11 | clear_line => 'CLEAR', | |
12 | executing => 'GOGOGO', | |
13 | }); | |
14 | ||
15 | $pp->debugfh($fh); | |
16 | ||
17 | $pp->query_start('SELECT * FROM frew WHERE id = 1'); | |
18 | is( | |
19 | $cap, | |
20 | qq(SELECT * FROM frew WHERE id = 1 : \nGOGOGO), | |
21 | 'SQL Logged' | |
22 | ); | |
23 | $pp->query_end('SELECT * FROM frew WHERE id = 1'); | |
24 | is( | |
25 | $cap, | |
26 | qq(SELECT * FROM frew WHERE id = 1 : \nGOGOGOCLEAR), | |
27 | 'SQL Logged' | |
28 | ); | |
29 | ||
30 | done_testing; |
28 | 28 | for my $prefix (keys %$env2optdep) { SKIP: { |
29 | 29 | |
30 | 30 | my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/; |
31 | ||
32 | 31 | next unless $dsn; |
33 | 32 | |
34 | note "Testing with ${prefix}_DSN"; | |
33 | ||
34 | # FIXME - work around https://github.com/google/sanitizers/issues/934 | |
35 | $prefix eq 'DBICTEST_FIREBIRD_ODBC' | |
36 | and | |
37 | $Config::Config{config_args} =~ m{fsanitize\=address} | |
38 | and | |
39 | skip( "ODBC Firebird driver doesn't yet work with ASAN: https://github.com/google/sanitizers/issues/934", 1 ); | |
40 | ||
35 | 41 | |
36 | 42 | skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1) |
37 | 43 | unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix}); |
44 | ||
45 | note "Testing with ${prefix}_DSN"; | |
38 | 46 | |
39 | 47 | $schema = DBICTest::Schema->connect($dsn, $user, $pass, { |
40 | 48 | quote_char => '"', |
3 | 3 | use strict; |
4 | 4 | use warnings; |
5 | 5 | |
6 | use base qw(DBICTest::Base DBIx::Class::ResultSet); | |
6 | BEGIN { | |
7 | my @subclassing = qw(DBICTest::Base DBIx::Class::ResultSet); | |
8 | ||
9 | if( ! $ENV{DBICTEST_MOOIFIED_RESULTSETS} ) { | |
10 | # plain old vanilla base.pm | |
11 | require base; | |
12 | base->import(@subclassing); | |
13 | } | |
14 | else { | |
15 | # do a string eval to make sure Moo doesn't get confused | |
16 | require Carp; | |
17 | eval <<'EOM' | |
18 | ||
19 | ||
20 | use Moo; | |
21 | extends @subclassing; | |
22 | ||
23 | # ::RS::new() expects my ($class, $rsrc, $args) = @_ | |
24 | # Moo(se) expects a single hashref ( $args ), and makes it mandatory | |
25 | # | |
26 | # Ensure that unless we are called from a test - DBIC always fills it in | |
27 | sub BUILDARGS { | |
28 | if( | |
29 | ! defined $_[2] | |
30 | and | |
31 | # not a direct call from a test file | |
32 | (caller(1))[1] !~ m{ (?: ^ | \/ | \\ ) t [\/\\] .+ \.t $ }x | |
33 | ) { | |
34 | $Carp::CarpLevel += 2; | |
35 | Carp::confess( "...::ResultSet->new() called without supplying an ( empty ) hashref as argument: this fails with Moo(se) and incomplete BUILDARGS. Problematic stacktrace begins" ); | |
36 | } | |
37 | ||
38 | $_[2] || {}; | |
39 | } | |
40 | ||
41 | ||
42 | EOM | |
43 | ||
44 | } | |
45 | } | |
7 | 46 | |
8 | 47 | sub all_hri { |
9 | 48 | return [ shift->search ({}, { result_class => 'DBIx::Class::ResultClass::HashRefInflator' })->all ]; |
5 | 5 | use base qw(DBICTest::Base DBIx::Class::Schema); |
6 | 6 | |
7 | 7 | use Fcntl qw(:DEFAULT :seek :flock); |
8 | use Scalar::Util 'weaken'; | |
8 | 9 | use Time::HiRes 'sleep'; |
9 | 10 | use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry); |
10 | 11 | use DBICTest::Util qw( local_umask await_flock dbg DEBUG_TEST_CONCURRENCY_LOCKS ); |
109 | 110 | if ($locker->{lock_name} and ($ENV{DBICTEST_LOCK_HOLDER}||0) == $$) { |
110 | 111 | DEBUG_TEST_CONCURRENCY_LOCKS |
111 | 112 | and dbg "$locker->{type} LOCK RELEASED (END): $locker->{lock_name}"; |
113 | ||
114 | # we were using a lock-able RDBMS: if we are failing - dump the last diag | |
115 | if ( | |
116 | $locker->{rdbms_connection_diag} | |
117 | and | |
118 | $INC{'Test/Builder.pm'} | |
119 | and | |
120 | my $tb = do { | |
121 | local $@; | |
122 | my $t = eval { Test::Builder->new } | |
123 | or warn "Test::Builder->new failed:\n$@\n"; | |
124 | $t; | |
125 | } | |
126 | ) { | |
127 | $tb->diag( "\nabove test failure almost certainly happened against:\n$locker->{rdbms_connection_diag}" ) | |
128 | if ( | |
129 | !$tb->is_passing | |
130 | or | |
131 | !defined( $tb->has_plan ) | |
132 | or | |
133 | ( $tb->has_plan ne 'no_plan' and $tb->has_plan != $tb->current_test ) | |
134 | ) | |
135 | } | |
112 | 136 | } |
113 | 137 | } |
114 | 138 | |
115 | 139 | my $weak_registry = {}; |
116 | 140 | |
117 | 141 | sub connection { |
118 | my $self = shift->next::method(@_); | |
142 | my( $proto, @args ) = @_; | |
143 | ||
144 | if( $ENV{DBICTEST_SWAPOUT_SQLAC_WITH} ) { | |
145 | ||
146 | my( $sqlac_like ) = $ENV{DBICTEST_SWAPOUT_SQLAC_WITH} =~ /(.+)/; | |
147 | Class::C3::Componentised->ensure_class_loaded( $sqlac_like ); | |
148 | ||
149 | require DBIx::Class::SQLMaker::ClassicExtensions; | |
150 | require SQL::Abstract::Classic; | |
151 | ||
152 | Class::C3::Componentised->inject_base( | |
153 | 'DBICTest::SQLAC::SwapOut', | |
154 | 'DBIx::Class::SQLMaker::ClassicExtensions', | |
155 | $sqlac_like, | |
156 | 'SQL::Abstract::Classic', | |
157 | ); | |
158 | ||
159 | # perl can be pretty disgusting... | |
160 | push @args, {} | |
161 | unless ref( $args[-1] ) eq 'HASH'; | |
162 | ||
163 | $args[-1] = { %{ $args[-1] } }; | |
164 | ||
165 | if( ref( $args[-1]{on_connect_call} ) ne 'ARRAY' ) { | |
166 | $args[-1]{on_connect_call} = [ | |
167 | $args[-1]{on_connect_call} | |
168 | ? [ $args[-1]{on_connect_call} ] | |
169 | : () | |
170 | ]; | |
171 | } | |
172 | elsif( ref( $args[-1]{on_connect_call}[0] ) ne 'ARRAY' ) { | |
173 | $args[-1]{on_connect_call} = [ map | |
174 | { [ $_ ] } | |
175 | @{ $args[-1]{on_connect_call} } | |
176 | ]; | |
177 | } | |
178 | ||
179 | push @{ $args[-1]{on_connect_call} }, ( | |
180 | [ rebase_sqlmaker => 'DBICTest::SQLAC::SwapOut' ], | |
181 | ); | |
182 | } | |
183 | ||
184 | my $self = $proto->next::method( @args ); | |
119 | 185 | |
120 | 186 | # MASSIVE FIXME |
121 | 187 | # we can't really lock based on DSN, as we do not yet have a way to tell that e.g. |
144 | 210 | and |
145 | 211 | ( ! $ENV{DBICTEST_LOCK_HOLDER} or $ENV{DBICTEST_LOCK_HOLDER} == $$ ) |
146 | 212 | and |
147 | ref($_[0]) ne 'CODE' | |
213 | ref($args[0]) ne 'CODE' | |
148 | 214 | and |
149 | ($_[0]||'') !~ /^ (?i:dbi) \: SQLite (?: \: | \W ) .*? (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x | |
215 | ($args[0]||'') !~ /^ (?i:dbi) \: SQLite (?: \: | \W ) .*? (?: dbname\= )? (?: \:memory\: | t [\/\\] var [\/\\] DBIxClass\-) /x | |
150 | 216 | ) { |
151 | 217 | |
152 | 218 | my $locktype; |
242 | 308 | |
243 | 309 | my $cur_connect_call = $self->storage->on_connect_call; |
244 | 310 | |
311 | # without this weaken() the sub added below *sometimes* leaks | |
312 | # ( can't reproduce locally :/ ) | |
313 | weaken( my $wlocker = $locker ); | |
314 | ||
245 | 315 | $self->storage->on_connect_call([ |
246 | 316 | (ref $cur_connect_call eq 'ARRAY' |
247 | 317 | ? @$cur_connect_call |
248 | 318 | : ($cur_connect_call || ()) |
249 | 319 | ), |
250 | [sub { | |
251 | populate_weakregistry( $weak_registry, shift->_dbh ) | |
252 | }], | |
320 | [ sub { populate_weakregistry( $weak_registry, $_[0]->_dbh ) } ], | |
321 | ( !$wlocker ? () : ( | |
322 | require Data::Dumper::Concise | |
323 | and | |
324 | [ sub { ($wlocker||{})->{rdbms_connection_diag} = Data::Dumper::Concise::Dumper( $_[0]->_describe_connection() ) } ], | |
325 | )), | |
253 | 326 | ]); |
254 | 327 | } |
255 | 328 |
24 | 24 | &$ov; |
25 | 25 | }; |
26 | 26 | } |
27 | ||
28 | # our own test suite doesn't need to see this | |
29 | delete $ENV{DBICDEVREL_SWAPOUT_SQLAC_WITH}; | |
27 | 30 | } |
28 | 31 | |
29 | 32 | use Path::Class qw/file dir/; |
219 | 222 | return ( |
220 | 223 | ($ENV{TRAVIS}||'') eq 'true' |
221 | 224 | and |
222 | ($ENV{TRAVIS_REPO_SLUG}||'') =~ m|\w+/dbix-class$| | |
225 | ($ENV{TRAVIS_REPO_SLUG}||'') =~ m|\w+/DBIx-Class$| | |
223 | 226 | ) |
224 | 227 | } |
225 | 228 |
0 | package DBICTest::SQLMRebase; | |
1 | ||
2 | use warnings; | |
3 | use strict; | |
4 | ||
5 | our @ISA = qw( DBIx::Class::SQLMaker::ClassicExtensions SQL::Abstract::Classic ); | |
6 | ||
7 | __PACKAGE__->mk_group_accessors( simple => '__select_counter' ); | |
8 | ||
9 | sub select { | |
10 | $_[0]->{__select_counter}++; | |
11 | shift->next::method(@_); | |
12 | } | |
13 | ||
14 | 1; |
8 | 8 | __PACKAGE__->table('books'); |
9 | 9 | __PACKAGE__->add_columns( |
10 | 10 | 'id' => { |
11 | # part of a test (auto-retrieval of PK regardless of autoinc status) | |
12 | # DO NOT define | |
13 | #is_auto_increment => 1, | |
14 | ||
11 | 15 | data_type => 'integer', |
12 | is_auto_increment => 1, | |
13 | 16 | }, |
14 | 17 | 'source' => { |
15 | 18 | data_type => 'varchar', |
11 | 11 | 'id' => { |
12 | 12 | data_type => 'timestamp', |
13 | 13 | default_value => \'current_timestamp', |
14 | retrieve_on_insert => 1, | |
14 | 15 | }, |
15 | 16 | ); |
16 | 17 |
10 | 10 | use DBICTest::Util qw( stacktrace visit_namespaces ); |
11 | 11 | use constant { |
12 | 12 | CV_TRACING => !DBICTest::RunMode->is_plain && DBIx::Class::Optional::Dependencies->req_ok_for ('test_leaks_heavy'), |
13 | SKIP_SCALAR_REFS => ( "$]" < 5.008004 ), | |
13 | 14 | }; |
14 | 15 | |
15 | 16 | use base 'Exporter'; |
29 | 30 | |
30 | 31 | # a registry could be fed to itself or another registry via recursive sweeps |
31 | 32 | return $target if $reg_of_regs{$refaddr}; |
33 | ||
34 | return $target if SKIP_SCALAR_REFS and reftype($target) eq 'SCALAR'; | |
32 | 35 | |
33 | 36 | weaken( $reg_of_regs{ hrefaddr($weak_registry) } = $weak_registry ) |
34 | 37 | unless( $reg_of_regs{ hrefaddr($weak_registry) } ); |
180 | 183 | |
181 | 184 | } keys %{"${pkg}::"} ], |
182 | 185 | ) unless $pkg =~ /^ (?: |
183 | DB | next | B | .+? ::::ISA (?: ::CACHE ) | Class::C3 | |
186 | DB | next | B | .+? ::::ISA (?: ::CACHE ) | Class::C3 | B::Hooks::EndOfScope::PP::HintHash::.+ | |
184 | 187 | ) $/x; |
185 | 188 | } |
186 | 189 | ); |
101 | 101 | $global_exclusive_lock = 1; |
102 | 102 | } |
103 | 103 | elsif ($exp eq ':DiffSQL') { |
104 | require DBIx::Class::SQLMaker; | |
104 | 105 | require SQL::Abstract::Test; |
105 | 106 | my $into = caller(0); |
106 | 107 | for (qw(is_same_sql_bind is_same_sql is_same_bind)) { |
use warnings;
use strict;

use Test::More;
use Test::Exception;

use DBIx::Class::ResultSet::Pager;

# Unit tests for the standalone pager: out-of-range page clamping,
# constructor argument validation, empty-constructor defaults and
# set-chaining of the mutators.
# (Review fix: the previously bare is() assertions now carry descriptions
# so failures are identifiable in TAP output.)

my $page = DBIx::Class::ResultSet::Pager->new(7, 10, 12);
isa_ok($page, 'DBIx::Class::ResultSet::Pager');

is($page->first_page, 1, "Adjusted to first possible page");

$page = DBIx::Class::ResultSet::Pager->new(0, 10, -1);
isa_ok($page, 'DBIx::Class::ResultSet::Pager');

is($page->first_page, 1, "Adjusted to first possible page");

throws_ok {
  my $page = DBIx::Class::ResultSet::Pager->new(12, -1, 1);
}
qr/one entry per page/, "Can't have entries-per-page less than 1";

# The new empty constructor means we might be empty, let's check for sensible defaults
$page = DBIx::Class::ResultSet::Pager->new;
is($page->entries_per_page, 10, 'default entries_per_page is 10');
is($page->total_entries, 0, 'no entries by default');
is($page->entries_on_this_page, 0, 'no entries on the current page');
is($page->first_page, 1, 'first_page defaults to 1');
is($page->last_page, 1, 'last_page defaults to 1');
is($page->first, 0, 'first entry index is 0 on an empty pager');
is($page->last, 0, 'last entry index is 0 on an empty pager');
is($page->previous_page, undef, 'no previous page on an empty pager');
is($page->current_page, 1, 'current_page defaults to 1');
is($page->next_page, undef, 'no next page on an empty pager');
is($page->skipped, 0, 'nothing skipped on an empty pager');
my @integers = (0 .. 100);
@integers = $page->splice(\@integers);
my $integers = join ',', @integers;
is($integers, '', 'splice through an empty pager selects nothing');

$page->current_page(undef);
is($page->current_page, 1, 'undef current_page clamped to 1');

$page->current_page(-5);
is($page->current_page, 1, 'negative current_page clamped to 1');

$page->current_page(5);
is($page->current_page, 1, 'past-the-end current_page clamped to 1');

is_deeply(
  $page->total_entries(100),
  $page,
  "Set-chaining works on total_entries",
);

is_deeply(
  $page->entries_per_page(20),
  $page,
  "Set-chaining works on entries_per_page",
);

is_deeply(
  $page->current_page(2),
  $page,
  "Set-chaining works on current_page",
);


is($page->first, 21, 'first entry index correct on page 2');
$page->current_page(3);
is($page->first, 41, 'first entry index correct on page 3');

done_testing;
use strict;
use warnings;

use Test::More;

use DBIx::Class::ResultSet::Pager;

# Data-driven pager tests: every __DATA__ row encodes a pager configuration
# and the expected values of all accessors (format documented above __DATA__).
# Each row is exercised twice - via the positional constructor and via the
# empty constructor followed by mutators.

my $name;

foreach my $line (<DATA>) {
  chomp $line;
  next unless $line;

  # a '# comment' row names the following group of test rows
  if ( $line =~ /^# ?(.+)/ ) {
    $name = $1;
    next;
  }

  # Review fix: these were raw print()s to STDOUT, which corrupts TAP
  # output under a harness - note() is the TAP-safe equivalent
  note "Line is: $line";
  my @vals = map { /^undef$/ ? undef : /^''$/ ? '' : $_ } split /\s+/,
    $line;

  my $page = DBIx::Class::ResultSet::Pager->new( @vals[ 0, 1, 2 ] );
  note "Old style";
  check( $page, $name, @vals );

  $page = DBIx::Class::ResultSet::Pager->new();
  $page->total_entries( $vals[0] );
  $page->entries_per_page( $vals[1] );
  $page->current_page( $vals[2] );
  note "New style";
  check( $page, $name, @vals );
}

my $page = DBIx::Class::ResultSet::Pager->new( 0, 10 );
isa_ok( $page, 'DBIx::Class::ResultSet::Pager' );
my @empty;
my @spliced = $page->splice( \@empty );
is( scalar(@spliced), 0, "Splice on empty is empty" );

# Assert every pager accessor against the expected values from one data row
sub check {
    my ( $page, $name, @vals ) = @_;
    isa_ok( $page, 'DBIx::Class::ResultSet::Pager' );

    is( $page->first_page,    $vals[3], "$name: first page" );
    is( $page->last_page,     $vals[4], "$name: last page" );
    is( $page->first,         $vals[5], "$name: first" );
    is( $page->last,          $vals[6], "$name: last" );
    is( $page->previous_page, $vals[7], "$name: previous_page" );
    is( $page->current_page,  $vals[8], "$name: current_page" );
    is( $page->next_page,     $vals[9], "$name: next_page" );

    my @integers = ( 0 .. $vals[0] - 1 );
    @integers = $page->splice( \@integers );
    my $integers = join ',', @integers;
    is( $integers, $vals[10], "$name: splice" );
    is( $page->entries_on_this_page, $vals[11],
        "$name: entries_on_this_page" );

    my $skipped = $vals[5] - 1;
    $skipped = 0 if $skipped < 0;
    is( $page->skipped, $skipped, "$name: skipped" );
    $page->change_entries_per_page( $vals[12] );
    is( $page->current_page, $vals[13], "$name: change_entries_per_page" );
}

done_testing;

# Format of test data: 0=number of entries, 1=entries per page, 2=current page,
# 3=first page, 4=last page, 5=first entry on page, 6=last entry on page,
# 7=previous page, 8=current page, 9=next page, 10=current entries,
# 11=current number of entries, 12=new entries per page, 13=new page

__DATA__
# Initial test
50 10 1 1 5 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 15 1
50 10 2 1 5 11 20 1 2 3 10,11,12,13,14,15,16,17,18,19 10 15 1
50 10 3 1 5 21 30 2 3 4 20,21,22,23,24,25,26,27,28,29 10 15 2
50 10 4 1 5 31 40 3 4 5 30,31,32,33,34,35,36,37,38,39 10 15 3
50 10 5 1 5 41 50 4 5 undef 40,41,42,43,44,45,46,47,48,49 10 15 3

# Under 10
1 10 1 1 1 1 1 undef 1 undef 0 1 15 1
2 10 1 1 1 1 2 undef 1 undef 0,1 2 15 1
3 10 1 1 1 1 3 undef 1 undef 0,1,2 3 15 1
4 10 1 1 1 1 4 undef 1 undef 0,1,2,3 4 15 1
5 10 1 1 1 1 5 undef 1 undef 0,1,2,3,4 5 15 1
6 10 1 1 1 1 6 undef 1 undef 0,1,2,3,4,5 6 15 1
7 10 1 1 1 1 7 undef 1 undef 0,1,2,3,4,5,6 7 15 1
8 10 1 1 1 1 8 undef 1 undef 0,1,2,3,4,5,6,7 8 15 1
9 10 1 1 1 1 9 undef 1 undef 0,1,2,3,4,5,6,7,8 9 15 1
10 10 1 1 1 1 10 undef 1 undef 0,1,2,3,4,5,6,7,8,9 10 15 1

# Over 10
11 10 1 1 2 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 10 1
11 10 2 1 2 11 11 1 2 undef 10 1 10 2
12 10 1 1 2 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 10 1
12 10 2 1 2 11 12 1 2 undef 10,11 2 10 2
13 10 1 1 2 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 10 1
13 10 2 1 2 11 13 1 2 undef 10,11,12 3 10 2

# Under 20
19 10 1 1 2 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 4 1
19 10 2 1 2 11 19 1 2 undef 10,11,12,13,14,15,16,17,18 9 4 3
20 10 1 1 2 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 4 1
20 10 2 1 2 11 20 1 2 undef 10,11,12,13,14,15,16,17,18,19 10 4 3

# Over 20
21 10 1 1 3 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 19 1
21 10 2 1 3 11 20 1 2 3 10,11,12,13,14,15,16,17,18,19 10 19 1
21 10 3 1 3 21 21 2 3 undef 20 1 19 2
22 10 1 1 3 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 19 1
22 10 2 1 3 11 20 1 2 3 10,11,12,13,14,15,16,17,18,19 10 19 1
22 10 3 1 3 21 22 2 3 undef 20,21 2 19 2
23 10 1 1 3 1 10 undef 1 2 0,1,2,3,4,5,6,7,8,9 10 19 1
23 10 2 1 3 11 20 1 2 3 10,11,12,13,14,15,16,17,18,19 10 19 1
23 10 3 1 3 21 23 2 3 undef 20,21,22 3 19 2

# Zero test
0 10 1 1 1 0 0 undef 1 undef '' 0 5 1
use strict;
use warnings;

use Test::More;
use lib qw(t/lib);
use DBICTest;
use Storable qw/dclone/;

# Integration tests of ResultSet paging: pager laziness (no queries until a
# total-count-dependent method is called), software_limit, chained searches,
# offsets, cached resultsets, default_resultset_attributes and freeze/thaw.
# Review fixes: typos in three test descriptions and one comment
# (properby->properly, missing close-paren, sey->set, exits->exists).

my $schema = DBICTest->init_schema();

is ($schema->resultset("CD")->count, 5, 'Initial count sanity check');

# count every query the storage issues, so laziness can be asserted
my $qcnt;
$schema->storage->debugcb(sub { $qcnt++ });
$schema->storage->debug (1);

my $rs = $schema->resultset("CD");

# first page
$qcnt = 0;
my $it = $rs->search(
  {},
  { order_by => 'title',
    rows => 3,
    page => 1 }
);
my $pager = $it->pager;
is ($qcnt, 0, 'No queries on rs/pager creation');

is ($pager->entries_per_page, 3, 'Pager created with correct entries_per_page');
ok ($pager->current_page(-1), 'Set nonexistent page');
is ($pager->current_page, 1, 'Page set behaves correctly');
ok ($pager->current_page(2), 'Set 2nd page');

is ($qcnt, 0, 'No queries on total_count-independent methods');

is( $it->pager->entries_on_this_page, 2, "entries_on_this_page ok for page 2" );

is ($qcnt, 1, 'Count fired to get pager page entries');

$qcnt = 0;
is ($pager->previous_page, 1, 'Correct previous_page');
is ($pager->next_page, undef, 'No more pages');
is ($qcnt, 0, 'No more counts - amount of entries cached in pager');

is( $it->count, 3, "count on paged rs ok" );
is ($qcnt, 1, 'An $rs->count still fires properly');

is( $it->next->title, "Caterwaulin' Blues", "iterator->next ok" );

$it->next;
$it->next;

is( $it->next, undef, "next past end of page ok" );


# second page, testing with array
my @page2 = $rs->search(
  {},
  { order_by => 'title',
    rows => 3,
    page => 2 }
);

is( $page2[0]->title, "Generic Manufactured Singles", "second page first title ok" );

# page a standard resultset
$it = $rs->search(
  {},
  { order_by => 'title',
    rows => 3 }
);
my $page = $it->page(2);

is( $page->count, 2, "standard resultset paged rs count ok" );

is( $page->next->title, "Generic Manufactured Singles", "second page of standard resultset ok" );


# test software-based limit paging
$it = $rs->search(
  {},
  { order_by => 'title',
    rows => 3,
    page => 2,
    software_limit => 1 }
);
is( $it->pager->entries_on_this_page, 2, "software entries_on_this_page ok" );

is( $it->pager->previous_page, 1, "software previous_page ok" );

is( $it->count, 2, "software count on paged rs ok" );

is( $it->next->title, "Generic Manufactured Singles", "software iterator->next ok" );

# test paging with chained searches
$it = $rs->search(
  {},
  { rows => 2,
    page => 2 }
)->search( undef, { order_by => 'title' } );

is( $it->count, 2, "chained searches paging ok" );

# test page with offset
$it = $rs->search({}, {
  rows => 2,
  page => 2,
  offset => 1,
  order_by => 'cdid'
});

my $row = $rs->search({}, {
  order_by => 'cdid',
  offset => 3,
  rows => 1
})->single;

is($row->cdid, $it->first->cdid, 'page with offset');


# test pager on non-title page behavior
$qcnt = 0;
$it = $rs->search({}, { rows => 3 })->page (2);
ok ($it->pager);
is ($qcnt, 0, 'No count on past-first-page pager instantiation');

is ($it->pager->current_page, 2, 'Page set properly by $rs');
is( $it->pager->total_entries, 5, 'total_entries correct' );

$rs->create ({ artist => 1, title => 'MOAR!', year => 2010 });
is( $it->count, 3, 'Dynamic count on filling up page' );
$rs->create ({ artist => 1, title => 'MOAR!!!', year => 2011 });
is( $it->count, 3, 'Count still correct (does not overflow)' );

$qcnt = 0;
is( $it->pager->total_entries, 5, 'total_entries properly cached at old value' );
is ($qcnt, 0, 'No queries');

# test fresh pager with explicit total count assignment
$qcnt = 0;
$pager = $rs->search({}, { rows => 4 })->page (2)->pager;
$pager->total_entries (13);

is ($pager->current_page, 2, 'Correct start page');
is ($pager->next_page, 3, 'One more page');
is ($pager->last_page, 4, 'And one more page');
is ($pager->previous_page, 1, 'One page in front');

is ($qcnt, 0, 'No queries with explicitly set total count');

# test cached resultsets
my $init_cnt = $rs->count;

$it = $rs->search({}, { rows => 3, cache => 1 })->page(2);
is ($it->count, 3, '3 rows');
is (scalar $it->all, 3, '3 objects');

isa_ok($it->pager,'DBIx::Class::ResultSet::Pager','Get a pager back ok');
is($it->pager->total_entries,7);
is($it->pager->current_page,2);
is($it->pager->entries_on_this_page,3);

$it = $it->page(3);
is ($it->count, 1, 'One row');
is (scalar $it->all, 1, 'One object');

isa_ok($it->pager,'DBIx::Class::ResultSet::Pager','Get a pager back ok');
is($it->pager->total_entries,7);
is($it->pager->current_page,3);
is($it->pager->entries_on_this_page,1);


$it->delete;
is ($rs->count, $init_cnt - 1, 'One row deleted as expected');

is ($it->count, 1, 'One row (cached)');
is (scalar $it->all, 1, 'One object (cached)');

# test fresh rs creation with modified defaults
my $p = sub { $schema->resultset('CD')->page(1)->pager->entries_per_page; };

is($p->(), 10, 'default rows is 10');

$schema->default_resultset_attributes({ rows => 5 });

is($p->(), 5, 'default rows is 5');

# does serialization work (preserve laziness, while preserving state if it exists)
$qcnt = 0;
$it = $rs->search(
  {},
  { order_by => 'title',
    rows => 5,
    page => 2 }
);
$pager = $it->pager;
is ($qcnt, 0, 'No queries on rs/pager creation');

$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');

is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2" );

is ($qcnt, 1, 'Count fired to get pager page entries');

$rs->create({ title => 'bah', artist => 1, year => 2011 });

$qcnt = 0;
$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');

is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2, even though underlying count changed" );

is ($qcnt, 0, 'No count fired on pre-existing total count');

done_testing;
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | ||
9 | my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; | |
10 | my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype; | |
7 | ||
8 | my $ROWS = DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype; | |
9 | my $OFFSET = DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype; | |
11 | 10 | |
12 | 11 | my $schema = DBICTest->init_schema(); |
13 | 12 |
3 | 3 | use Test::More; |
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | use List::Util 'min'; | |
7 | ||
6 | 8 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 9 | |
9 | 10 | my ($ROWS, $OFFSET) = ( |
10 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
11 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
11 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
12 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
12 | 13 | ); |
13 | 14 | |
14 | 15 | my $schema = DBICTest->init_schema(quote_names => 1); |
79 | 80 | |
80 | 81 | my $rs = $filtered_cd_rs->search({}, { $limit ? (rows => $limit) : (), offset => $offset }); |
81 | 82 | |
82 | my $used_limit = $limit || DBIx::Class::SQLMaker->__max_int; | |
83 | my $used_limit = $limit || $schema->storage->sql_maker->__max_int; | |
83 | 84 | my $offset_str = $offset ? 'OFFSET ?' : ''; |
84 | 85 | |
85 | 86 | is_same_sql_bind( |
130 | 131 | |
131 | 132 | is_deeply( |
132 | 133 | $rs->all_hri, |
133 | [ @{$hri_contents}[$offset .. List::Util::min( $used_limit+$offset-1, $#$hri_contents)] ], | |
134 | [ @{$hri_contents}[$offset .. min( $used_limit+$offset-1, $#$hri_contents)] ], | |
134 | 135 | "Correct slice of the resultset returned with limit '$limit', offset '$offset'", |
135 | 136 | ); |
136 | 137 | } |
6 | 6 | use Test::Exception; |
7 | 7 | use lib qw(t/lib); |
8 | 8 | use DBICTest ':DiffSQL'; |
9 | use DBIx::Class::SQLMaker::LimitDialects; | |
10 | ||
11 | my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; | |
9 | ||
10 | my $ROWS = DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype; | |
12 | 11 | |
13 | 12 | my $schema = DBICTest->init_schema(); |
14 | 13 |
15 | 15 | DBICTest::Schema::CD->table('cd'); |
16 | 16 | } |
17 | 17 | |
18 | use DBIx::Class::_Util 'scope_guard'; | |
18 | 19 | use DBICTest; |
19 | 20 | |
20 | 21 | my $schema = DBICTest->init_schema; |
60 | 61 | }, { join => { fourkeys_to_twokeys => 'twokeys' }} |
61 | 62 | ); |
62 | 63 | |
64 | my $read_count_inc = 0; | |
65 | ||
63 | 66 | is ($fks->count, 4, 'Joined FourKey count correct (2x2)'); |
64 | 67 | $schema->is_executed_sql_bind( sub { |
65 | $fks->update ({ read_count => \ 'read_count + 1' }) | |
68 | $fks->update ({ read_count => \ 'read_count + 1' }); | |
69 | $read_count_inc++; | |
66 | 70 | }, [[ |
67 | 71 | 'UPDATE fourkeys |
68 | 72 | SET read_count = read_count + 1 |
72 | 76 | 'c', |
73 | 77 | ]], 'Correct update-SQL with multijoin with pruning' ); |
74 | 78 | |
75 | is ($fa->discard_changes->read_count, 11, 'Update ran only once on discard-join resultset'); | |
76 | is ($fb->discard_changes->read_count, 21, 'Update ran only once on discard-join resultset'); | |
79 | is ($fa->discard_changes->read_count, 10 + $read_count_inc, 'Update ran only once on discard-join resultset'); | |
80 | is ($fb->discard_changes->read_count, 20 + $read_count_inc, 'Update ran only once on discard-join resultset'); | |
77 | 81 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); |
78 | 82 | |
79 | 83 | # make the multi-join stick |
81 | 85 | { 'fourkeys_to_twokeys.pilot_sequence' => { '!=' => 666 } }, |
82 | 86 | { order_by => [ $fks->result_source->primary_columns ] }, |
83 | 87 | ); |
84 | $schema->is_executed_sql_bind( sub { | |
85 | $fks_multi->update ({ read_count => \ 'read_count + 1' }) | |
86 | }, [ | |
87 | [ 'BEGIN' ], | |
88 | [ | |
89 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
90 | FROM fourkeys me | |
91 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
92 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
93 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? | |
94 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
95 | ORDER BY foo, bar, hello, goodbye | |
96 | ', | |
97 | (1, 2) x 2, | |
98 | 666, | |
99 | (1, 2) x 2, | |
100 | 'c', | |
101 | ], | |
102 | [ | |
103 | 'UPDATE fourkeys | |
104 | SET read_count = read_count + 1 | |
105 | WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) | |
106 | ', | |
107 | ( (1) x 4, (2) x 4 ), | |
108 | ], | |
109 | [ 'COMMIT' ], | |
110 | ], 'Correct update-SQL with multijoin without pruning' ); | |
111 | ||
112 | is ($fa->discard_changes->read_count, 12, 'Update ran only once on joined resultset'); | |
113 | is ($fb->discard_changes->read_count, 22, 'Update ran only once on joined resultset'); | |
114 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); | |
115 | ||
116 | $schema->is_executed_sql_bind( sub { | |
117 | my $res = $fks_multi->search (\' "blah" = "bleh" ')->delete; | |
118 | ok ($res, 'operation is true'); | |
119 | cmp_ok ($res, '==', 0, 'zero rows affected'); | |
120 | }, [ | |
121 | [ 'BEGIN' ], | |
122 | [ | |
123 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
124 | FROM fourkeys me | |
125 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
126 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
127 | WHERE "blah" = "bleh" AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? | |
128 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
129 | ORDER BY foo, bar, hello, goodbye | |
130 | ', | |
131 | (1, 2) x 2, | |
132 | 666, | |
133 | (1, 2) x 2, | |
134 | 'c', | |
135 | ], | |
136 | [ 'COMMIT' ], | |
137 | ], 'Correct null-delete-SQL with multijoin without pruning' ); | |
138 | ||
139 | $schema->is_executed_sql_bind( sub { | |
140 | $fks->search({ 'twokeys.artist' => { '!=' => 666 } })->update({ read_count => \ 'read_count + 1' }); | |
141 | }, [ | |
142 | [ 'BEGIN' ], | |
143 | [ | |
144 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
145 | FROM fourkeys me | |
146 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
147 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
148 | LEFT JOIN twokeys twokeys | |
149 | ON twokeys.artist = fourkeys_to_twokeys.t_artist AND twokeys.cd = fourkeys_to_twokeys.t_cd | |
150 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? AND twokeys.artist != ? | |
151 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
152 | ', | |
153 | (1, 2) x 4, | |
154 | 'c', | |
155 | 666, | |
156 | ], | |
157 | [ | |
158 | 'UPDATE fourkeys | |
159 | SET read_count = read_count + 1 | |
160 | WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) | |
161 | ', | |
162 | ( (1) x 4, (2) x 4 ), | |
163 | ], | |
164 | [ 'COMMIT' ], | |
165 | ], 'Correct update-SQL with premultiplied restricting join without pruning' ); | |
166 | ||
167 | is ($fa->discard_changes->read_count, 13, 'Update ran only once on joined resultset'); | |
168 | is ($fb->discard_changes->read_count, 23, 'Update ran only once on joined resultset'); | |
169 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); | |
88 | ||
89 | # Versions of libsqlite before 3.14 do not support multicolumn-in | |
90 | # namely WHERE ( foo, bar ) IN ( SELECT foo, bar FROM ... ) | |
91 | # | |
92 | # Run both variants to ensure the SQL is correct, and also observe whether | |
93 | # the autodetection worked correctly for the current SQLite version | |
94 | { | |
95 | my $detected_can_mci = $schema->storage->_use_multicolumn_in ? 1 : 0; | |
96 | ||
97 | for my $force_use_mci (0, 1) { | |
98 | ||
99 | my $orig_use_mci = $schema->storage->_use_multicolumn_in; | |
100 | my $sg = scope_guard { | |
101 | $schema->storage->_use_multicolumn_in($orig_use_mci); | |
102 | }; | |
103 | $schema->storage->_use_multicolumn_in( $force_use_mci); | |
104 | ||
105 | $schema->is_executed_sql_bind( sub { | |
106 | my $executed = 0; | |
107 | eval { | |
108 | $fks_multi->update ({ read_count => \ 'read_count + 1' }); | |
109 | $executed = 1; | |
110 | $read_count_inc++; | |
111 | }; | |
112 | ||
113 | is( | |
114 | $executed, | |
115 | ( ( ! $detected_can_mci and $force_use_mci) ? 0 : 1 ), | |
116 | "Executed status as expected with multicolumn-in capability ($detected_can_mci) combined with forced-mci-use ($force_use_mci)" | |
117 | ); | |
118 | ||
119 | }, [ | |
120 | $force_use_mci | |
121 | ?( | |
122 | [ | |
123 | 'UPDATE fourkeys | |
124 | SET read_count = read_count + 1 | |
125 | WHERE | |
126 | (foo, bar, hello, goodbye) IN ( | |
127 | SELECT me.foo, me.bar, me.hello, me.goodbye | |
128 | FROM fourkeys me | |
129 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys ON | |
130 | fourkeys_to_twokeys.f_bar = me.bar | |
131 | AND fourkeys_to_twokeys.f_foo = me.foo | |
132 | AND fourkeys_to_twokeys.f_goodbye = me.goodbye | |
133 | AND fourkeys_to_twokeys.f_hello = me.hello | |
134 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? | |
135 | ORDER BY foo, bar, hello, goodbye | |
136 | ) | |
137 | ', | |
138 | ( 1, 2) x 2, | |
139 | 666, | |
140 | ( 1, 2) x 2, | |
141 | 'c', | |
142 | ] | |
143 | ) | |
144 | :( | |
145 | [ 'BEGIN' ], | |
146 | [ | |
147 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
148 | FROM fourkeys me | |
149 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
150 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
151 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? | |
152 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
153 | ORDER BY foo, bar, hello, goodbye | |
154 | ', | |
155 | (1, 2) x 2, | |
156 | 666, | |
157 | (1, 2) x 2, | |
158 | 'c', | |
159 | ], | |
160 | [ | |
161 | 'UPDATE fourkeys | |
162 | SET read_count = read_count + 1 | |
163 | WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) | |
164 | ', | |
165 | ( (1) x 4, (2) x 4 ), | |
166 | ], | |
167 | [ 'COMMIT' ], | |
168 | ) | |
169 | ], "Correct update-SQL with multijoin without pruning ( use_multicolumn_in forced to: $force_use_mci )" ); | |
170 | ||
171 | is ($fa->discard_changes->read_count, 10 + $read_count_inc, 'Update ran expected amount of times on joined resultset'); | |
172 | is ($fb->discard_changes->read_count, 20 + $read_count_inc, 'Update ran expected amount of times on joined resultset'); | |
173 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); | |
174 | ||
175 | $schema->is_executed_sql_bind( sub { | |
176 | my $executed = 0; | |
177 | eval { | |
178 | my $res = $fks_multi->search (\' "blah" = "bleh" ')->delete; | |
179 | $executed = 1; | |
180 | ok ($res, 'operation is true'); | |
181 | cmp_ok ($res, '==', 0, 'zero rows affected'); | |
182 | }; | |
183 | ||
184 | is( | |
185 | $executed, | |
186 | ( ( ! $detected_can_mci and $force_use_mci) ? 0 : 1 ), | |
187 | "Executed status as expected with multicolumn-in capability ($detected_can_mci) combined with forced-mci-use ($force_use_mci)" | |
188 | ); | |
189 | ||
190 | }, [ | |
191 | $force_use_mci | |
192 | ? ( | |
193 | [ | |
194 | 'DELETE FROM fourkeys | |
195 | WHERE ( foo, bar, hello, goodbye ) IN ( | |
196 | SELECT me.foo, me.bar, me.hello, me.goodbye | |
197 | FROM fourkeys me | |
198 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
199 | ON fourkeys_to_twokeys.f_bar = me.bar | |
200 | AND fourkeys_to_twokeys.f_foo = me.foo | |
201 | AND fourkeys_to_twokeys.f_goodbye = me.goodbye | |
202 | AND fourkeys_to_twokeys.f_hello = me.hello | |
203 | WHERE | |
204 | "blah" = "bleh" | |
205 | AND | |
206 | ( bar = ? OR bar = ? ) | |
207 | AND | |
208 | ( foo = ? OR foo = ? ) | |
209 | AND | |
210 | fourkeys_to_twokeys.pilot_sequence != ? | |
211 | AND | |
212 | ( goodbye = ? OR goodbye = ? ) | |
213 | AND | |
214 | ( hello = ? OR hello = ? ) | |
215 | AND | |
216 | sensors != ? | |
217 | ORDER BY foo, bar, hello, goodbye | |
218 | )', | |
219 | (1, 2) x 2, | |
220 | 666, | |
221 | (1, 2) x 2, | |
222 | 'c', | |
223 | ] | |
224 | ) | |
225 | : ( | |
226 | [ 'BEGIN' ], | |
227 | [ | |
228 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
229 | FROM fourkeys me | |
230 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
231 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
232 | WHERE "blah" = "bleh" AND ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND fourkeys_to_twokeys.pilot_sequence != ? AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? | |
233 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
234 | ORDER BY foo, bar, hello, goodbye | |
235 | ', | |
236 | (1, 2) x 2, | |
237 | 666, | |
238 | (1, 2) x 2, | |
239 | 'c', | |
240 | ], | |
241 | [ 'COMMIT' ], | |
242 | ) | |
243 | ], 'Correct null-delete-SQL with multijoin without pruning' ); | |
244 | ||
245 | is ($fa->discard_changes->read_count, 10 + $read_count_inc, 'Noop update did not touch anything'); | |
246 | is ($fb->discard_changes->read_count, 20 + $read_count_inc, 'Noop update did not touch anything'); | |
247 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); | |
248 | ||
249 | ||
250 | $schema->is_executed_sql_bind( sub { | |
251 | my $executed = 0; | |
252 | ||
253 | eval { | |
254 | $fks->search({ 'twokeys.artist' => { '!=' => 666 } })->update({ read_count => \ 'read_count + 1' }); | |
255 | $executed = 1; | |
256 | $read_count_inc++; | |
257 | }; | |
258 | ||
259 | is( | |
260 | $executed, | |
261 | ( ( ! $detected_can_mci and $force_use_mci) ? 0 : 1 ), | |
262 | "Executed status as expected with multicolumn-in capability ($detected_can_mci) combined with forced-mci-use ($force_use_mci)" | |
263 | ); | |
264 | }, [ | |
265 | $force_use_mci | |
266 | ? ( | |
267 | [ | |
268 | 'UPDATE fourkeys SET read_count = read_count + 1 | |
269 | WHERE ( foo, bar, hello, goodbye ) IN ( | |
270 | SELECT me.foo, me.bar, me.hello, me.goodbye | |
271 | FROM fourkeys me | |
272 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys ON | |
273 | fourkeys_to_twokeys.f_bar = me.bar | |
274 | AND fourkeys_to_twokeys.f_foo = me.foo | |
275 | AND fourkeys_to_twokeys.f_goodbye = me.goodbye | |
276 | AND fourkeys_to_twokeys.f_hello = me.hello | |
277 | LEFT JOIN twokeys twokeys | |
278 | ON twokeys.artist = fourkeys_to_twokeys.t_artist AND twokeys.cd = fourkeys_to_twokeys.t_cd | |
279 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? AND twokeys.artist != ? | |
280 | )', | |
281 | (1, 2) x 4, | |
282 | 'c', | |
283 | 666, | |
284 | ] | |
285 | ) | |
286 | : ( | |
287 | [ 'BEGIN' ], | |
288 | [ | |
289 | 'SELECT me.foo, me.bar, me.hello, me.goodbye | |
290 | FROM fourkeys me | |
291 | LEFT JOIN fourkeys_to_twokeys fourkeys_to_twokeys | |
292 | ON fourkeys_to_twokeys.f_bar = me.bar AND fourkeys_to_twokeys.f_foo = me.foo AND fourkeys_to_twokeys.f_goodbye = me.goodbye AND fourkeys_to_twokeys.f_hello = me.hello | |
293 | LEFT JOIN twokeys twokeys | |
294 | ON twokeys.artist = fourkeys_to_twokeys.t_artist AND twokeys.cd = fourkeys_to_twokeys.t_cd | |
295 | WHERE ( bar = ? OR bar = ? ) AND ( foo = ? OR foo = ? ) AND ( goodbye = ? OR goodbye = ? ) AND ( hello = ? OR hello = ? ) AND sensors != ? AND twokeys.artist != ? | |
296 | GROUP BY me.foo, me.bar, me.hello, me.goodbye | |
297 | ', | |
298 | (1, 2) x 4, | |
299 | 'c', | |
300 | 666, | |
301 | ], | |
302 | [ | |
303 | 'UPDATE fourkeys | |
304 | SET read_count = read_count + 1 | |
305 | WHERE ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) OR ( bar = ? AND foo = ? AND goodbye = ? AND hello = ? ) | |
306 | ', | |
307 | ( (1) x 4, (2) x 4 ), | |
308 | ], | |
309 | [ 'COMMIT' ], | |
310 | ) | |
311 | ], 'Correct update-SQL with premultiplied restricting join without pruning' ); | |
312 | ||
313 | is ($fa->discard_changes->read_count, 10 + $read_count_inc, 'Update ran expected amount of times on joined resultset'); | |
314 | is ($fb->discard_changes->read_count, 20 + $read_count_inc, 'Update ran expected amount of times on joined resultset'); | |
315 | is ($fc->discard_changes->read_count, 30, 'Update did not touch outlier'); | |
316 | } | |
317 | } | |
170 | 318 | |
171 | 319 | # |
172 | 320 | # Make sure multicolumn in or the equivalent functions correctly |
184 | 332 | |
185 | 333 | is ($sub_rs->count, 2, 'Only two rows from fourkeys match'); |
186 | 334 | |
187 | # attempts to delete a grouped rs should fail miserably | |
188 | throws_ok ( | |
189 | sub { $sub_rs->search ({}, { distinct => 1 })->delete }, | |
190 | qr/attempted a delete operation on a resultset which does group_by/, | |
191 | 'Grouped rs update/delete not allowed', | |
192 | ); | |
335 | # ensure we do not do something dumb on MCI-not-supporting engines | |
336 | { | |
337 | my $orig_use_mci = $schema->storage->_use_multicolumn_in; | |
338 | my $sg = scope_guard { | |
339 | $schema->storage->_use_multicolumn_in($orig_use_mci); | |
340 | }; | |
341 | $schema->storage->_use_multicolumn_in(0); | |
342 | ||
343 | # attempts to delete a global-grouped rs should fail miserably | |
344 | throws_ok ( | |
345 | sub { $sub_rs->search ({}, { distinct => 1 })->delete }, | |
346 | qr/attempted a delete operation on a resultset which does group_by on columns other than the primary keys/, | |
347 | 'Grouped rs update/delete not allowed', | |
348 | ); | |
349 | } | |
193 | 350 | |
194 | 351 | # grouping on PKs only should pass |
195 | 352 | $sub_rs->search ( |
17 | 17 | __PACKAGE__->table('users'); |
18 | 18 | |
19 | 19 | __PACKAGE__->add_columns( |
20 | qw/user_id email password | |
21 | firstname lastname active | |
22 | admin/ | |
20 | user_id => { retrieve_on_insert => 1 }, | |
21 | qw( email password firstname lastname active admin ), | |
23 | 22 | ); |
24 | 23 | |
25 | 24 | __PACKAGE__->set_primary_key('user_id'); |
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 7 | |
9 | my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; | |
8 | my $ROWS = DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype; | |
10 | 9 | |
11 | 10 | my $schema = DBICTest->init_schema(); |
12 | 11 |
3 | 3 | use Test::More; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest ':DiffSQL'; |
6 | use SQL::Abstract qw(is_plain_value is_literal_value); | |
6 | use SQL::Abstract::Util qw(is_plain_value is_literal_value); | |
7 | 7 | use List::Util 'shuffle'; |
8 | 8 | use Data::Dumper; |
9 | 9 | $Data::Dumper::Terse = 1; |
27 | 27 | { cond => \[ '?', "foo" ], sql => '= ?', bind => [ |
28 | 28 | [ {} => 'foo' ], |
29 | 29 | [ {} => 'foo' ], |
30 | ]}, | |
31 | { cond => { '@>' => { -value => [ 1,2,3 ] } }, sql => '@> ?', bind => [ | |
32 | [ { dbic_colname => "title", sqlt_datatype => "varchar", sqlt_size => 100 } => [1, 2, 3] ], | |
33 | [ { dbic_colname => "year", sqlt_datatype => "varchar", sqlt_size => 100 } => [1, 2, 3] ], | |
30 | 34 | ]}, |
31 | 35 | ) { |
32 | 36 | my $rs = $schema->resultset('CD')->search({}, { columns => 'title' }); |
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 7 | use DBIx::Class::_Util 'sigwarn_silencer'; |
9 | 8 | |
10 | my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; | |
9 | my $ROWS = DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype; | |
11 | 10 | |
12 | 11 | my $schema = DBICTest->init_schema(); |
13 | 12 | my $art_rs = $schema->resultset('Artist'); |
164 | 163 | for my $i (0 .. $#tests) { |
165 | 164 | my $t = $tests[$i]; |
166 | 165 | for my $p (1, 2) { # repeat everything twice, make sure we do not clobber search arguments |
167 | local $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ ); | |
166 | local $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract::Classic syntax are deprecated/ ); | |
168 | 167 | |
169 | 168 | is_same_sql_bind ( |
170 | 169 | $t->{rs}->search ($t->{search}, $t->{attrs})->as_query, |
6 | 6 | |
7 | 7 | use lib qw(t/lib); |
8 | 8 | use DBICTest ':DiffSQL'; |
9 | use DBIx::Class::SQLMaker::LimitDialects; | |
10 | 9 | |
11 | 10 | my ($ROWS, $OFFSET) = ( |
12 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
13 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
11 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
12 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
14 | 13 | ); |
15 | 14 | |
16 | 15 | my $schema = DBICTest->init_schema(); |
91 | 91 | [], |
92 | 92 | ); |
93 | 93 | } |
94 | ||
95 | 94 | } |
96 | 95 | |
97 | 96 | |
98 | ||
99 | # Make sure the carp/croak override in SQLA works (via SQLMaker) | |
97 | # Make sure the carp/croak override in SQLAC works (via SQLMaker) | |
100 | 98 | my $file = quotemeta (__FILE__); |
101 | 99 | throws_ok (sub { |
102 | 100 | $schema->resultset ('Artist')->search ({}, { order_by => { -asc => 'stuff', -desc => 'staff' } } )->as_query; |
374 | 374 | efcc_result => { 'me.title' => 'Spoonful of bees' }, |
375 | 375 | }, |
376 | 376 | |
377 | # original from RT#132390 | |
378 | { | |
379 | where => { | |
380 | array_col => { '@>' => { -value => [ 1,2,3 ] } } | |
381 | }, | |
382 | cc_result => { | |
383 | array_col => { '@>' => { -value => [ 1,2,3 ] } } | |
384 | }, | |
385 | sql => 'WHERE array_col @> ?', | |
386 | efcc_result => {}, | |
387 | }, | |
388 | ||
377 | 389 | # crazy literals |
378 | 390 | { |
379 | 391 | where => { |
442 | 454 | }, |
443 | 455 | ); |
444 | 456 | |
445 | # these die as of SQLA 1.80 - make sure we do not transform them | |
457 | # these die as of SQLAC 1.80 - make sure we do not transform them | |
446 | 458 | # into something usable instead |
447 | 459 | for my $lhs (undef, '', { -ident => 'foo' }, { -value => 'foo' } ) { |
448 | 460 | no warnings 'uninitialized'; |
483 | 495 | } |
484 | 496 | } |
485 | 497 | |
486 | # these are deprecated as of SQLA 1.79 - make sure we do not transform | |
498 | # these are deprecated as of SQLAC 1.79 - make sure we do not transform | |
487 | 499 | # them without losing the warning |
488 | 500 | for my $lhs (undef, '') { |
489 | 501 | for my $rhs ( \"baz", \[ "baz" ] ) { |
24 | 24 | |
25 | 25 | use DBICTest ':DiffSQL'; |
26 | 26 | |
27 | use DBIx::Class::SQLMaker::LimitDialects; | |
28 | my $ROWS = DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype; | |
29 | my $TOTAL = DBIx::Class::SQLMaker::LimitDialects->__total_bindtype; | |
27 | my $ROWS = DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype; | |
28 | my $TOTAL = DBIx::Class::SQLMaker::ClassicExtensions->__total_bindtype; | |
30 | 29 | |
31 | 30 | for my $q ( '', '"' ) { |
32 | 31 |
8 | 8 | use DBIx::Class::SQLMaker; |
9 | 9 | my $sa = DBIx::Class::SQLMaker->new; |
10 | 10 | |
11 | $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract syntax are deprecated/ ); | |
11 | $SIG{__WARN__} = sigwarn_silencer( qr/\Q{from} structures with conditions not conforming to the SQL::Abstract::Classic syntax are deprecated/ ); | |
12 | 12 | |
13 | 13 | my @j = ( |
14 | 14 | { child => 'person' }, |
3 | 3 | use Test::More; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest ':DiffSQL'; |
6 | use DBIx::Class::SQLMaker::LimitDialects; | |
7 | 6 | |
8 | 7 | my ($LIMIT, $OFFSET) = ( |
9 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
10 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
8 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
11 | 10 | ); |
12 | 11 | |
13 | 12 | my $schema = DBICTest->init_schema; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use List::Util 'min'; |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 7 | my ($ROWS, $TOTAL, $OFFSET) = ( |
9 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
10 | DBIx::Class::SQLMaker::LimitDialects->__total_bindtype, | |
11 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
8 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__total_bindtype, | |
10 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
12 | 11 | ); |
13 | 12 | |
14 | 13 |
2 | 2 | use Test::More; |
3 | 3 | use lib qw(t/lib); |
4 | 4 | use DBICTest ':DiffSQL'; |
5 | use DBIx::Class::SQLMaker::LimitDialects; | |
6 | my $OFFSET = DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype; | |
7 | my $TOTAL = DBIx::Class::SQLMaker::LimitDialects->__total_bindtype; | |
5 | ||
6 | my $OFFSET = DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype; | |
7 | my $TOTAL = DBIx::Class::SQLMaker::ClassicExtensions->__total_bindtype; | |
8 | 8 | |
9 | 9 | my $schema = DBICTest->init_schema ( |
10 | 10 | storage_type => 'DBIx::Class::Storage::DBI::MSSQL', |
3 | 3 | use Test::More; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest ':DiffSQL'; |
6 | use DBIx::Class::SQLMaker::LimitDialects; | |
7 | 6 | |
8 | 7 | my ($TOTAL, $OFFSET) = ( |
9 | DBIx::Class::SQLMaker::LimitDialects->__total_bindtype, | |
10 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
8 | DBIx::Class::SQLMaker::ClassicExtensions->__total_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
11 | 10 | ); |
12 | 11 | |
13 | 12 | my $schema = DBICTest->init_schema; |
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest ':DiffSQL'; |
7 | use DBIx::Class::SQLMaker::LimitDialects; | |
8 | 7 | |
9 | 8 | my ($TOTAL, $OFFSET, $ROWS) = ( |
10 | DBIx::Class::SQLMaker::LimitDialects->__total_bindtype, | |
11 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
12 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__total_bindtype, | |
10 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
11 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
13 | 12 | ); |
14 | 13 | |
15 | 14 | my $s = DBICTest->init_schema (no_deploy => 1, ); |
3 | 3 | use Test::More; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest ':DiffSQL'; |
6 | use DBIx::Class::SQLMaker::LimitDialects; | |
7 | 6 | |
8 | 7 | my ($LIMIT, $OFFSET) = ( |
9 | DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype, | |
10 | DBIx::Class::SQLMaker::LimitDialects->__offset_bindtype, | |
8 | DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype, | |
9 | DBIx::Class::SQLMaker::ClassicExtensions->__offset_bindtype, | |
11 | 10 | ); |
12 | 11 | |
13 | 12 | my $schema = DBICTest->init_schema; |
12 | 12 | use lib qw(t/lib); |
13 | 13 | use DBICTest ':DiffSQL'; |
14 | 14 | use DBIx::Class::SQLMaker::Oracle; |
15 | ||
16 | # FIXME - TEMPORARY until this merges with master | |
17 | use constant IGNORE_NONLOCAL_BINDTYPES => 1; | |
15 | 18 | |
16 | 19 | # |
17 | 20 | # Offline test for connect_by |
173 | 176 | [ [ a_timestamp => '2010-05-26 18:22:00' ], [ id => UREF ], [ a_computed_column => UREF ], [ charfield => UREF ] ], |
174 | 177 | 'sql_maker generates insert returning for multiple columns' |
175 | 178 | ); |
179 | ||
180 | ||
181 | # offline version of a couple live tests | |
182 | ||
183 | my $schema = DBICTest->init_schema( | |
184 | # pretend this is Oracle | |
185 | storage_type => '::DBI::Oracle::Generic', | |
186 | quote_names => $q, | |
187 | ); | |
188 | ||
189 | # This one is testing ROWNUM, thus not directly executable on SQLite | |
190 | is_same_sql_bind( | |
191 | $schema->resultset('CD')->search(undef, { | |
192 | prefetch => 'very_long_artist_relationship', | |
193 | rows => 3, | |
194 | offset => 0, | |
195 | })->as_query, | |
196 | "( | |
197 | SELECT ${q}me${q}.${q}cdid${q}, ${q}me${q}.${q}artist${q}, ${q}me${q}.${q}title${q}, ${q}me${q}.${q}year${q}, ${q}me${q}.${q}genreid${q}, ${q}me${q}.${q}single_track${q}, | |
198 | ${q}VryLngArtstRltnshpA_5L2NK8TAMJ${q}, ${q}VryLngArtstRltnshpN_AZ6MM6EO7A${q}, ${q}VryLngArtstRltnshpR_D3D5S4YO5D${q}, ${q}VryLngArtstRltnshpC_94JLUHA0OX${q} | |
199 | FROM ( | |
200 | SELECT ${q}me${q}.${q}cdid${q}, ${q}me${q}.${q}artist${q}, ${q}me${q}.${q}title${q}, ${q}me${q}.${q}year${q}, ${q}me${q}.${q}genreid${q}, ${q}me${q}.${q}single_track${q}, | |
201 | ${q}very_long_artist_relationship${q}.${q}artistid${q} AS ${q}VryLngArtstRltnshpA_5L2NK8TAMJ${q}, | |
202 | ${q}very_long_artist_relationship${q}.${q}name${q} AS ${q}VryLngArtstRltnshpN_AZ6MM6EO7A${q}, | |
203 | ${q}very_long_artist_relationship${q}.${q}rank${q} AS ${q}VryLngArtstRltnshpR_D3D5S4YO5D${q}, | |
204 | ${q}very_long_artist_relationship${q}.${q}charfield${q} AS ${q}VryLngArtstRltnshpC_94JLUHA0OX${q} | |
205 | FROM cd ${q}me${q} | |
206 | JOIN ${q}artist${q} ${q}very_long_artist_relationship${q} | |
207 | ON ${q}very_long_artist_relationship${q}.${q}artistid${q} = ${q}me${q}.${q}artist${q} | |
208 | ||
209 | ) ${q}me${q} | |
210 | WHERE ROWNUM <= ? | |
211 | )", | |
212 | [ | |
213 | [ $sqla_oracle->__rows_bindtype => 3 ], | |
214 | ], | |
215 | 'Basic test of identifiers over the 30 char limit' | |
216 | ); | |
217 | ||
218 | ||
219 | # but the rest are directly runnable | |
220 | $schema->is_executed_sql_bind( | |
221 | sub { | |
222 | my @rows = $schema->resultset('Artist')->search( | |
223 | { 'cds_very_very_very_long_relationship_name.title' => { '!=', 'EP C' } }, | |
224 | { | |
225 | prefetch => 'cds_very_very_very_long_relationship_name', | |
226 | order_by => 'cds_very_very_very_long_relationship_name.title', | |
227 | } | |
228 | )->all; | |
229 | ||
230 | isa_ok( | |
231 | $rows[0], | |
232 | 'DBICTest::Schema::Artist', | |
233 | 'At least one artist from db', | |
234 | ); | |
235 | }, | |
236 | [[ | |
237 | "SELECT ${q}me${q}.${q}artistid${q}, ${q}me${q}.${q}name${q}, ${q}me${q}.${q}rank${q}, ${q}me${q}.${q}charfield${q}, | |
238 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}cdid${q}, | |
239 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q}, | |
240 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q}, | |
241 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}year${q}, | |
242 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}genreid${q}, | |
243 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}single_track${q} | |
244 | FROM ${q}artist${q} ${q}me${q} | |
245 | LEFT JOIN cd ${q}CdsVryVryVryLngRltn_3BW932XK2E${q} | |
246 | ON ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q} = ${q}me${q}.${q}artistid${q} | |
247 | WHERE ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} != ? | |
248 | ORDER BY ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} | |
249 | ", | |
250 | ( IGNORE_NONLOCAL_BINDTYPES ? 'EP C' : [{ | |
251 | dbic_colname => 'cds_very_very_very_long_relationship_name.title', | |
252 | sqlt_datatype => 'varchar', | |
253 | sqlt_size => 100, | |
254 | } => 'EP C' ] ), | |
255 | ]], | |
256 | 'rel name over 30 char limit with user condition, requiring walking the WHERE data structure', | |
257 | ); | |
258 | ||
259 | my $pain_rs = $schema->resultset('Artist')->search( | |
260 | { 'me.artistid' => 1 }, | |
261 | { | |
262 | join => 'cds_very_very_very_long_relationship_name', | |
263 | select => 'cds_very_very_very_long_relationship_name.title', | |
264 | as => 'title', | |
265 | group_by => 'cds_very_very_very_long_relationship_name.title', | |
266 | } | |
267 | ); | |
268 | ||
269 | $schema->is_executed_sql_bind( | |
270 | sub { | |
271 | my $megapain_rs = $pain_rs->search( | |
272 | {}, | |
273 | { | |
274 | prefetch => { cds_very_very_very_long_relationship_name => 'very_long_artist_relationship' }, | |
275 | having => { 'cds_very_very_very_long_relationship_name.title' => { '!=', '' } }, | |
276 | }, | |
277 | ); | |
278 | ||
279 | isa_ok( | |
280 | ( $megapain_rs->all )[0], | |
281 | 'DBICTest::Schema::Artist', | |
282 | 'At least one artist from db', | |
283 | ); | |
284 | ||
285 | ok( | |
286 | defined( ( $megapain_rs->get_column('title')->all )[0] ), | |
287 | 'get_column returns a non-null result' | |
288 | ); | |
289 | }, | |
290 | [ | |
291 | [ | |
292 | "SELECT ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q}, | |
293 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}cdid${q}, | |
294 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q}, | |
295 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q}, | |
296 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}year${q}, | |
297 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}genreid${q}, | |
298 | ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}single_track${q}, | |
299 | ${q}very_long_artist_relationship${q}.${q}artistid${q}, | |
300 | ${q}very_long_artist_relationship${q}.${q}name${q}, | |
301 | ${q}very_long_artist_relationship${q}.${q}rank${q}, | |
302 | ${q}very_long_artist_relationship${q}.${q}charfield${q} | |
303 | FROM ${q}artist${q} ${q}me${q} | |
304 | LEFT JOIN cd ${q}CdsVryVryVryLngRltn_3BW932XK2E${q} | |
305 | ON ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q} = ${q}me${q}.${q}artistid${q} | |
306 | LEFT JOIN ${q}artist${q} ${q}very_long_artist_relationship${q} | |
307 | ON ${q}very_long_artist_relationship${q}.${q}artistid${q} = ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q} | |
308 | WHERE ${q}me${q}.${q}artistid${q} = ? | |
309 | GROUP BY ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} | |
310 | HAVING ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} != ? | |
311 | ", | |
312 | [{ | |
313 | dbic_colname => 'me.artistid', | |
314 | sqlt_datatype => 'integer', | |
315 | } => 1 ], | |
316 | ( IGNORE_NONLOCAL_BINDTYPES ? '' : [{ | |
317 | dbic_colname => 'cds_very_very_very_long_relationship_name.title', | |
318 | sqlt_datatype => 'varchar', | |
319 | sqlt_size => 100, | |
320 | } => '' ] ), | |
321 | ], | |
322 | [ | |
323 | "SELECT ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} | |
324 | FROM ${q}artist${q} ${q}me${q} | |
325 | LEFT JOIN cd ${q}CdsVryVryVryLngRltn_3BW932XK2E${q} | |
326 | ON ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q} = ${q}me${q}.${q}artistid${q} | |
327 | WHERE ${q}me${q}.${q}artistid${q} = ? | |
328 | GROUP BY ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} | |
329 | HAVING ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} != ? | |
330 | ", | |
331 | [{ | |
332 | dbic_colname => 'me.artistid', | |
333 | sqlt_datatype => 'integer', | |
334 | } => 1 ], | |
335 | ( IGNORE_NONLOCAL_BINDTYPES ? '' : [{ | |
336 | dbic_colname => 'cds_very_very_very_long_relationship_name.title', | |
337 | sqlt_datatype => 'varchar', | |
338 | sqlt_size => 100, | |
339 | } => '' ] ), | |
340 | ], | |
341 | ], | |
342 | 'rel names over the 30 char limit using group_by/having and join' | |
343 | ); | |
344 | ||
345 | ||
346 | is_same_sql_bind( | |
347 | $pain_rs->count_rs->as_query, | |
348 | "( | |
349 | SELECT COUNT( * ) | |
350 | FROM ( | |
351 | SELECT ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} AS ${q}CdsVryVryVryLngRltn_7TT4PIXZGX${q} | |
352 | FROM ${q}artist${q} ${q}me${q} | |
353 | LEFT JOIN cd ${q}CdsVryVryVryLngRltn_3BW932XK2E${q} ON ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}artist${q} = ${q}me${q}.${q}artistid${q} | |
354 | WHERE ${q}me${q}.${q}artistid${q} = ? | |
355 | GROUP BY ${q}CdsVryVryVryLngRltn_3BW932XK2E${q}.${q}title${q} | |
356 | ) ${q}me${q} | |
357 | )", | |
358 | [ | |
359 | [{ | |
360 | dbic_colname => 'me.artistid', | |
361 | sqlt_datatype => 'integer', | |
362 | } => 1 ], | |
363 | ], | |
364 | 'Expected count subquery', | |
365 | ); | |
176 | 366 | } |
177 | 367 | |
178 | 368 | done_testing; |
66 | 66 | )}, |
67 | 67 | [ |
68 | 68 | [ { sqlt_datatype => 'integer', dbic_colname => 'me.rank' } => 13 ], |
69 | [ DBIx::Class::SQLMaker::LimitDialects->__rows_bindtype => 1 ], | |
69 | [ DBIx::Class::SQLMaker::ClassicExtensions->__rows_bindtype => 1 ], | |
70 | 70 | [ { sqlt_datatype => 'integer', dbic_colname => 'me.rank' } => 13 ], |
71 | 71 | ], |
72 | 72 | 'correct SQL with aggregate boolean order on Pg', |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | # test relies on the original default | |
4 | BEGIN { delete @ENV{qw( DBICTEST_SWAPOUT_SQLAC_WITH )} } | |
5 | ||
6 | use Test::More; | |
7 | ||
8 | use lib qw(t/lib); | |
9 | use DBICTest ':DiffSQL'; | |
10 | ||
11 | my $base_schema = DBICTest->init_schema( | |
12 | no_deploy => 1, | |
13 | ); | |
14 | ||
15 | my $schema = $base_schema->connect( | |
16 | sub { | |
17 | $base_schema->storage->dbh | |
18 | }, | |
19 | { | |
20 | on_connect_call => [ [ rebase_sqlmaker => 'DBICTest::SQLMRebase' ] ], | |
21 | }, | |
22 | ); | |
23 | ||
24 | ok (! $base_schema->storage->connected, 'No connection on base schema yet'); | |
25 | ok (! $schema->storage->connected, 'No connection on experimental schema yet'); | |
26 | ||
27 | $schema->storage->ensure_connected; | |
28 | ||
29 | is( | |
30 | $schema->storage->sql_maker->__select_counter, | |
31 | undef, | |
32 | "No statements registered yet", | |
33 | ); | |
34 | ||
35 | is_deeply( | |
36 | mro::get_linear_isa( ref( $schema->storage->sql_maker ) ), | |
37 | [ | |
38 | qw( | |
39 | DBIx::Class::SQLMaker::SQLite__REBASED_ON__DBICTest::SQLMRebase | |
40 | DBIx::Class::SQLMaker::SQLite | |
41 | DBIx::Class::SQLMaker | |
42 | DBICTest::SQLMRebase | |
43 | DBIx::Class::SQLMaker::ClassicExtensions | |
44 | ), | |
45 | @{ mro::get_linear_isa( 'DBIx::Class' ) }, | |
46 | @{ mro::get_linear_isa( 'SQL::Abstract::Classic' ) }, | |
47 | ], | |
48 | 'Expected SQLM object inheritance after rebase', | |
49 | ); | |
50 | ||
51 | ||
52 | $schema->resultset('Artist')->count_rs->as_query; | |
53 | ||
54 | is( | |
55 | $schema->storage->sql_maker->__select_counter, | |
56 | 1, | |
57 | "1 SELECT fired off, tickling override", | |
58 | ); | |
59 | ||
60 | ||
61 | $base_schema->resultset('Artist')->count_rs->as_query; | |
62 | ||
63 | is( | |
64 | ref( $base_schema->storage->sql_maker ), | |
65 | 'DBIx::Class::SQLMaker::SQLite', | |
66 | 'Expected core SQLM object on original schema remains', | |
67 | ); | |
68 | ||
69 | is( | |
70 | $schema->storage->sql_maker->__select_counter, | |
71 | 1, | |
72 | "No further SELECTs seen by experimental override", | |
73 | ); | |
74 | ||
75 | ||
76 | done_testing; |
176 | 176 | # make sure connection-less storages do not throw on _determine_driver |
177 | 177 | # but work with ENV at the same time |
178 | 178 | SKIP: for my $env_dsn (undef, (DBICTest->_database)[0] ) { |
179 | skip 'Subtest relies on being connected to SQLite', 1 | |
180 | if $env_dsn and $env_dsn !~ /\:SQLite\:/; | |
179 | skip( 'Subtest relies on being connected to SQLite without overrides', 1 ) if ( | |
180 | $ENV{DBICTEST_SWAPOUT_SQLAC_WITH} | |
181 | or | |
182 | ( $env_dsn and $env_dsn !~ /\:SQLite\:/ ) | |
183 | ); | |
181 | 184 | |
182 | 185 | local $ENV{DBI_DSN} = $env_dsn || ''; |
183 | 186 |
0 | use strict; | |
1 | use warnings; | |
2 | use lib qw(t/lib); | |
3 | use DBICTest; | |
4 | use Test::More; | |
5 | ||
6 | BEGIN { | |
7 | require DBIx::Class; | |
8 | plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_prettydebug') | |
9 | unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_prettydebug'); | |
10 | } | |
11 | ||
12 | BEGIN { delete @ENV{qw(DBIC_TRACE_PROFILE)} } | |
13 | ||
14 | { | |
15 | my $schema = DBICTest->init_schema; | |
16 | ||
17 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Statistics'); | |
18 | } | |
19 | ||
20 | { | |
21 | local $ENV{DBIC_TRACE_PROFILE} = 'console'; | |
22 | ||
23 | my $schema = DBICTest->init_schema; | |
24 | ||
25 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Debug::PrettyPrint');; | |
26 | is($schema->storage->debugobj->_sqlat->indent_string, ' ', 'indent string set correctly from console profile'); | |
27 | } | |
28 | ||
29 | { | |
30 | local $ENV{DBIC_TRACE_PROFILE} = './t/lib/awesome.json'; | |
31 | ||
32 | my $schema = DBICTest->init_schema; | |
33 | ||
34 | isa_ok($schema->storage->debugobj, 'DBIx::Class::Storage::Debug::PrettyPrint');; | |
35 | is($schema->storage->debugobj->_sqlat->indent_string, 'frioux', 'indent string set correctly from file-based profile'); | |
36 | } | |
37 | ||
38 | done_testing; |
0 | use strict; | |
1 | use warnings; | |
2 | no warnings 'once'; | |
3 | ||
4 | use Test::More; | |
5 | use Test::Exception; | |
6 | use Try::Tiny; | |
7 | use File::Spec; | |
8 | use lib qw(t/lib); | |
9 | use DBICTest; | |
10 | use Path::Class qw/file/; | |
11 | ||
12 | # something deep in Path::Class - mainline ditched it altogether | |
13 | plan skip_all => "Test is finicky under -T before 5.10" | |
14 | if "$]" < 5.010 and ${^TAINT}; | |
15 | ||
16 | BEGIN { delete @ENV{qw(DBIC_TRACE DBIC_TRACE_PROFILE DBICTEST_SQLITE_USE_FILE)} } | |
17 | ||
18 | my $schema = DBICTest->init_schema(); | |
19 | ||
20 | my $lfn = file("t/var/sql-$$.log"); | |
21 | unlink $lfn or die $! | |
22 | if -e $lfn; | |
23 | ||
24 | # make sure we are testing the vanilla debugger and not ::PrettyPrint | |
25 | require DBIx::Class::Storage::Statistics; | |
26 | $schema->storage->debugobj(DBIx::Class::Storage::Statistics->new); | |
27 | ||
28 | ok ( $schema->storage->debug(1), 'debug' ); | |
29 | $schema->storage->debugfh($lfn->openw); | |
30 | $schema->storage->debugfh->autoflush(1); | |
31 | $schema->resultset('CD')->count; | |
32 | ||
33 | my @loglines = $lfn->slurp; | |
34 | is (@loglines, 1, 'one line of log'); | |
35 | like($loglines[0], qr/^SELECT COUNT/, 'File log via debugfh success'); | |
36 | ||
37 | $schema->storage->debugfh(undef); | |
38 | ||
39 | { | |
40 | local $ENV{DBIC_TRACE} = "=$lfn"; | |
41 | unlink $lfn; | |
42 | ||
43 | $schema->resultset('CD')->count; | |
44 | ||
45 | my $schema2 = DBICTest->init_schema(no_deploy => 1); | |
46 | $schema2->storage->_do_query('SELECT 1'); # _do_query() logs via standard mechanisms | |
47 | ||
48 | my @loglines = $lfn->slurp; | |
49 | is(@loglines, 2, '2 lines of log'); | |
50 | like($loglines[0], qr/^SELECT COUNT/, 'Env log from schema1 success'); | |
51 | like($loglines[1], qr/^SELECT 1:/, 'Env log from schema2 success'); | |
52 | ||
53 | $schema->storage->debugobj->debugfh(undef) | |
54 | } | |
55 | ||
56 | END { | |
57 | unlink $lfn if $lfn; | |
58 | } | |
59 | ||
60 | open(STDERRCOPY, '>&STDERR'); | |
61 | ||
62 | my $exception_line_number; | |
63 | # STDERR will be closed, no T::B diag in blocks | |
64 | my $exception = try { | |
65 | close(STDERR); | |
66 | $exception_line_number = __LINE__ + 1; # important for test, do not reformat | |
67 | $schema->resultset('CD')->search({})->count; | |
68 | } catch { | |
69 | $_ | |
70 | } finally { | |
71 | # restore STDERR | |
72 | open(STDERR, '>&STDERRCOPY'); | |
73 | }; | |
74 | ||
75 | ok $exception =~ / | |
76 | \QDuplication of STDERR for debug output failed (perhaps your STDERR is closed?)\E | |
77 | .+ | |
78 | \Qat @{[__FILE__]} line $exception_line_number\E$ | |
79 | /xms | |
80 | or diag "Unexpected exception text:\n\n$exception\n"; | |
81 | ||
82 | my @warnings; | |
83 | $exception = try { | |
84 | local $SIG{__WARN__} = sub { push @warnings, @_ if $_[0] =~ /character/i }; | |
85 | close STDERR; | |
86 | open(STDERR, '>', File::Spec->devnull) or die $!; | |
87 | $schema->resultset('CD')->search({ title => "\x{1f4a9}" })->count; | |
88 | ''; | |
89 | } catch { | |
90 | $_; | |
91 | } finally { | |
92 | # restore STDERR | |
93 | close STDERR; | |
94 | open(STDERR, '>&STDERRCOPY'); | |
95 | }; | |
96 | ||
97 | die "How did that fail... $exception" | |
98 | if $exception; | |
99 | ||
100 | is_deeply(\@warnings, [], 'No warnings with unicode on STDERR'); | |
101 | ||
102 | # test debugcb and debugobj protocol | |
103 | { | |
104 | my $rs = $schema->resultset('CD')->search( { | |
105 | artist => 1, | |
106 | cdid => { -between => [ 1, 3 ] }, | |
107 | title => { '!=' => \[ '?', undef ] } | |
108 | }); | |
109 | ||
110 | my $sql_trace = 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( ( artist = ? AND ( cdid BETWEEN ? AND ? ) AND title != ? ) )'; | |
111 | my @bind_trace = qw( '1' '1' '3' NULL ); # quotes are in fact part of the trace </facepalm> | |
112 | ||
113 | ||
114 | my @args; | |
115 | $schema->storage->debugcb(sub { push @args, @_ } ); | |
116 | ||
117 | $rs->all; | |
118 | ||
119 | is_deeply( \@args, [ | |
120 | "SELECT", | |
121 | sprintf( "%s: %s\n", $sql_trace, join ', ', @bind_trace ), | |
122 | ]); | |
123 | ||
124 | { | |
125 | package DBICTest::DebugObj; | |
126 | our @ISA = 'DBIx::Class::Storage::Statistics'; | |
127 | ||
128 | sub query_start { | |
129 | my $self = shift; | |
130 | ( $self->{_traced_sql}, @{$self->{_traced_bind}} ) = @_; | |
131 | } | |
132 | } | |
133 | ||
134 | my $do = $schema->storage->debugobj(DBICTest::DebugObj->new); | |
135 | ||
136 | $rs->all; | |
137 | ||
138 | is( $do->{_traced_sql}, $sql_trace ); | |
139 | ||
140 | is_deeply ( $do->{_traced_bind}, \@bind_trace ); | |
141 | } | |
142 | ||
143 | # recreate test as seen in DBIx::Class::QueryLog | |
144 | # the rationale is that if someone uses a non-IO::Handle object | |
145 | # on CPAN, many are *bound* to use one on darkpan. Thus this | |
146 | # test to ensure there is no future silent breakage | |
147 | { | |
148 | my $output = ""; | |
149 | ||
150 | { | |
151 | package DBICTest::_Printable; | |
152 | ||
153 | sub print { | |
154 | my ($self, @args) = @_; | |
155 | $output .= join('', @args); | |
156 | } | |
157 | } | |
158 | ||
159 | $schema->storage->debugobj(undef); | |
160 | $schema->storage->debug(1); | |
161 | $schema->storage->debugfh( bless {}, "DBICTest::_Printable" ); | |
162 | $schema->storage->txn_do( sub { $schema->resultset('Artist')->count } ); | |
163 | ||
164 | like ( | |
165 | $output, | |
166 | qr/ | |
167 | \A | |
168 | ^ \QBEGIN WORK\E \s*? | |
169 | ^ \QSELECT COUNT( * ) FROM artist me:\E \s*? | |
170 | ^ \QCOMMIT\E \s*? | |
171 | \z | |
172 | /xm | |
173 | ); | |
174 | ||
175 | $schema->storage->debug(0); | |
176 | $schema->storage->debugfh(undef); | |
177 | } | |
178 | ||
179 | done_testing; |
15 | 15 | use strict; |
16 | 16 | |
17 | 17 | use base 'DBIx::Class::Storage::DBI'; |
18 | ||
19 | __PACKAGE__->sql_limit_dialect ('LimitOffset'); | |
18 | 20 | |
19 | 21 | sub _populate_dbh { |
20 | 22 | my $self = shift; |
20 | 20 | |
21 | 21 | use Test::Moose; |
22 | 22 | use Test::Exception; |
23 | use List::Util 'first'; | |
24 | 23 | use Scalar::Util 'reftype'; |
25 | 24 | use File::Spec; |
26 | 25 | use Moose(); |
377 | 376 | ## Silence warning about not supporting the is_replicating method if using the |
378 | 377 | ## sqlite dbs. |
379 | 378 | $replicated->schema->storage->debugobj->silence(1) |
380 | if first { $_ =~ /$var_dir/ } @replicant_names; | |
379 | if grep { $_ =~ /$var_dir/ } @replicant_names; | |
381 | 380 | |
382 | 381 | isa_ok $replicated->schema->storage->balancer->current_replicant |
383 | 382 | => 'DBIx::Class::Storage::DBI'; |
425 | 424 | ## Silence warning about not supporting the is_replicating method if using the |
426 | 425 | ## sqlite dbs. |
427 | 426 | $replicated->schema->storage->debugobj->silence(1) |
428 | if first { $_ =~ /$var_dir/ } @replicant_names; | |
427 | if grep { $_ =~ /$var_dir/ } @replicant_names; | |
429 | 428 | |
430 | 429 | $replicated->schema->storage->pool->validate_replicants; |
431 | 430 | |
608 | 607 | ## Silence warning about not supporting the is_replicating method if using the |
609 | 608 | ## sqlite dbs. |
610 | 609 | $replicated->schema->storage->debugobj->silence(1) |
611 | if first { $_ =~ /$var_dir/ } @replicant_names; | |
610 | if grep { $_ =~ /$var_dir/ } @replicant_names; | |
612 | 611 | |
613 | 612 | $replicated->schema->storage->pool->validate_replicants; |
614 | 613 |
1 | 1 | use strict; |
2 | 2 | |
3 | 3 | use Test::More; |
4 | use List::Util 'first'; | |
5 | 4 | use lib qw(t/lib maint/.Generated_Pod/lib); |
6 | 5 | use DBICTest; |
7 | use namespace::clean; | |
8 | 6 | |
9 | 7 | plan skip_all => "Skipping finicky test on older perl" |
10 | 8 | if "$]" < 5.008005; |
117 | 115 | /] |
118 | 116 | }, |
119 | 117 | |
118 | 'DBIx::Class::Storage::Debug::PrettyTrace' => { | |
119 | ignore => [ qw/ | |
120 | ||
121 | query_start | |
122 | query_end | |
123 | /] | |
124 | }, | |
125 | ||
120 | 126 | 'DBIx::Class::Admin::*' => { skip => 1 }, |
121 | 127 | 'DBIx::Class::Optional::Dependencies' => { skip => 1 }, |
122 | 128 | 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 }, |
133 | 139 | |
134 | 140 | # test some specific components whose parents are exempt below |
135 | 141 | 'DBIx::Class::Relationship::Base' => {}, |
136 | 'DBIx::Class::SQLMaker::LimitDialects' => {}, | |
137 | 142 | |
138 | 143 | # internals |
139 | 144 | 'DBIx::Class::_Util' => { skip => 1 }, |
150 | 155 | |
151 | 156 | # skipped because the synopsis covers it clearly |
152 | 157 | 'DBIx::Class::InflateColumn::File' => { skip => 1 }, |
153 | ||
154 | # internal subclass, nothing to POD | |
155 | 'DBIx::Class::ResultSet::Pager' => { skip => 1 }, | |
156 | 158 | }; |
157 | 159 | |
158 | 160 | my $ex_lookup = {}; |
169 | 171 | SKIP: { |
170 | 172 | |
171 | 173 | my ($match) = |
172 | first { $module =~ $_ } | |
174 | grep { $module =~ $_ } | |
173 | 175 | (sort { length $b <=> length $a || $b cmp $a } (keys %$ex_lookup) ) |
174 | 176 | ; |
175 | 177 |