new upstream release 0.08112
Jose Luis Rivas Contreras
14 years ago
0 | 0 | Revision history for DBIx::Class |
1 | ||
2 | 0.08112 2009-09-21 10:57:00 (UTC) | |
3 | - Remove the recommends from Makefile.PL, DBIx::Class is not | |
4 | supposed to have optional dependencies. ever. | |
5 | - Mangle the DBIx/Class.pm POD to be more clear about | |
6 | copyright and license | |
7 | - Put back PG's multiple autoinc per table support, accidentally | |
8 | dropped during the serial-autodetection rewrite | |
9 | - Make sure ResultSetColumn does not depend on the (undefined) | |
10 | return value of ->cursor->reset() | |
11 | - Add single() to ResultSetColumn (same semantics as ResultSet) | |
12 | - Make sure to turn off IDENTITY_INSERT after insert() on MSSQL | |
13 | tables that needed it | |
14 | - More informative exception on failing _resolve_relationship | |
15 | - Allow undef/NULL as the sole grouping value in Ordered | |
16 | - Fix unreported rollback exceptions in TxnScopeGuard | |
17 | - Fix overly-eager left-join chain enforcing code | |
18 | - Warn about using distinct with an existing group_by | |
19 | - Warn about attempting to $rs->get_column a non-unique column | |
20 | when has_many joins are added to resultset | |
21 | - Refactor of the exception handling system (now everything is a | |
22 | DBIx::Class::Exception object) | |
1 | 23 | |
2 | 24 | 0.08111 2009-09-06 21:58:00 (UTC) |
3 | 25 | - The hashref to connection_info now accepts a 'dbh_maker' |
40 | 62 | - Support for MSSQL 'money' type |
41 | 63 | - Support for 'smalldatetime' type used in MSSQL and Sybase for |
42 | 64 | InflateColumn::DateTime |
43 | - support for Postgres 'timestamp without timezone' type in | |
65 | - Support for Postgres 'timestamp without timezone' type in | |
44 | 66 | InflateColumn::DateTime (RT#48389) |
45 | 67 | - Added new MySQL specific on_connect_call macro 'set_strict_mode' |
46 | 68 | (also known as make_mysql_not_suck_as_much) |
427 | 427 | t/prefetch/double_prefetch.t |
428 | 428 | t/prefetch/grouped.t |
429 | 429 | t/prefetch/incomplete.t |
430 | t/prefetch/join_type.t | |
430 | 431 | t/prefetch/multiple_hasmany.t |
431 | 432 | t/prefetch/standard.t |
432 | 433 | t/prefetch/via_search_related.t |
29 | 29 | - examples |
30 | 30 | - inc |
31 | 31 | - t |
32 | recommends: | |
33 | SQL::Translator: 0.11002 | |
34 | 32 | requires: |
35 | 33 | Carp::Clan: 6.0 |
36 | 34 | Class::Accessor::Grouped: 0.09000 |
56 | 54 | MailingList: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class |
57 | 55 | license: http://dev.perl.org/licenses/ |
58 | 56 | repository: http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/ |
59 | version: 0.08111 | |
57 | version: 0.08112 |
3 | 3 | use POSIX (); |
4 | 4 | |
5 | 5 | use 5.006001; # delete this line if you want to send patches for earlier. |
6 | ||
7 | # ****** DO NOT ADD OPTIONAL DEPENDENCIES. EVER. --mst ****** | |
6 | 8 | |
7 | 9 | name 'DBIx-Class'; |
8 | 10 | perl_version '5.006001'; |
51 | 53 | 'Hash::Merge', => '0.11', |
52 | 54 | ); |
53 | 55 | |
54 | # when changing also adjust $DBIx::Class::minimum_sqlt_version | |
55 | my $sqlt_recommends = '0.11002'; | |
56 | ||
57 | recommends 'SQL::Translator' => $sqlt_recommends; | |
58 | ||
56 | #************************************************************************# | |
57 | # Make *ABSOLUTELY SURE* that nothing on this list is a real require, # | |
58 | # since every module listed in %force_requires_if_author is deleted # | |
59 | # from the final META.yml (thus will never make it as a CPAN dependency) # | |
60 | #************************************************************************# | |
59 | 61 | my %force_requires_if_author = ( |
60 | 62 | %replication_requires, |
61 | 63 | |
64 | # when changing also adjust $DBIx::Class::Storage::DBI::minimum_sqlt_version | |
65 | 'SQL::Translator' => '0.11002', | |
66 | ||
62 | 67 | # 'Module::Install::Pod::Inherit' => '0.01', |
63 | 'SQL::Translator' => $sqlt_recommends, | |
64 | 68 | |
65 | 69 | # when changing also adjust version in t/02pod.t |
66 | 70 | 'Test::Pod' => '1.26', |
110 | 114 | ) : () |
111 | 115 | , |
112 | 116 | ); |
117 | #************************************************************************# | |
118 | # Make ABSOLUTELY SURE that nothing on the list above is a real require, # | |
119 | # since every module listed in %force_requires_if_author is deleted # | |
120 | # from the final META.yml (thus will never make it as a CPAN dependency) # | |
121 | #************************************************************************# | |
113 | 122 | |
114 | 123 | |
115 | 124 | install_script (qw| |
176 | 176 | |
177 | 177 | bricas: Brian Cassidy <bricas@cpan.org> |
178 | 178 | |
179 | brunov: Bruno Vecchi <vecchi.b@gmail.com> | |
180 | ||
179 | 181 | caelum: Rafael Kitover <rkitover@cpan.org> |
180 | 182 | |
181 | 183 | castaway: Jess Robinson |
0 | libdbix-class-perl (0.08111-1) UNRELEASED; urgency=low | |
0 | libdbix-class-perl (0.08112-1) UNRELEASED; urgency=low | |
1 | 1 | |
2 | 2 | [ Jonathan Yu ] |
3 | 3 | WAITS for advice from Peter Rabbitson/ribasushi (upstream maintainer). He |
24 | 24 | * Update environment variables for test suite in debian/rules. |
25 | 25 | * debian/copyright: update list of copyright holders. |
26 | 26 | |
27 | -- Ryan Niebur <ryan@debian.org> Fri, 25 Sep 2009 00:24:35 -0700 | |
27 | [ Jose Luis Rivas ] | |
28 | * New upstream release 0.08112 | |
29 | ||
30 | -- Jose Luis Rivas <ghostbar@debian.org> Fri, 25 Sep 2009 07:27:45 -0430 | |
28 | 31 | |
29 | 32 | libdbix-class-perl (0.08108-1) unstable; urgency=low |
30 | 33 |
22 | 22 | |
23 | 23 | my @cds; |
24 | 24 | foreach my $lp (keys %albums) { |
25 | my $artist = $schema->resultset('Artist')->search({ | |
25 | my $artist = $schema->resultset('Artist')->find({ | |
26 | 26 | name => $albums{$lp} |
27 | 27 | }); |
28 | push @cds, [$lp, $artist->first]; | |
28 | push @cds, [$lp, $artist->id]; | |
29 | 29 | } |
30 | 30 | |
31 | 31 | $schema->populate('Cd', [ |
46 | 46 | |
47 | 47 | my @tracks; |
48 | 48 | foreach my $track (keys %tracks) { |
49 | my $cdname = $schema->resultset('Cd')->search({ | |
49 | my $cd = $schema->resultset('Cd')->find({ | |
50 | 50 | title => $tracks{$track}, |
51 | 51 | }); |
52 | push @tracks, [$cdname->first, $track]; | |
52 | push @tracks, [$cd->id, $track]; | |
53 | 53 | } |
54 | 54 | |
55 | 55 | $schema->populate('Track',[ |
3 | 3 | use strict; |
4 | 4 | use warnings; |
5 | 5 | |
6 | ### | |
7 | # Keep this class for backwards compatibility | |
8 | ### | |
9 | ||
6 | 10 | use base 'Class::C3::Componentised'; |
7 | use Carp::Clan qw/^DBIx::Class/; | |
8 | ||
9 | sub inject_base { | |
10 | my ($class, $target, @to_inject) = @_; | |
11 | { | |
12 | no strict 'refs'; | |
13 | foreach my $to (reverse @to_inject) { | |
14 | my @comps = qw(DigestColumns ResultSetManager Ordered UTF8Columns); | |
15 | # Add components here that need to be loaded before Core | |
16 | foreach my $first_comp (@comps) { | |
17 | if ($to eq 'DBIx::Class::Core' && | |
18 | $target->isa("DBIx::Class::${first_comp}")) { | |
19 | carp "Possible incorrect order of components in ". | |
20 | "${target}::load_components($first_comp) call: Core loaded ". | |
21 | "before $first_comp. See the documentation for ". | |
22 | "DBIx::Class::$first_comp for more information"; | |
23 | } | |
24 | } | |
25 | unshift( @{"${target}::ISA"}, $to ) | |
26 | unless ($target eq $to || $target->isa($to)); | |
27 | } | |
28 | } | |
29 | ||
30 | $class->next::method($target, @to_inject); | |
31 | } | |
32 | 11 | |
33 | 12 | 1; |
154 | 154 | |
155 | 155 | my @cds; |
156 | 156 | foreach my $lp (keys %albums) { |
157 | my $artist = $schema->resultset('Artist')->search({ | |
157 | my $artist = $schema->resultset('Artist')->find({ | |
158 | 158 | name => $albums{$lp} |
159 | 159 | }); |
160 | push @cds, [$lp, $artist->first]; | |
160 | push @cds, [$lp, $artist->id]; | |
161 | 161 | } |
162 | 162 | |
163 | 163 | $schema->populate('Cd', [ |
178 | 178 | |
179 | 179 | my @tracks; |
180 | 180 | foreach my $track (keys %tracks) { |
181 | my $cdname = $schema->resultset('Cd')->search({ | |
181 | my $cdname = $schema->resultset('Cd')->find({ | |
182 | 182 | title => $tracks{$track}, |
183 | 183 | }); |
184 | push @tracks, [$cdname->first, $track]; | |
184 | push @tracks, [$cdname->id, $track]; | |
185 | 185 | } |
186 | 186 | |
187 | 187 | $schema->populate('Track',[ |
433 | 433 | sub move_to_group { |
434 | 434 | my( $self, $to_group, $to_position ) = @_; |
435 | 435 | |
436 | $self->throw_exception ('move_to_group() expects a group specification') | |
437 | unless defined $to_group; | |
438 | ||
439 | # if we're given a string, turn it into a hashref | |
436 | # if we're given a single value, turn it into a hashref | |
440 | 437 | unless (ref $to_group eq 'HASH') { |
441 | 438 | my @gcols = $self->_grouping_columns; |
442 | 439 |
6 | 6 | 'bool' => "_bool", |
7 | 7 | fallback => 1; |
8 | 8 | use Carp::Clan qw/^DBIx::Class/; |
9 | use DBIx::Class::Exception; | |
9 | 10 | use Data::Page; |
10 | 11 | use Storable; |
11 | 12 | use DBIx::Class::ResultSetColumn; |
569 | 570 | my $where = $self->_collapse_cond($self->{attrs}{where} || {}); |
570 | 571 | my $num_where = scalar keys %$where; |
571 | 572 | |
572 | my @unique_queries; | |
573 | my (@unique_queries, %seen_column_combinations); | |
573 | 574 | foreach my $name (@constraint_names) { |
574 | my @unique_cols = $self->result_source->unique_constraint_columns($name); | |
575 | my $unique_query = $self->_build_unique_query($query, \@unique_cols); | |
576 | ||
577 | my $num_cols = scalar @unique_cols; | |
575 | my @constraint_cols = $self->result_source->unique_constraint_columns($name); | |
576 | ||
577 | my $constraint_sig = join "\x00", sort @constraint_cols; | |
578 | next if $seen_column_combinations{$constraint_sig}++; | |
579 | ||
580 | my $unique_query = $self->_build_unique_query($query, \@constraint_cols); | |
581 | ||
582 | my $num_cols = scalar @constraint_cols; | |
578 | 583 | my $num_query = scalar keys %$unique_query; |
579 | 584 | |
580 | 585 | my $total = $num_query + $num_where; |
2191 | 2196 | a unique constraint that is not the primary key, or looking for |
2192 | 2197 | related rows. |
2193 | 2198 | |
2194 | If you want objects to be saved immediately, use L</find_or_create> instead. | |
2195 | ||
2196 | B<Note>: C<find_or_new> is probably not what you want when creating a | |
2197 | new row in a table that uses primary keys supplied by the | |
2198 | database. Passing in a primary key column with a value of I<undef> | |
2199 | will cause L</find> to attempt to search for a row with a value of | |
2200 | I<NULL>. | |
2199 | If you want objects to be saved immediately, use L</find_or_create> | |
2200 | instead. | |
2201 | ||
2202 | B<Note>: Take care when using C<find_or_new> with a table having | |
2203 | columns with default values that you intend to be automatically | |
2204 | supplied by the database (e.g. an auto_increment primary key column). | |
2205 | In normal usage, the value of such columns should NOT be included at | |
2206 | all in the call to C<find_or_new>, even when set to C<undef>. | |
2201 | 2207 | |
2202 | 2208 | =cut |
2203 | 2209 | |
2339 | 2345 | the find has completed and before the create has started. To avoid |
2340 | 2346 | this problem, use find_or_create() inside a transaction. |
2341 | 2347 | |
2342 | B<Note>: C<find_or_create> is probably not what you want when creating | |
2343 | a new row in a table that uses primary keys supplied by the | |
2344 | database. Passing in a primary key column with a value of I<undef> | |
2345 | will cause L</find> to attempt to search for a row with a value of | |
2346 | I<NULL>. | |
2348 | B<Note>: Take care when using C<find_or_create> with a table having | |
2349 | columns with default values that you intend to be automatically | |
2350 | supplied by the database (e.g. an auto_increment primary key column). | |
2351 | In normal usage, the value of such columns should NOT be included at | |
2352 | all in the call to C<find_or_create>, even when set to C<undef>. | |
2347 | 2353 | |
2348 | 2354 | See also L</find> and L</update_or_create>. For information on how to declare |
2349 | 2355 | unique constraints, see L<DBIx::Class::ResultSource/add_unique_constraint>. |
2406 | 2412 | See also L</find> and L</find_or_create>. For information on how to declare |
2407 | 2413 | unique constraints, see L<DBIx::Class::ResultSource/add_unique_constraint>. |
2408 | 2414 | |
2409 | B<Note>: C<update_or_create> is probably not what you want when | |
2410 | looking for a row in a table that uses primary keys supplied by the | |
2411 | database, unless you actually have a key value. Passing in a primary | |
2412 | key column with a value of I<undef> will cause L</find> to attempt to | |
2413 | search for a row with a value of I<NULL>. | |
2415 | B<Note>: Take care when using C<update_or_create> with a table having | |
2416 | columns with default values that you intend to be automatically | |
2417 | supplied by the database (e.g. an auto_increment primary key column). | |
2418 | In normal usage, the value of such columns should NOT be included at | |
2419 | all in the call to C<update_or_create>, even when set to C<undef>. | |
2414 | 2420 | |
2415 | 2421 | =cut |
2416 | 2422 | |
2467 | 2473 | $cd->insert; |
2468 | 2474 | } |
2469 | 2475 | |
2470 | See also L</find>, L</find_or_create> and L<find_or_new>. | |
2476 | B<Note>: Take care when using C<update_or_new> with a table having | |
2477 | columns with default values that you intend to be automatically | |
2478 | supplied by the database (e.g. an auto_increment primary key column). | |
2479 | In normal usage, the value of such columns should NOT be included at | |
2480 | all in the call to C<update_or_new>, even when set to C<undef>. | |
2481 | ||
2482 | See also L</find>, L</find_or_create> and L</find_or_new>. | |
2471 | 2483 | |
2472 | 2484 | =cut |
2473 | 2485 | |
2777 | 2789 | |
2778 | 2790 | # build columns (as long as select isn't set) into a set of as/select hashes |
2779 | 2791 | unless ( $attrs->{select} ) { |
2780 | @colbits = map { | |
2781 | ( ref($_) eq 'HASH' ) | |
2782 | ? $_ | |
2783 | : { | |
2784 | ( | |
2785 | /^\Q${alias}.\E(.+)$/ | |
2786 | ? "$1" | |
2787 | : "$_" | |
2788 | ) | |
2789 | => | |
2790 | ( | |
2791 | /\./ | |
2792 | ? "$_" | |
2793 | : "${alias}.$_" | |
2794 | ) | |
2795 | } | |
2796 | } ( ref($attrs->{columns}) eq 'ARRAY' ) ? @{ delete $attrs->{columns}} : (delete $attrs->{columns} || $source->columns ); | |
2797 | } | |
2792 | ||
2793 | my @cols = ( ref($attrs->{columns}) eq 'ARRAY' ) | |
2794 | ? @{ delete $attrs->{columns}} | |
2795 | : ( | |
2796 | ( delete $attrs->{columns} ) | |
2797 | || | |
2798 | $source->columns | |
2799 | ) | |
2800 | ; | |
2801 | ||
2802 | @colbits = map { | |
2803 | ( ref($_) eq 'HASH' ) | |
2804 | ? $_ | |
2805 | : { | |
2806 | ( | |
2807 | /^\Q${alias}.\E(.+)$/ | |
2808 | ? "$1" | |
2809 | : "$_" | |
2810 | ) | |
2811 | => | |
2812 | ( | |
2813 | /\./ | |
2814 | ? "$_" | |
2815 | : "${alias}.$_" | |
2816 | ) | |
2817 | } | |
2818 | } @cols; | |
2819 | } | |
2820 | ||
2798 | 2821 | # add the additional columns on |
2799 | 2822 | foreach ( 'include_columns', '+columns' ) { |
2800 | 2823 | push @colbits, map { |
2891 | 2914 | # generate the distinct induced group_by early, as prefetch will be carried via a |
2892 | 2915 | # subquery (since a group_by is present) |
2893 | 2916 | if (delete $attrs->{distinct}) { |
2894 | $attrs->{group_by} ||= [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ]; | |
2917 | if ($attrs->{group_by}) { | |
2918 | carp ("Useless use of distinct on a grouped resultset ('distinct' is ignored when a 'group_by' is present)"); | |
2919 | } | |
2920 | else { | |
2921 | $attrs->{group_by} = [ grep { !ref($_) || (ref($_) ne 'HASH') } @{$attrs->{select}} ]; | |
2922 | } | |
2895 | 2923 | } |
2896 | 2924 | |
2897 | 2925 | $attrs->{collapse} ||= {}; |
3086 | 3114 | |
3087 | 3115 | sub throw_exception { |
3088 | 3116 | my $self=shift; |
3117 | ||
3089 | 3118 | if (ref $self && $self->_source_handle->schema) { |
3090 | 3119 | $self->_source_handle->schema->throw_exception(@_) |
3091 | } else { | |
3092 | croak(@_); | |
3093 | } | |
3094 | ||
3120 | } | |
3121 | else { | |
3122 | DBIx::Class::Exception->throw(@_); | |
3123 | } | |
3095 | 3124 | } |
3096 | 3125 | |
3097 | 3126 | # XXX: FIXME: Attributes docs need clearing up |
3510 | 3539 | |
3511 | 3540 | =back |
3512 | 3541 | |
3513 | Set to 1 to group by all columns. | |
3542 | Set to 1 to group by all columns. If the resultset already has a group_by | |
3543 | attribute, this setting is ignored and an appropriate warning is issued. | |
3514 | 3544 | |
3515 | 3545 | =head2 where |
3516 | 3546 | |
3544 | 3574 | For more examples of using these attributes, see |
3545 | 3575 | L<DBIx::Class::Manual::Cookbook>. |
3546 | 3576 | |
3547 | =head2 from | |
3548 | ||
3549 | =over 4 | |
3550 | ||
3551 | =item Value: \@from_clause | |
3552 | ||
3553 | =back | |
3554 | ||
3555 | The C<from> attribute gives you manual control over the C<FROM> clause of SQL | |
3556 | statements generated by L<DBIx::Class>, allowing you to express custom C<JOIN> | |
3557 | clauses. | |
3558 | ||
3559 | NOTE: Use this on your own risk. This allows you to shoot off your foot! | |
3560 | ||
3561 | C<join> will usually do what you need and it is strongly recommended that you | |
3562 | avoid using C<from> unless you cannot achieve the desired result using C<join>. | |
3563 | And we really do mean "cannot", not just tried and failed. Attempting to use | |
3564 | this because you're having problems with C<join> is like trying to use x86 | |
3565 | ASM because you've got a syntax error in your C. Trust us on this. | |
3566 | ||
3567 | Now, if you're still really, really sure you need to use this (and if you're | |
3568 | not 100% sure, ask the mailing list first), here's an explanation of how this | |
3569 | works. | |
3570 | ||
3571 | The syntax is as follows - | |
3572 | ||
3573 | [ | |
3574 | { <alias1> => <table1> }, | |
3575 | [ | |
3576 | { <alias2> => <table2>, -join_type => 'inner|left|right' }, | |
3577 | [], # nested JOIN (optional) | |
3578 | { <table1.column1> => <table2.column2>, ... (more conditions) }, | |
3579 | ], | |
3580 | # More of the above [ ] may follow for additional joins | |
3581 | ] | |
3582 | ||
3583 | <table1> <alias1> | |
3584 | JOIN | |
3585 | <table2> <alias2> | |
3586 | [JOIN ...] | |
3587 | ON <table1.column1> = <table2.column2> | |
3588 | <more joins may follow> | |
3589 | ||
3590 | An easy way to follow the examples below is to remember the following: | |
3591 | ||
3592 | Anything inside "[]" is a JOIN | |
3593 | Anything inside "{}" is a condition for the enclosing JOIN | |
3594 | ||
3595 | The following examples utilize a "person" table in a family tree application. | |
3596 | In order to express parent->child relationships, this table is self-joined: | |
3597 | ||
3598 | # Person->belongs_to('father' => 'Person'); | |
3599 | # Person->belongs_to('mother' => 'Person'); | |
3600 | ||
3601 | C<from> can be used to nest joins. Here we return all children with a father, | |
3602 | then search against all mothers of those children: | |
3603 | ||
3604 | $rs = $schema->resultset('Person')->search( | |
3605 | undef, | |
3606 | { | |
3607 | alias => 'mother', # alias columns in accordance with "from" | |
3608 | from => [ | |
3609 | { mother => 'person' }, | |
3610 | [ | |
3611 | [ | |
3612 | { child => 'person' }, | |
3613 | [ | |
3614 | { father => 'person' }, | |
3615 | { 'father.person_id' => 'child.father_id' } | |
3616 | ] | |
3617 | ], | |
3618 | { 'mother.person_id' => 'child.mother_id' } | |
3619 | ], | |
3620 | ] | |
3621 | }, | |
3622 | ); | |
3623 | ||
3624 | # Equivalent SQL: | |
3625 | # SELECT mother.* FROM person mother | |
3626 | # JOIN ( | |
3627 | # person child | |
3628 | # JOIN person father | |
3629 | # ON ( father.person_id = child.father_id ) | |
3630 | # ) | |
3631 | # ON ( mother.person_id = child.mother_id ) | |
3632 | ||
3633 | The type of any join can be controlled manually. To search against only people | |
3634 | with a father in the person table, we could explicitly use C<INNER JOIN>: | |
3635 | ||
3636 | $rs = $schema->resultset('Person')->search( | |
3637 | undef, | |
3638 | { | |
3639 | alias => 'child', # alias columns in accordance with "from" | |
3640 | from => [ | |
3641 | { child => 'person' }, | |
3642 | [ | |
3643 | { father => 'person', -join_type => 'inner' }, | |
3644 | { 'father.id' => 'child.father_id' } | |
3645 | ], | |
3646 | ] | |
3647 | }, | |
3648 | ); | |
3649 | ||
3650 | # Equivalent SQL: | |
3651 | # SELECT child.* FROM person child | |
3652 | # INNER JOIN person father ON child.father_id = father.id | |
3653 | ||
3654 | You can select from a subquery by passing a resultset to from as follows. | |
3655 | ||
3656 | $schema->resultset('Artist')->search( | |
3657 | undef, | |
3658 | { alias => 'artist2', | |
3659 | from => [ { artist2 => $artist_rs->as_query } ], | |
3660 | } ); | |
3661 | ||
3662 | # and you'll get sql like this.. | |
3663 | # SELECT artist2.artistid, artist2.name, artist2.rank, artist2.charfield FROM | |
3664 | # ( SELECT me.artistid, me.name, me.rank, me.charfield FROM artists me ) artist2 | |
3665 | ||
3666 | If you need to express really complex joins, you | |
3667 | can supply literal SQL to C<from> via a scalar reference. In this case | |
3668 | the contents of the scalar will replace the table name associated with the | |
3669 | resultsource. | |
3670 | ||
3671 | WARNING: This technique might very well not work as expected on chained | |
3672 | searches - you have been warned. | |
3673 | ||
3674 | # Assuming the Event resultsource is defined as: | |
3675 | ||
3676 | MySchema::Event->add_columns ( | |
3677 | sequence => { | |
3678 | data_type => 'INT', | |
3679 | is_auto_increment => 1, | |
3680 | }, | |
3681 | location => { | |
3682 | data_type => 'INT', | |
3683 | }, | |
3684 | type => { | |
3685 | data_type => 'INT', | |
3686 | }, | |
3687 | ); | |
3688 | MySchema::Event->set_primary_key ('sequence'); | |
3689 | ||
3690 | # This will get back the latest event for every location. The column | |
3691 | # selector is still provided by DBIC, all we do is add a JOIN/WHERE | |
3692 | # combo to limit the resultset | |
3693 | ||
3694 | $rs = $schema->resultset('Event'); | |
3695 | $table = $rs->result_source->name; | |
3696 | $latest = $rs->search ( | |
3697 | undef, | |
3698 | { from => \ " | |
3699 | (SELECT e1.* FROM $table e1 | |
3700 | JOIN $table e2 | |
3701 | ON e1.location = e2.location | |
3702 | AND e1.sequence < e2.sequence | |
3703 | WHERE e2.sequence is NULL | |
3704 | ) me", | |
3705 | }, | |
3706 | ); | |
3707 | ||
3708 | # Equivalent SQL (with the DBIC chunks added): | |
3709 | ||
3710 | SELECT me.sequence, me.location, me.type FROM | |
3711 | (SELECT e1.* FROM events e1 | |
3712 | JOIN events e2 | |
3713 | ON e1.location = e2.location | |
3714 | AND e1.sequence < e2.sequence | |
3715 | WHERE e2.sequence is NULL | |
3716 | ) me; | |
3717 | ||
3718 | 3577 | =head2 for |
3719 | 3578 | |
3720 | 3579 | =over 4 |
0 | 0 | package DBIx::Class::ResultSetColumn; |
1 | ||
1 | 2 | use strict; |
2 | 3 | use warnings; |
4 | ||
3 | 5 | use base 'DBIx::Class'; |
6 | ||
7 | use Carp::Clan qw/^DBIx::Class/; | |
8 | use DBIx::Class::Exception; | |
4 | 9 | use List::Util; |
5 | 10 | |
6 | 11 | =head1 NAME |
60 | 65 | my $select = defined $as_index ? $select_list->[$as_index] : $column; |
61 | 66 | |
62 | 67 | # {collapse} would mean a has_many join was injected, which in turn means |
63 | # we need to group IF WE CAN (only if the column in question is unique) | |
68 | # we need to group *IF WE CAN* (only if the column in question is unique) | |
64 | 69 | if (!$new_attrs->{group_by} && keys %{$orig_attrs->{collapse}}) { |
65 | 70 | |
66 | 71 | # scan for a constraint that would contain our column only - that'd be proof |
75 | 80 | |
76 | 81 | if ($col eq $select or $fqcol eq $select) { |
77 | 82 | $new_attrs->{group_by} = [ $select ]; |
83 | delete $new_attrs->{distinct}; # it is ignored when group_by is present | |
78 | 84 | last; |
79 | 85 | } |
80 | 86 | } |
87 | ||
88 | if (!$new_attrs->{group_by}) { | |
89 | carp ( | |
90 | "Attempting to retrieve non-unique column '$column' on a resultset containing " | |
91 | . 'one-to-many joins will return duplicate results.' | |
92 | ); | |
93 | } | |
81 | 94 | } |
82 | 95 | |
83 | 96 | my $new = bless { _select => $select, _as => $column, _parent_resultset => $new_parent_rs }, $class; |
124 | 137 | |
125 | 138 | sub next { |
126 | 139 | my $self = shift; |
140 | ||
141 | # using cursor so we don't inflate anything | |
127 | 142 | my ($row) = $self->_resultset->cursor->next; |
143 | ||
128 | 144 | return $row; |
129 | 145 | } |
130 | 146 | |
148 | 164 | |
149 | 165 | sub all { |
150 | 166 | my $self = shift; |
167 | ||
168 | # using cursor so we don't inflate anything | |
151 | 169 | return map { $_->[0] } $self->_resultset->cursor->all; |
152 | 170 | } |
153 | 171 | |
193 | 211 | |
194 | 212 | sub first { |
195 | 213 | my $self = shift; |
196 | my ($row) = $self->_resultset->cursor->reset->next; | |
214 | ||
215 | # using cursor so we don't inflate anything | |
216 | $self->_resultset->cursor->reset; | |
217 | my ($row) = $self->_resultset->cursor->next; | |
218 | ||
219 | return $row; | |
220 | } | |
221 | ||
222 | =head2 single | |
223 | ||
224 | =over 4 | |
225 | ||
226 | =item Arguments: none | |
227 | ||
228 | =item Return Value: $value | |
229 | ||
230 | =back | |
231 | ||
232 | Much like L<DBIx::Class::ResultSet/single> fetches one and only one column | |
233 | value using the cursor directly. If additional rows are present a warning | |
234 | is issued before discarding the cursor. | |
235 | ||
236 | =cut | |
237 | ||
238 | sub single { | |
239 | my $self = shift; | |
240 | ||
241 | my $attrs = $self->_resultset->_resolved_attrs; | |
242 | my ($row) = $self->_resultset->result_source->storage->select_single( | |
243 | $attrs->{from}, $attrs->{select}, $attrs->{where}, $attrs | |
244 | ); | |
245 | ||
197 | 246 | return $row; |
198 | 247 | } |
199 | 248 | |
377 | 426 | |
378 | 427 | sub throw_exception { |
379 | 428 | my $self=shift; |
429 | ||
380 | 430 | if (ref $self && $self->{_parent_resultset}) { |
381 | $self->{_parent_resultset}->throw_exception(@_) | |
382 | } else { | |
383 | croak(@_); | |
431 | $self->{_parent_resultset}->throw_exception(@_); | |
432 | } | |
433 | else { | |
434 | DBIx::Class::Exception->throw(@_); | |
384 | 435 | } |
385 | 436 | } |
386 | 437 | |
394 | 445 | # |
395 | 446 | # Returns the underlying resultset. Creates it from the parent resultset if |
396 | 447 | # necessary. |
397 | # | |
448 | # | |
398 | 449 | sub _resultset { |
399 | 450 | my $self = shift; |
400 | 451 |
4 | 4 | |
5 | 5 | use DBIx::Class::ResultSet; |
6 | 6 | use DBIx::Class::ResultSourceHandle; |
7 | ||
8 | use DBIx::Class::Exception; | |
7 | 9 | use Carp::Clan qw/^DBIx::Class/; |
8 | 10 | |
9 | 11 | use base qw/DBIx::Class/; |
1193 | 1195 | |
1194 | 1196 | # Returns the {from} structure used to express JOIN conditions |
1195 | 1197 | sub _resolve_join { |
1196 | my ($self, $join, $alias, $seen, $jpath, $force_left) = @_; | |
1198 | my ($self, $join, $alias, $seen, $jpath, $parent_force_left) = @_; | |
1197 | 1199 | |
1198 | 1200 | # we need a supplied one, because we do in-place modifications, no returns |
1199 | 1201 | $self->throw_exception ('You must supply a seen hashref as the 3rd argument to _resolve_join') |
1204 | 1206 | |
1205 | 1207 | $jpath = [@$jpath]; |
1206 | 1208 | |
1207 | if (ref $join eq 'ARRAY') { | |
1209 | if (not defined $join) { | |
1210 | return (); | |
1211 | } | |
1212 | elsif (ref $join eq 'ARRAY') { | |
1208 | 1213 | return |
1209 | 1214 | map { |
1210 | $self->_resolve_join($_, $alias, $seen, $jpath, $force_left); | |
1215 | $self->_resolve_join($_, $alias, $seen, $jpath, $parent_force_left); | |
1211 | 1216 | } @$join; |
1212 | } elsif (ref $join eq 'HASH') { | |
1213 | return | |
1214 | map { | |
1215 | my $as = ($seen->{$_} ? join ('_', $_, $seen->{$_} + 1) : $_); # the actual seen value will be incremented below | |
1216 | local $force_left->{force} = $force_left->{force}; | |
1217 | ( | |
1218 | $self->_resolve_join($_, $alias, $seen, [@$jpath], $force_left), | |
1219 | $self->related_source($_)->_resolve_join( | |
1220 | $join->{$_}, $as, $seen, [@$jpath, $_], $force_left | |
1221 | ) | |
1222 | ); | |
1223 | } keys %$join; | |
1224 | } elsif (ref $join) { | |
1217 | } | |
1218 | elsif (ref $join eq 'HASH') { | |
1219 | ||
1220 | my @ret; | |
1221 | for my $rel (keys %$join) { | |
1222 | ||
1223 | my $rel_info = $self->relationship_info($rel) | |
1224 | or $self->throw_exception("No such relationship ${rel}"); | |
1225 | ||
1226 | my $force_left = $parent_force_left; | |
1227 | $force_left ||= lc($rel_info->{attrs}{join_type}||'') eq 'left'; | |
1228 | ||
1229 | # the actual seen value will be incremented by the recursion | |
1230 | my $as = ($seen->{$rel} ? join ('_', $rel, $seen->{$rel} + 1) : $rel); | |
1231 | ||
1232 | push @ret, ( | |
1233 | $self->_resolve_join($rel, $alias, $seen, [@$jpath], $force_left), | |
1234 | $self->related_source($rel)->_resolve_join( | |
1235 | $join->{$rel}, $as, $seen, [@$jpath, $rel], $force_left | |
1236 | ) | |
1237 | ); | |
1238 | } | |
1239 | return @ret; | |
1240 | ||
1241 | } | |
1242 | elsif (ref $join) { | |
1225 | 1243 | $self->throw_exception("No idea how to resolve join reftype ".ref $join); |
1226 | } else { | |
1227 | ||
1228 | return() unless defined $join; | |
1229 | ||
1244 | } | |
1245 | else { | |
1230 | 1246 | my $count = ++$seen->{$join}; |
1231 | 1247 | my $as = ($count > 1 ? "${join}_${count}" : $join); |
1232 | 1248 | |
1233 | my $rel_info = $self->relationship_info($join); | |
1234 | $self->throw_exception("No such relationship ${join}") unless $rel_info; | |
1235 | my $type; | |
1236 | if ($force_left) { | |
1237 | $type = 'left'; | |
1238 | } | |
1239 | else { | |
1240 | $type = $rel_info->{attrs}{join_type}; | |
1241 | $force_left = 1 if lc($type||'') eq 'left'; | |
1242 | } | |
1249 | my $rel_info = $self->relationship_info($join) | |
1250 | or $self->throw_exception("No such relationship ${join}"); | |
1243 | 1251 | |
1244 | 1252 | my $rel_src = $self->related_source($join); |
1245 | 1253 | return [ { $as => $rel_src->from, |
1246 | 1254 | -source_handle => $rel_src->handle, |
1247 | -join_type => $type, | |
1255 | -join_type => $parent_force_left | |
1256 | ? 'left' | |
1257 | : $rel_info->{attrs}{join_type} | |
1258 | , | |
1248 | 1259 | -join_path => [@$jpath, $join], |
1249 | 1260 | -alias => $as, |
1250 | 1261 | -relation_chain_depth => $seen->{-relation_chain_depth} || 0, |
1321 | 1332 | #warn "$self $k $for $v"; |
1322 | 1333 | unless ($for->has_column_loaded($v)) { |
1323 | 1334 | if ($for->in_storage) { |
1324 | $self->throw_exception( | |
1325 | "Column ${v} not loaded or not passed to new() prior to insert()" | |
1326 | ." on ${for} trying to resolve relationship (maybe you forgot " | |
1327 | ."to call ->discard_changes to get defaults from the db)" | |
1335 | $self->throw_exception(sprintf | |
1336 | 'Unable to resolve relationship from %s to %s: column %s.%s not ' | |
1337 | . 'loaded from storage (or not passed to new() prior to insert()). ' | |
1338 | . 'Maybe you forgot to call ->discard_changes to get defaults from the db.', | |
1339 | ||
1340 | $for->result_source->source_name, | |
1341 | $as, | |
1342 | $as, $v, | |
1328 | 1343 | ); |
1329 | 1344 | } |
1330 | 1345 | return $UNRESOLVABLE_CONDITION; |
1434 | 1449 | my ($self, $pre, $alias, $alias_map, $order, $collapse, $pref_path) = @_; |
1435 | 1450 | $pref_path ||= []; |
1436 | 1451 | |
1437 | if( ref $pre eq 'ARRAY' ) { | |
1452 | if (not defined $pre) { | |
1453 | return (); | |
1454 | } | |
1455 | elsif( ref $pre eq 'ARRAY' ) { | |
1438 | 1456 | return |
1439 | 1457 | map { $self->_resolve_prefetch( $_, $alias, $alias_map, $order, $collapse, [ @$pref_path ] ) } |
1440 | 1458 | @$pre; |
1457 | 1475 | $p = $p->{$_} for (@$pref_path, $pre); |
1458 | 1476 | |
1459 | 1477 | $self->throw_exception ( |
1460 | "Unable to resolve prefetch $pre - join alias map does not contain an entry for path: " | |
1478 | "Unable to resolve prefetch '$pre' - join alias map does not contain an entry for path: " | |
1461 | 1479 | . join (' -> ', @$pref_path, $pre) |
1462 | 1480 | ) if (ref $p->{-join_aliases} ne 'ARRAY' or not @{$p->{-join_aliases}} ); |
1463 | 1481 | |
1574 | 1592 | |
1575 | 1593 | sub throw_exception { |
1576 | 1594 | my $self = shift; |
1595 | ||
1577 | 1596 | if (defined $self->schema) { |
1578 | 1597 | $self->schema->throw_exception(@_); |
1579 | } else { | |
1580 | croak(@_); | |
1598 | } | |
1599 | else { | |
1600 | DBIx::Class::Exception->throw(@_); | |
1581 | 1601 | } |
1582 | 1602 | } |
1583 | 1603 |
105 | 105 | $self->{schema} = $rs->schema if $rs; |
106 | 106 | } |
107 | 107 | |
108 | carp "Unable to restore schema" unless $self->{schema}; | |
108 | carp "Unable to restore schema. Look at 'freeze' and 'thaw' methods in DBIx::Class::Schema." | |
109 | unless $self->{schema}; | |
109 | 110 | } |
110 | 111 | |
111 | 112 | =head1 AUTHOR |
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base qw/DBIx::Class/; |
6 | use Carp::Clan qw/^DBIx::Class/; | |
6 | ||
7 | use DBIx::Class::Exception; | |
7 | 8 | use Scalar::Util (); |
8 | use Scope::Guard; | |
9 | 9 | |
10 | 10 | ### |
11 | 11 | ### Internal method |
167 | 167 | foreach my $key (keys %$attrs) { |
168 | 168 | if (ref $attrs->{$key}) { |
169 | 169 | ## Can we extract this lot to use with update(_or .. ) ? |
170 | confess "Can't do multi-create without result source" unless $source; | |
170 | $new->throw_exception("Can't do multi-create without result source") | |
171 | unless $source; | |
171 | 172 | my $info = $source->relationship_info($key); |
172 | 173 | if ($info && $info->{attrs}{accessor} |
173 | 174 | && $info->{attrs}{accessor} eq 'single') |
1329 | 1330 | |
1330 | 1331 | sub throw_exception { |
1331 | 1332 | my $self=shift; |
1333 | ||
1332 | 1334 | if (ref $self && ref $self->result_source && $self->result_source->schema) { |
1333 | $self->result_source->schema->throw_exception(@_); | |
1334 | } else { | |
1335 | croak(@_); | |
1335 | $self->result_source->schema->throw_exception(@_) | |
1336 | } | |
1337 | else { | |
1338 | DBIx::Class::Exception->throw(@_); | |
1336 | 1339 | } |
1337 | 1340 | } |
1338 | 1341 |
519 | 519 | return; |
520 | 520 | } |
521 | 521 | |
522 | $self->throw_exception($self->_sqlt_version_error) | |
523 | if (not $self->_sqlt_version_ok); | |
524 | ||
525 | my $db_tr = SQL::Translator->new({ | |
526 | add_drop_table => 1, | |
522 | $self->throw_exception($self->storage->_sqlt_version_error) | |
523 | if (not $self->storage->_sqlt_version_ok); | |
524 | ||
525 | my $db_tr = SQL::Translator->new({ | |
526 | add_drop_table => 1, | |
527 | 527 | parser => 'DBI', |
528 | 528 | parser_args => { dbh => $self->storage->dbh } |
529 | 529 | }); |
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base qw/DBIx::Class::Cursor/; |
6 | ||
7 | __PACKAGE__->mk_group_accessors('simple' => | |
8 | qw/sth/ | |
9 | ); | |
6 | 10 | |
7 | 11 | =head1 NAME |
8 | 12 | |
72 | 76 | && $self->{attrs}{rows} |
73 | 77 | && $self->{pos} >= $self->{attrs}{rows} |
74 | 78 | ) { |
75 | $self->{sth}->finish if $self->{sth}->{Active}; | |
76 | delete $self->{sth}; | |
79 | $self->sth->finish if $self->sth->{Active}; | |
80 | $self->sth(undef); | |
77 | 81 | $self->{done} = 1; |
78 | 82 | } |
79 | 83 | return if $self->{done}; |
80 | unless ($self->{sth}) { | |
81 | $self->{sth} = ($storage->_select(@{$self->{args}}))[1]; | |
84 | unless ($self->sth) { | |
85 | $self->sth(($storage->_select(@{$self->{args}}))[1]); | |
82 | 86 | if ($self->{attrs}{software_limit}) { |
83 | 87 | if (my $offset = $self->{attrs}{offset}) { |
84 | $self->{sth}->fetch for 1 .. $offset; | |
88 | $self->sth->fetch for 1 .. $offset; | |
85 | 89 | } |
86 | 90 | } |
87 | 91 | } |
88 | my @row = $self->{sth}->fetchrow_array; | |
92 | my @row = $self->sth->fetchrow_array; | |
89 | 93 | if (@row) { |
90 | 94 | $self->{pos}++; |
91 | 95 | } else { |
92 | delete $self->{sth}; | |
96 | $self->sth(undef); | |
93 | 97 | $self->{done} = 1; |
94 | 98 | } |
95 | 99 | return @row; |
119 | 123 | my ($storage, $dbh, $self) = @_; |
120 | 124 | |
121 | 125 | $self->_check_dbh_gen; |
122 | $self->{sth}->finish if $self->{sth}->{Active}; | |
123 | delete $self->{sth}; | |
126 | $self->sth->finish if $self->sth && $self->sth->{Active}; | |
127 | $self->sth(undef); | |
124 | 128 | my ($rv, $sth) = $storage->_select(@{$self->{args}}); |
125 | 129 | return @{$sth->fetchall_arrayref}; |
126 | 130 | } |
145 | 149 | my ($self) = @_; |
146 | 150 | |
147 | 151 | # No need to care about failures here |
148 | eval { $self->{sth}->finish if $self->{sth} && $self->{sth}->{Active} }; | |
152 | eval { $self->sth->finish if $self->sth && $self->sth->{Active} }; | |
149 | 153 | $self->_soft_reset; |
154 | return undef; | |
150 | 155 | } |
151 | 156 | |
152 | 157 | sub _soft_reset { |
153 | 158 | my ($self) = @_; |
154 | 159 | |
155 | delete $self->{sth}; | |
160 | $self->sth(undef); | |
156 | 161 | delete $self->{done}; |
157 | 162 | $self->{pos} = 0; |
158 | return $self; | |
159 | 163 | } |
160 | 164 | |
161 | 165 | sub _check_dbh_gen { |
172 | 176 | |
173 | 177 | # None of the reasons this would die matter if we're in DESTROY anyways |
174 | 178 | local $@; |
175 | eval { $self->{sth}->finish if $self->{sth} && $self->{sth}->{Active} }; | |
179 | eval { $self->sth->finish if $self->sth && $self->sth->{Active} }; | |
176 | 180 | } |
177 | 181 | |
178 | 182 | 1; |
31 | 31 | } |
32 | 32 | } |
33 | 33 | |
34 | sub _unset_identity_insert { | |
35 | my ($self, $table) = @_; | |
36 | ||
37 | my $sql = sprintf ( | |
38 | 'SET IDENTITY_INSERT %s OFF', | |
39 | $self->sql_maker->_quote ($table), | |
40 | ); | |
41 | ||
42 | my $dbh = $self->_get_dbh; | |
43 | $dbh->do ($sql); | |
44 | } | |
45 | ||
34 | 46 | sub insert_bulk { |
35 | 47 | my $self = shift; |
36 | 48 | my ($source, $cols, $data) = @_; |
37 | 49 | |
38 | if (List::Util::first | |
50 | my $is_identity_insert = (List::Util::first | |
39 | 51 | { $source->column_info ($_)->{is_auto_increment} } |
40 | 52 | (@{$cols}) |
41 | ) { | |
42 | $self->_set_identity_insert ($source->name); | |
53 | ) | |
54 | ? 1 | |
55 | : 0; | |
56 | ||
57 | if ($is_identity_insert) { | |
58 | $self->_set_identity_insert ($source->name); | |
43 | 59 | } |
44 | 60 | |
45 | 61 | $self->next::method(@_); |
62 | ||
63 | if ($is_identity_insert) { | |
64 | $self->_unset_identity_insert ($source->name); | |
65 | } | |
46 | 66 | } |
47 | 67 | |
48 | 68 | # support MSSQL GUID column types |
82 | 102 | $updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid; |
83 | 103 | } |
84 | 104 | |
85 | if (List::Util::first { $_->{is_auto_increment} } (values %$supplied_col_info) ) { | |
86 | $self->_set_identity_insert ($source->name); | |
105 | my $is_identity_insert = (List::Util::first { $_->{is_auto_increment} } (values %$supplied_col_info) ) | |
106 | ? 1 | |
107 | : 0; | |
108 | ||
109 | if ($is_identity_insert) { | |
110 | $self->_set_identity_insert ($source->name); | |
87 | 111 | } |
88 | 112 | |
89 | 113 | $updated_cols = { %$updated_cols, %{ $self->next::method(@_) } }; |
114 | ||
115 | if ($is_identity_insert) { | |
116 | $self->_unset_identity_insert ($source->name); | |
117 | } | |
118 | ||
90 | 119 | |
91 | 120 | return $updated_cols; |
92 | 121 | } |
39 | 39 | sub _prep_for_execute { |
40 | 40 | my $self = shift; |
41 | 41 | |
42 | my ($op, $extra_bind, $ident) = @_; | |
43 | ||
44 | 42 | my ($sql, $bind) = $self->next::method(@_); |
45 | 43 | |
46 | # stringify args, quote via $dbh, and manually insert | |
44 | # stringify bind args, quote via $dbh, and manually insert | |
45 | #my ($op, $extra_bind, $ident, $args) = @_; | |
46 | my $ident = $_[2]; | |
47 | 47 | |
48 | 48 | my @sql_part = split /\?/, $sql; |
49 | 49 | my $new_sql; |
50 | 50 | |
51 | my $col_info = $self->_resolve_column_info($ident, [ map $_->[0], @$bind ]); | |
52 | ||
51 | 53 | foreach my $bound (@$bind) { |
52 | 54 | my $col = shift @$bound; |
53 | my $datatype = 'FIXME!!!'; | |
55 | ||
56 | my $datatype = $col_info->{$col}{data_type}; | |
57 | ||
54 | 58 | foreach my $data (@$bound) { |
55 | if(ref $data) { | |
56 | $data = ''.$data; | |
57 | } | |
58 | $data = $self->_dbh->quote($data); | |
59 | $new_sql .= shift(@sql_part) . $data; | |
59 | $data = ''.$data if ref $data; | |
60 | ||
61 | $data = $self->_prep_interpolated_value($datatype, $data) | |
62 | if $datatype; | |
63 | ||
64 | $data = $self->_dbh->quote($data) | |
65 | unless $self->interpolate_unquoted($datatype, $data); | |
66 | ||
67 | $new_sql .= shift(@sql_part) . $data; | |
60 | 68 | } |
61 | 69 | } |
62 | 70 | $new_sql .= join '', @sql_part; |
64 | 72 | return ($new_sql, []); |
65 | 73 | } |
66 | 74 | |
75 | =head2 interpolate_unquoted | |
76 | ||
77 | This method is called by L</_prep_for_execute> for every column in | |
78 | order to determine if its value should be quoted or not. The arguments | |
79 | are the current column data type and the actual bind value. The return | |
80 | value is interpreted as: true - do not quote, false - do quote. You should | |
81 | override this in you Storage::DBI::<database> subclass, if your RDBMS | |
82 | does not like quotes around certain datatypes (e.g. Sybase and integer | |
83 | columns). The default method always returns false (do quote). | |
84 | ||
85 | WARNING!!! | |
86 | ||
87 | Always validate that the bind-value is valid for the current datatype. | |
88 | Otherwise you may very well open the door to SQL injection attacks. | |
89 | ||
90 | =cut | |
91 | ||
92 | sub interpolate_unquoted { | |
93 | #my ($self, $datatype, $value) = @_; | |
94 | return 0; | |
95 | } | |
96 | ||
97 | =head2 _prep_interpolated_value | |
98 | ||
99 | Given a datatype and the value to be inserted directly into a SQL query, returns | |
100 | the necessary string to represent that value (by e.g. adding a '$' sign) | |
101 | ||
102 | =cut | |
103 | ||
104 | sub _prep_interpolated_value { | |
105 | #my ($self, $datatype, $value) = @_; | |
106 | return $_[2]; | |
107 | } | |
108 | ||
67 | 109 | =head1 AUTHORS |
68 | 110 | |
69 | Brandon Black <blblack@gmail.com> | |
70 | ||
71 | Trym Skaar <trym@tryms.no> | |
111 | See L<DBIx::Class/CONTRIBUTORS> | |
72 | 112 | |
73 | 113 | =head1 LICENSE |
74 | 114 |
4 | 4 | use base qw/DBIx::Class::Storage::DBI::MSSQL/; |
5 | 5 | use mro 'c3'; |
6 | 6 | |
7 | use Carp::Clan qw/^DBIx::Class/; | |
8 | 7 | use List::Util(); |
9 | 8 | use Scalar::Util (); |
10 | 9 | |
61 | 60 | my $self = shift; |
62 | 61 | |
63 | 62 | if (ref($self->_dbi_connect_info->[0]) eq 'CODE') { |
64 | croak 'cannot set DBI attributes on a CODE ref connect_info'; | |
63 | $self->throw_exception ('cannot set DBI attributes on a CODE ref connect_info'); | |
65 | 64 | } |
66 | 65 | |
67 | 66 | my $dbi_attrs = $self->_dbi_connect_info->[-1]; |
90 | 89 | $dbh->do('SELECT @@IDENTITY'); |
91 | 90 | }; |
92 | 91 | if ($@) { |
93 | croak <<'EOF'; | |
92 | $self->throw_exception (<<'EOF'); | |
94 | 93 | |
95 | 94 | Your drivers do not seem to support dynamic cursors (odbc_cursortype => 2), |
96 | 95 | if you're using FreeTDS, make sure to set tds_version to 8.0 or greater. |
101 | 100 | $self->_identity_method('@@identity'); |
102 | 101 | } |
103 | 102 | |
104 | sub _rebless { | |
105 | no warnings 'uninitialized'; | |
103 | sub _init { | |
106 | 104 | my $self = shift; |
107 | 105 | |
108 | if (ref($self->_dbi_connect_info->[0]) ne 'CODE' && | |
109 | eval { $self->_dbi_connect_info->[-1]{odbc_cursortype} } == 2) { | |
106 | no warnings qw/uninitialized/; | |
107 | ||
108 | if ( | |
109 | ref($self->_dbi_connect_info->[0]) ne 'CODE' | |
110 | && | |
111 | ref ($self->_dbi_connect_info->[-1]) eq 'HASH' | |
112 | && | |
113 | $self->_dbi_connect_info->[-1]{odbc_cursortype} == 2 | |
114 | ) { | |
110 | 115 | $self->_set_dynamic_cursors; |
111 | 116 | return; |
112 | 117 | } |
158 | 163 | my $dsn = $self->_dbi_connect_info->[0]; |
159 | 164 | |
160 | 165 | if (ref($dsn) eq 'CODE') { |
161 | croak 'cannot change the DBI DSN on a CODE ref connect_info'; | |
166 | $self->throw_exception('cannot change the DBI DSN on a CODE ref connect_info'); | |
162 | 167 | } |
163 | 168 | |
164 | 169 | if ($dsn !~ /MARS_Connection=/) { |
205 | 205 | "alter session set nls_timestamp_tz_format='$timestamp_tz_format'"); |
206 | 206 | } |
207 | 207 | |
208 | sub _svp_begin { | |
209 | my ($self, $name) = @_; | |
210 | ||
211 | $self->_get_dbh->do("SAVEPOINT $name"); | |
212 | } | |
213 | ||
214 | 208 | =head2 source_bind_attributes |
215 | 209 | |
216 | 210 | Handle LOB types in Oracle. Under a certain size (4k?), you can get away |
255 | 249 | return \%bind_attributes; |
256 | 250 | } |
257 | 251 | |
252 | sub _svp_begin { | |
253 | my ($self, $name) = @_; | |
254 | ||
255 | $self->_get_dbh->do("SAVEPOINT $name"); | |
256 | } | |
257 | ||
258 | 258 | # Oracle automatically releases a savepoint when you start another one with the |
259 | 259 | # same name. |
260 | 260 | sub _svp_release { 1 } |
18 | 18 | ? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins' |
19 | 19 | : 'DBIx::Class::Storage::DBI::Oracle::Generic'; |
20 | 20 | |
21 | # Load and rebless | |
22 | eval "require $class"; | |
23 | ||
24 | bless $self, $class unless $@; | |
21 | $self->ensure_class_loaded ($class); | |
22 | bless $self, $class; | |
25 | 23 | } |
26 | 24 | } |
27 | 25 |
19 | 19 | } |
20 | 20 | |
21 | 21 | sub last_insert_id { |
22 | my ($self,$source,$col) = @_; | |
23 | my $seq = ( $source->column_info($col)->{sequence} ||= $self->dbh_do('_dbh_get_autoinc_seq', $source, $col) ) | |
22 | my ($self,$source,@cols) = @_; | |
23 | ||
24 | my @values; | |
25 | ||
26 | for my $col (@cols) { | |
27 | my $seq = ( $source->column_info($col)->{sequence} ||= $self->dbh_do('_dbh_get_autoinc_seq', $source, $col) ) | |
24 | 28 | or $self->throw_exception( "could not determine sequence for " |
25 | 29 | . $source->name |
26 | 30 | . ".$col, please consider adding a " |
27 | 31 | . "schema-qualified sequence to its column info" |
28 | 32 | ); |
29 | 33 | |
30 | $self->_dbh_last_insert_id ($self->_dbh, $seq); | |
34 | push @values, $self->_dbh_last_insert_id ($self->_dbh, $seq); | |
35 | } | |
36 | ||
37 | return @values; | |
31 | 38 | } |
32 | 39 | |
33 | 40 | # there seems to be absolutely no reason to have this as a separate method, |
16 | 16 | my @didnt_load; |
17 | 17 | |
18 | 18 | for my $module (keys %replication_required) { |
19 | eval "use $module $replication_required{$module}"; | |
20 | push @didnt_load, "$module $replication_required{$module}" | |
21 | if $@; | |
19 | eval "use $module $replication_required{$module}"; | |
20 | push @didnt_load, "$module $replication_required{$module}" | |
21 | if $@; | |
22 | 22 | } |
23 | 23 | |
24 | 24 | croak("@{[ join ', ', @didnt_load ]} are missing and are required for Replication") |
32 | 32 | use DBIx::Class::Storage::DBI::Replicated::Types qw/BalancerClassNamePart DBICSchema DBICStorageDBI/; |
33 | 33 | use MooseX::Types::Moose qw/ClassName HashRef Object/; |
34 | 34 | use Scalar::Util 'reftype'; |
35 | use Carp::Clan qw/^DBIx::Class/; | |
36 | 35 | use Hash::Merge 'merge'; |
37 | 36 | |
38 | 37 | use namespace::clean -except => 'meta'; |
221 | 220 | isa=>'DBIx::Class::Storage::DBI::Replicated::Pool', |
222 | 221 | lazy_build=>1, |
223 | 222 | handles=>[qw/ |
224 | connect_replicants | |
223 | connect_replicants | |
225 | 224 | replicants |
226 | 225 | has_replicants |
227 | 226 | /], |
276 | 275 | select |
277 | 276 | select_single |
278 | 277 | columns_info_for |
279 | /], | |
278 | /], | |
280 | 279 | ); |
281 | 280 | |
282 | 281 | =head2 write_handler |
289 | 288 | is=>'ro', |
290 | 289 | isa=>Object, |
291 | 290 | lazy_build=>1, |
292 | handles=>[qw/ | |
291 | handles=>[qw/ | |
293 | 292 | on_connect_do |
294 | on_disconnect_do | |
293 | on_disconnect_do | |
295 | 294 | connect_info |
296 | 295 | throw_exception |
297 | 296 | sql_maker |
299 | 298 | create_ddl_dir |
300 | 299 | deployment_statements |
301 | 300 | datetime_parser |
302 | datetime_parser_type | |
303 | build_datetime_parser | |
301 | datetime_parser_type | |
302 | build_datetime_parser | |
304 | 303 | last_insert_id |
305 | 304 | insert |
306 | 305 | insert_bulk |
315 | 314 | sth |
316 | 315 | deploy |
317 | 316 | with_deferred_fk_checks |
318 | dbh_do | |
317 | dbh_do | |
319 | 318 | reload_row |
320 | with_deferred_fk_checks | |
319 | with_deferred_fk_checks | |
321 | 320 | _prep_for_execute |
322 | 321 | |
323 | backup | |
324 | is_datatype_numeric | |
325 | _count_select | |
326 | _subq_count_select | |
327 | _subq_update_delete | |
328 | svp_rollback | |
329 | svp_begin | |
330 | svp_release | |
322 | backup | |
323 | is_datatype_numeric | |
324 | _count_select | |
325 | _subq_count_select | |
326 | _subq_update_delete | |
327 | svp_rollback | |
328 | svp_begin | |
329 | svp_release | |
331 | 330 | /], |
332 | 331 | ); |
333 | 332 | |
363 | 362 | ); |
364 | 363 | |
365 | 364 | $self->pool($self->_build_pool) |
366 | if $self->pool; | |
365 | if $self->pool; | |
367 | 366 | } |
368 | 367 | |
369 | 368 | if (@opts{qw/balancer_type balancer_args/}) { |
375 | 374 | ); |
376 | 375 | |
377 | 376 | $self->balancer($self->_build_balancer) |
378 | if $self->balancer; | |
377 | if $self->balancer; | |
379 | 378 | } |
380 | 379 | |
381 | 380 | $self->_master_connect_info_opts(\%opts); |
412 | 411 | my ($class, $schema, $storage_type_args, @args) = @_; |
413 | 412 | |
414 | 413 | return { |
415 | schema=>$schema, | |
416 | %$storage_type_args, | |
417 | @args | |
414 | schema=>$schema, | |
415 | %$storage_type_args, | |
416 | @args | |
418 | 417 | } |
419 | 418 | } |
420 | 419 | |
451 | 450 | sub _build_balancer { |
452 | 451 | my $self = shift @_; |
453 | 452 | $self->create_balancer( |
454 | pool=>$self->pool, | |
453 | pool=>$self->pool, | |
455 | 454 | master=>$self->master, |
456 | 455 | %{$self->balancer_args}, |
457 | 456 | ); |
493 | 492 | for my $r (@args) { |
494 | 493 | $r = [ $r ] unless reftype $r eq 'ARRAY'; |
495 | 494 | |
496 | croak "coderef replicant connect_info not supported" | |
495 | $self->throw_exception('coderef replicant connect_info not supported') | |
497 | 496 | if ref $r->[0] && reftype $r->[0] eq 'CODE'; |
498 | 497 | |
499 | 498 | # any connect_info options? |
500 | 499 | my $i = 0; |
501 | 500 | $i++ while $i < @$r && (reftype($r->[$i])||'') ne 'HASH'; |
502 | 501 | |
503 | # make one if none | |
502 | # make one if none | |
504 | 503 | $r->[$i] = {} unless $r->[$i]; |
505 | 504 | |
506 | 505 | # merge if two hashes |
507 | 506 | my @hashes = @$r[$i .. $#{$r}]; |
508 | 507 | |
509 | croak "invalid connect_info options" | |
508 | $self->throw_exception('invalid connect_info options') | |
510 | 509 | if (grep { reftype($_) eq 'HASH' } @hashes) != @hashes; |
511 | 510 | |
512 | croak "too many hashrefs in connect_info" | |
511 | $self->throw_exception('too many hashrefs in connect_info') | |
513 | 512 | if @hashes > 2; |
514 | 513 | |
515 | 514 | my %opts = %{ merge(reverse @hashes) }; |
599 | 598 | ($result[0]) = ($coderef->(@args)); |
600 | 599 | } else { |
601 | 600 | $coderef->(@args); |
602 | } | |
601 | } | |
603 | 602 | }; |
604 | 603 | |
605 | 604 | ##Reset to the original state |
606 | $self->read_handler($current); | |
605 | $self->read_handler($current); | |
607 | 606 | |
608 | 607 | ##Exception testing has to come last, otherwise you might leave the |
609 | 608 | ##read_handler set to master. |
737 | 736 | if(@_) { |
738 | 737 | foreach my $source ($self->all_storages) { |
739 | 738 | $source->debug(@_); |
740 | } | |
739 | } | |
741 | 740 | } |
742 | 741 | return $self->master->debug; |
743 | 742 | } |
753 | 752 | if(@_) { |
754 | 753 | foreach my $source ($self->all_storages) { |
755 | 754 | $source->debugobj(@_); |
756 | } | |
755 | } | |
757 | 756 | } |
758 | 757 | return $self->master->debugobj; |
759 | 758 | } |
769 | 768 | if(@_) { |
770 | 769 | foreach my $source ($self->all_storages) { |
771 | 770 | $source->debugfh(@_); |
772 | } | |
771 | } | |
773 | 772 | } |
774 | 773 | return $self->master->debugfh; |
775 | 774 | } |
785 | 784 | if(@_) { |
786 | 785 | foreach my $source ($self->all_storages) { |
787 | 786 | $source->debugcb(@_); |
788 | } | |
787 | } | |
789 | 788 | } |
790 | 789 | return $self->master->debugcb; |
791 | 790 | } |
8 | 8 | /; |
9 | 9 | use mro 'c3'; |
10 | 10 | |
11 | sub _rebless { | |
11 | sub _init { | |
12 | 12 | my $self = shift; |
13 | ||
14 | 13 | $self->disable_sth_caching(1); |
15 | 14 | } |
16 | 15 |
12 | 12 | my $self = shift; |
13 | 13 | my $dbh = $self->_get_dbh; |
14 | 14 | |
15 | if (not $self->_placeholders_supported) { | |
15 | if (not $self->_typeless_placeholders_supported) { | |
16 | 16 | bless $self, |
17 | 17 | 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars'; |
18 | 18 | $self->_rebless; |
12 | 12 | use DBIx::Class::Storage::Statistics; |
13 | 13 | use Scalar::Util(); |
14 | 14 | use List::Util(); |
15 | ||
16 | # what version of sqlt do we require if deploy() without a ddl_dir is invoked | |
17 | # when changing also adjust the corresponding author_require in Makefile.PL | |
18 | my $minimum_sqlt_version = '0.11002'; | |
19 | ||
15 | 20 | |
16 | 21 | __PACKAGE__->mk_group_accessors('simple' => |
17 | 22 | qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid |
680 | 685 | |
681 | 686 | $self->_do_connection_actions(disconnect_call_ => $_) for @actions; |
682 | 687 | |
683 | $self->_dbh->rollback unless $self->_dbh_autocommit; | |
688 | $self->_dbh_rollback unless $self->_dbh_autocommit; | |
689 | ||
684 | 690 | $self->_dbh->disconnect; |
685 | 691 | $self->_dbh(undef); |
686 | 692 | $self->{_dbh_gen}++; |
834 | 840 | return $self->_sql_maker; |
835 | 841 | } |
836 | 842 | |
843 | # nothing to do by default | |
837 | 844 | sub _rebless {} |
845 | sub _init {} | |
838 | 846 | |
839 | 847 | sub _populate_dbh { |
840 | 848 | my ($self) = @_; |
901 | 909 | |
902 | 910 | $self->_driver_determined(1); |
903 | 911 | |
912 | $self->_init; # run driver-specific initializations | |
913 | ||
904 | 914 | $self->_run_connection_actions |
905 | 915 | if $started_unconnected && defined $self->_dbh; |
906 | 916 | } |
996 | 1006 | $weak_self->throw_exception("DBI Exception: $_[0]"); |
997 | 1007 | } |
998 | 1008 | else { |
1009 | # the handler may be invoked by something totally out of | |
1010 | # the scope of DBIC | |
999 | 1011 | croak ("DBI Exception: $_[0]"); |
1000 | 1012 | } |
1001 | 1013 | }; |
1105 | 1117 | if($self->{transaction_depth} == 0) { |
1106 | 1118 | $self->debugobj->txn_begin() |
1107 | 1119 | if $self->debug; |
1108 | ||
1109 | # being here implies we have AutoCommit => 1 | |
1110 | # if the user is utilizing txn_do - good for | |
1111 | # him, otherwise we need to ensure that the | |
1112 | # $dbh is healthy on BEGIN | |
1113 | my $dbh_method = $self->{_in_dbh_do} ? '_dbh' : 'dbh'; | |
1114 | $self->$dbh_method->begin_work; | |
1115 | ||
1116 | } elsif ($self->auto_savepoint) { | |
1120 | $self->_dbh_begin_work; | |
1121 | } | |
1122 | elsif ($self->auto_savepoint) { | |
1117 | 1123 | $self->svp_begin; |
1118 | 1124 | } |
1119 | 1125 | $self->{transaction_depth}++; |
1126 | } | |
1127 | ||
1128 | sub _dbh_begin_work { | |
1129 | my $self = shift; | |
1130 | ||
1131 | # if the user is utilizing txn_do - good for him, otherwise we need to | |
1132 | # ensure that the $dbh is healthy on BEGIN. | |
1133 | # We do this via ->dbh_do instead of ->dbh, so that the ->dbh "ping" | |
1134 | # will be replaced by a failure of begin_work itself (which will be | |
1135 | # then retried on reconnect) | |
1136 | if ($self->{_in_dbh_do}) { | |
1137 | $self->_dbh->begin_work; | |
1138 | } else { | |
1139 | $self->dbh_do(sub { $_[1]->begin_work }); | |
1140 | } | |
1120 | 1141 | } |
1121 | 1142 | |
1122 | 1143 | sub txn_commit { |
1125 | 1146 | my $dbh = $self->_dbh; |
1126 | 1147 | $self->debugobj->txn_commit() |
1127 | 1148 | if ($self->debug); |
1128 | $dbh->commit; | |
1149 | $self->_dbh_commit; | |
1129 | 1150 | $self->{transaction_depth} = 0 |
1130 | 1151 | if $self->_dbh_autocommit; |
1131 | 1152 | } |
1134 | 1155 | $self->svp_release |
1135 | 1156 | if $self->auto_savepoint; |
1136 | 1157 | } |
1158 | } | |
1159 | ||
1160 | sub _dbh_commit { | |
1161 | my $self = shift; | |
1162 | $self->_dbh->commit; | |
1137 | 1163 | } |
1138 | 1164 | |
1139 | 1165 | sub txn_rollback { |
1145 | 1171 | if ($self->debug); |
1146 | 1172 | $self->{transaction_depth} = 0 |
1147 | 1173 | if $self->_dbh_autocommit; |
1148 | $dbh->rollback; | |
1174 | $self->_dbh_rollback; | |
1149 | 1175 | } |
1150 | 1176 | elsif($self->{transaction_depth} > 1) { |
1151 | 1177 | $self->{transaction_depth}--; |
1166 | 1192 | $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1; |
1167 | 1193 | $self->throw_exception($error); |
1168 | 1194 | } |
1195 | } | |
1196 | ||
1197 | sub _dbh_rollback { | |
1198 | my $self = shift; | |
1199 | $self->_dbh->rollback; | |
1169 | 1200 | } |
1170 | 1201 | |
1171 | 1202 | # This used to be the top-half of _execute. It was split out to make it |
1374 | 1405 | } |
1375 | 1406 | |
1376 | 1407 | sub update { |
1377 | my $self = shift @_; | |
1378 | my $source = shift @_; | |
1379 | $self->_determine_driver; | |
1408 | my ($self, $source, @args) = @_; | |
1409 | ||
1410 | # redispatch to update method of storage we reblessed into, if necessary | |
1411 | if (not $self->_driver_determined) { | |
1412 | $self->_determine_driver; | |
1413 | goto $self->can('update'); | |
1414 | } | |
1415 | ||
1380 | 1416 | my $bind_attributes = $self->source_bind_attributes($source); |
1381 | 1417 | |
1382 | return $self->_execute('update' => [], $source, $bind_attributes, @_); | |
1418 | return $self->_execute('update' => [], $source, $bind_attributes, @args); | |
1383 | 1419 | } |
1384 | 1420 | |
1385 | 1421 | |
2153 | 2189 | return undef |
2154 | 2190 | } |
2155 | 2191 | |
2192 | # Check if placeholders are supported at all | |
2193 | sub _placeholders_supported { | |
2194 | my $self = shift; | |
2195 | my $dbh = $self->_get_dbh; | |
2196 | ||
2197 | # some drivers provide a $dbh attribute (e.g. Sybase and $dbh->{syb_dynamic_supported}) | |
2198 | # but it is inaccurate more often than not | |
2199 | eval { | |
2200 | local $dbh->{PrintError} = 0; | |
2201 | local $dbh->{RaiseError} = 1; | |
2202 | $dbh->do('select ?', {}, 1); | |
2203 | }; | |
2204 | return $@ ? 0 : 1; | |
2205 | } | |
2206 | ||
2207 | # Check if placeholders bound to non-string types throw exceptions | |
2208 | # | |
2209 | sub _typeless_placeholders_supported { | |
2210 | my $self = shift; | |
2211 | my $dbh = $self->_get_dbh; | |
2212 | ||
2213 | eval { | |
2214 | local $dbh->{PrintError} = 0; | |
2215 | local $dbh->{RaiseError} = 1; | |
2216 | # this specifically tests a bind that is NOT a string | |
2217 | $dbh->do('select 1 where 1 = ?', {}, 1); | |
2218 | }; | |
2219 | return $@ ? 0 : 1; | |
2220 | } | |
2221 | ||
2156 | 2222 | =head2 sqlt_type |
2157 | 2223 | |
2158 | 2224 | Returns the database driver name. |
2544 | 2610 | return; |
2545 | 2611 | } |
2546 | 2612 | |
2613 | # SQLT version handling | |
2614 | { | |
2615 | my $_sqlt_version_ok; # private | |
2616 | my $_sqlt_version_error; # private | |
2617 | ||
2618 | sub _sqlt_version_ok { | |
2619 | if (!defined $_sqlt_version_ok) { | |
2620 | eval "use SQL::Translator $minimum_sqlt_version"; | |
2621 | if ($@) { | |
2622 | $_sqlt_version_ok = 0; | |
2623 | $_sqlt_version_error = $@; | |
2624 | } | |
2625 | else { | |
2626 | $_sqlt_version_ok = 1; | |
2627 | } | |
2628 | } | |
2629 | return $_sqlt_version_ok; | |
2630 | } | |
2631 | ||
2632 | sub _sqlt_version_error { | |
2633 | shift->_sqlt_version_ok unless defined $_sqlt_version_ok; | |
2634 | return $_sqlt_version_error; | |
2635 | } | |
2636 | ||
2637 | sub _sqlt_minimum_version { $minimum_sqlt_version }; | |
2638 | } | |
2639 | ||
2547 | 2640 | sub DESTROY { |
2548 | 2641 | my $self = shift; |
2549 | 2642 |
1 | 1 | use strict; |
2 | 2 | use warnings; |
3 | 3 | |
4 | use base qw/Class::Accessor::Grouped/; | |
4 | use base qw/DBIx::Class/; | |
5 | 5 | use IO::File; |
6 | 6 | |
7 | 7 | __PACKAGE__->mk_group_accessors(simple => qw/callback debugfh silence/); |
1 | 1 | |
2 | 2 | use strict; |
3 | 3 | use warnings; |
4 | use Carp (); | |
4 | use Carp::Clan qw/^DBIx::Class/; | |
5 | 5 | |
6 | 6 | sub new { |
7 | 7 | my ($class, $storage) = @_; |
23 | 23 | return if $dismiss; |
24 | 24 | |
25 | 25 | my $exception = $@; |
26 | Carp::cluck("A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or an error - bad") | |
27 | unless $exception; | |
26 | ||
28 | 27 | { |
29 | 28 | local $@; |
29 | ||
30 | carp 'A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back.' | |
31 | unless $exception; | |
32 | ||
30 | 33 | eval { $storage->txn_rollback }; |
31 | 34 | my $rollback_exception = $@; |
32 | if($rollback_exception) { | |
33 | my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION"; | |
34 | 35 | |
35 | $storage->throw_exception( | |
36 | "Transaction aborted: ${exception}. " | |
37 | . "Rollback failed: ${rollback_exception}" | |
38 | ) unless $rollback_exception =~ /$exception_class/; | |
36 | if ($rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) { | |
37 | if ($exception) { | |
38 | $exception = "Transaction aborted: ${exception} " | |
39 | ."Rollback failed: ${rollback_exception}"; | |
40 | } | |
41 | else { | |
42 | carp (join ' ', | |
43 | "********************* ROLLBACK FAILED!!! ********************", | |
44 | "\nA rollback operation failed after the guard went out of scope.", | |
45 | 'This is potentially a disastrous situation, check your data for', | |
46 | "consistency: $rollback_exception" | |
47 | ); | |
48 | } | |
39 | 49 | } |
40 | 50 | } |
51 | ||
52 | $@ = $exception; | |
41 | 53 | } |
42 | 54 | |
43 | 55 | 1; |
5 | 5 | use base qw/DBIx::Class/; |
6 | 6 | use mro 'c3'; |
7 | 7 | |
8 | use Scalar::Util qw/weaken/; | |
9 | use Carp::Clan qw/^DBIx::Class/; | |
8 | use DBIx::Class::Exception; | |
9 | use Scalar::Util(); | |
10 | 10 | use IO::File; |
11 | 11 | use DBIx::Class::Storage::TxnScopeGuard; |
12 | 12 | |
82 | 82 | sub set_schema { |
83 | 83 | my ($self, $schema) = @_; |
84 | 84 | $self->schema($schema); |
85 | weaken($self->{schema}) if ref $self->{schema}; | |
85 | Scalar::Util::weaken($self->{schema}) if ref $self->{schema}; | |
86 | 86 | } |
87 | 87 | |
88 | 88 | =head2 connected |
119 | 119 | sub throw_exception { |
120 | 120 | my $self = shift; |
121 | 121 | |
122 | $self->schema->throw_exception(@_) if $self->schema; | |
123 | croak @_; | |
122 | if ($self->schema) { | |
123 | $self->schema->throw_exception(@_); | |
124 | } | |
125 | else { | |
126 | DBIx::Class::Exception->throw(@_); | |
127 | } | |
124 | 128 | } |
125 | 129 | |
126 | 130 | =head2 txn_do |
5 | 5 | use MRO::Compat; |
6 | 6 | |
7 | 7 | use vars qw($VERSION); |
8 | use base qw/DBIx::Class::Componentised Class::Accessor::Grouped/; | |
8 | use base qw/Class::C3::Componentised Class::Accessor::Grouped/; | |
9 | 9 | use DBIx::Class::StartupCheck; |
10 | 10 | |
11 | 11 | sub mk_classdata { |
23 | 23 | # Always remember to do all digits for the version even if they're 0 |
24 | 24 | # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports |
25 | 25 | # brain damage and presumably various other packaging systems too |
26 | ||
27 | $VERSION = '0.08111'; | |
26 | $VERSION = '0.08112'; | |
28 | 27 | |
29 | 28 | $VERSION = eval $VERSION; # numify for warning-free dev releases |
30 | ||
31 | # what version of sqlt do we require if deploy() without a ddl_dir is invoked | |
32 | # when changing also adjust $sqlt_recommends in Makefile.PL | |
33 | my $minimum_sqlt_version = '0.11002'; | |
34 | 29 | |
35 | 30 | sub MODIFY_CODE_ATTRIBUTES { |
36 | 31 | my ($class,$code,@attrs) = @_; |
46 | 41 | my $rest = eval { $self->next::method }; |
47 | 42 | return $@ ? $cache : { %$cache, %$rest }; |
48 | 43 | } |
49 | ||
50 | # SQLT version handling | |
51 | { | |
52 | my $_sqlt_version_ok; # private | |
53 | my $_sqlt_version_error; # private | |
54 | ||
55 | sub _sqlt_version_ok { | |
56 | if (!defined $_sqlt_version_ok) { | |
57 | eval "use SQL::Translator $minimum_sqlt_version"; | |
58 | if ($@) { | |
59 | $_sqlt_version_ok = 0; | |
60 | $_sqlt_version_error = $@; | |
61 | } | |
62 | else { | |
63 | $_sqlt_version_ok = 1; | |
64 | } | |
65 | } | |
66 | return $_sqlt_version_ok; | |
67 | } | |
68 | ||
69 | sub _sqlt_version_error { | |
70 | shift->_sqlt_version_ok unless defined $_sqlt_version_ok; | |
71 | return $_sqlt_version_error; | |
72 | } | |
73 | ||
74 | sub _sqlt_minimum_version { $minimum_sqlt_version }; | |
75 | } | |
76 | ||
77 | 44 | |
78 | 45 | 1; |
79 | 46 | |
261 | 228 | |
262 | 229 | bricas: Brian Cassidy <bricas@cpan.org> |
263 | 230 | |
231 | brunov: Bruno Vecchi <vecchi.b@gmail.com> | |
232 | ||
264 | 233 | caelum: Rafael Kitover <rkitover@cpan.org> |
265 | 234 | |
266 | 235 | castaway: Jess Robinson |
5 | 5 | |
6 | 6 | use lib qw(t/lib); |
7 | 7 | use DBICTest::ForeignComponent; |
8 | ||
9 | plan tests => 6; | |
10 | 8 | |
11 | 9 | # Tests if foreign component was loaded by calling foreign's method |
12 | 10 | ok( DBICTest::ForeignComponent->foreign_test_method, 'foreign component' ); |
34 | 32 | 'inject_base filters duplicates' |
35 | 33 | ); |
36 | 34 | |
37 | # Test for a warning with incorrect order in load_components | |
38 | my @warnings = (); | |
39 | { | |
40 | package A::Test; | |
41 | our @ISA = 'DBIx::Class'; | |
42 | { | |
43 | local $SIG{__WARN__} = sub { push @warnings, shift}; | |
44 | __PACKAGE__->load_components(qw(Core UTF8Columns)); | |
45 | } | |
46 | } | |
47 | like( $warnings[0], qr/Core loaded before UTF8Columns/, | |
48 | 'warning issued for incorrect order in load_components()' ); | |
49 | is( scalar @warnings, 1, | |
50 | 'only one warning issued for incorrect load_components call' ); | |
35 | use_ok('DBIx::Class::AccessorGroup'); | |
36 | use_ok('DBIx::Class::Componentised'); | |
51 | 37 | |
52 | # Test that no warning is issued for the correct order in load_components | |
53 | { | |
54 | @warnings = (); | |
55 | package B::Test; | |
56 | our @ISA = 'DBIx::Class'; | |
57 | { | |
58 | local $SIG{__WARN__} = sub { push @warnings, shift }; | |
59 | __PACKAGE__->load_components(qw(UTF8Columns Core)); | |
60 | } | |
61 | } | |
62 | is( scalar @warnings, 0, | |
63 | 'warning not issued for correct order in load_components()' ); | |
64 | ||
65 | use_ok('DBIx::Class::AccessorGroup'); | |
38 | done_testing; |
2 | 2 | |
3 | 3 | use Test::More; |
4 | 4 | use Test::Exception; |
5 | use Test::Warn; | |
5 | 6 | use lib qw(t/lib); |
6 | 7 | use DBICTest; |
7 | 8 | use DBIC::SqlMakerTest; |
34 | 35 | my %not_dirty = $art->get_dirty_columns(); |
35 | 36 | is(scalar(keys(%not_dirty)), 0, 'Nothing is dirty'); |
36 | 37 | |
37 | eval { | |
38 | throws_ok ( sub { | |
38 | 39 | my $ret = $art->make_column_dirty('name2'); |
39 | }; | |
40 | ok(defined($@), 'Failed to make non-existent column dirty'); | |
40 | }, qr/No such column 'name2'/, 'Failed to make non-existent column dirty'); | |
41 | ||
41 | 42 | $art->make_column_dirty('name'); |
42 | 43 | my %fake_dirty = $art->get_dirty_columns(); |
43 | 44 | is(scalar(keys(%fake_dirty)), 1, '1 fake dirty column'); |
107 | 108 | { |
108 | 109 | ok(my $artist = $schema->resultset('Artist')->create({name => 'store_column test'})); |
109 | 110 | is($artist->name, 'X store_column test'); # used to be 'X X store...' |
111 | ||
112 | # call store_column even though the column doesn't seem to be dirty | |
113 | ok($artist->update({name => 'X store_column test'})); | |
114 | is($artist->name, 'X X store_column test'); | |
110 | 115 | $artist->delete; |
111 | 116 | } |
112 | 117 | |
216 | 221 | isa_ok($tdata{'last_updated_on'}, 'DateTime', 'inflated accessored column'); |
217 | 222 | } |
218 | 223 | |
219 | eval { $schema->class("Track")->load_components('DoesNotExist'); }; | |
220 | ||
221 | ok $@, $@; | |
224 | throws_ok (sub { | |
225 | $schema->class("Track")->load_components('DoesNotExist'); | |
226 | }, qr!Can't locate DBIx/Class/DoesNotExist.pm!, 'exception on nonexisting component'); | |
222 | 227 | |
223 | 228 | is($schema->class("Artist")->field_name_for->{name}, 'artist name', 'mk_classdata usage ok'); |
224 | 229 | |
232 | 237 | my $collapsed_or_rs = $or_rs->search ({}, { distinct => 1 }); # induce collapse |
233 | 238 | is ($collapsed_or_rs->all, 4, 'Collapsed joined search with OR returned correct number of rows'); |
234 | 239 | is ($collapsed_or_rs->count, 4, 'Collapsed search count with OR ok'); |
240 | ||
241 | # make sure distinct on a grouped rs is warned about | 
242 | my $cd_rs = $schema->resultset ('CD') | |
243 | ->search ({}, { distinct => 1, group_by => 'title' }); | |
244 | warnings_exist (sub { | |
245 | $cd_rs->next; | |
246 | }, qr/Useless use of distinct/, 'UUoD warning'); | |
235 | 247 | |
236 | 248 | { |
237 | 249 | my $tcount = $schema->resultset('Track')->search( |
0 | 0 | use strict; |
1 | use warnings; | |
1 | use warnings; | |
2 | 2 | |
3 | 3 | use Test::More; |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest; |
6 | use DBIC::SqlMakerTest; | |
7 | use DBIC::DebugObj; | |
6 | 8 | |
7 | 9 | my $schema = DBICTest->init_schema(); |
8 | ||
9 | plan tests => 49; | |
10 | 10 | |
11 | 11 | # Check the defined unique constraints |
12 | 12 | is_deeply( |
208 | 208 | ); |
209 | 209 | ok($cd2->in_storage, 'Updating year using update_or_new was successful'); |
210 | 210 | is($cd2->id, $cd1->id, 'Got the same CD using update_or_new'); |
211 | }⏎ | |
211 | } | |
212 | ||
213 | # make sure the ident condition is assembled sanely | |
214 | { | |
215 | my $artist = $schema->resultset('Artist')->next; | |
216 | ||
217 | my ($sql, @bind); | |
218 | $schema->storage->debugobj(DBIC::DebugObj->new(\$sql, \@bind)); | 
219 | $schema->storage->debug(1); | |
220 | ||
221 | $artist->discard_changes; | |
222 | ||
223 | is_same_sql_bind ( | |
224 | $sql, | |
225 | \@bind, | |
226 | 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me WHERE me.artistid = ?', | |
227 | [qw/'1'/], | |
228 | ); | |
229 | ||
230 | $schema->storage->debug(0); | |
231 | $schema->storage->debugobj(undef); | |
232 | } | |
233 | ||
234 | done_testing; |
0 | 0 | use strict; |
1 | use warnings; | |
1 | use warnings; | |
2 | 2 | |
3 | 3 | use Test::More; |
4 | use Test::Warn; | |
4 | 5 | use Test::Exception; |
5 | 6 | use lib qw(t/lib); |
6 | 7 | use DBICTest; |
7 | 8 | |
8 | 9 | my $schema = DBICTest->init_schema(); |
9 | 10 | |
10 | plan tests => 64; | |
11 | ||
12 | 11 | my $code = sub { |
13 | 12 | my ($artist, @cd_titles) = @_; |
14 | ||
13 | ||
15 | 14 | $artist->create_related('cds', { |
16 | 15 | title => $_, |
17 | 16 | year => 2006, |
18 | 17 | }) foreach (@cd_titles); |
19 | ||
18 | ||
20 | 19 | return $artist->cds->all; |
21 | 20 | }; |
22 | 21 | |
257 | 256 | name => 'Death Cab for Cutie', |
258 | 257 | made_up_column => 1, |
259 | 258 | }); |
260 | ||
259 | ||
261 | 260 | $guard->commit; |
262 | 261 | } qr/No such column made_up_column .*? at .*?81transactions.t line \d+/s, "Error propagated okay";
263 | 262 | |
264 | 263 | ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created"); |
265 | 264 | |
266 | my $inner_exception; | |
265 | my $inner_exception; # set in inner() below | |
267 | 266 | eval { |
268 | 267 | outer($schema, 1); |
269 | 268 | }; |
272 | 271 | ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created"); |
273 | 272 | |
274 | 273 | lives_ok (sub { |
275 | my $w; | |
276 | local $SIG{__WARN__} = sub { $w = shift }; | |
277 | ||
278 | # The 0 arg says don't die, just let the scope guard go out of scope | |
279 | # forcing a txn_rollback to happen | |
280 | outer($schema, 0); | |
281 | ||
282 | like ($w, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or an error/, 'Out of scope warning detected'); | |
274 | warnings_exist ( sub { | |
275 | # The 0 arg says don't die, just let the scope guard go out of scope | |
276 | # forcing a txn_rollback to happen | |
277 | outer($schema, 0); | |
278 | }, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, 'Out of scope warning detected'); | |
283 | 279 | ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created"); |
284 | 280 | }, 'rollback successful without exception');
285 | 281 | |
318 | 314 | $inner_guard->commit; |
319 | 315 | } |
320 | 316 | } |
317 | ||
318 | # make sure the guard does not eat exceptions | |
319 | { | |
320 | my $schema = DBICTest->init_schema(); | |
321 | throws_ok (sub { | |
322 | my $guard = $schema->txn_scope_guard; | |
323 | $schema->resultset ('Artist')->create ({ name => 'bohhoo'}); | |
324 | ||
325 | $schema->storage->disconnect; # this should freak out the guard rollback | |
326 | ||
327 | die 'Deliberate exception'; | |
328 | }, qr/Deliberate exception.+Rollback failed/s); | |
329 | } | |
330 | ||
331 | # make sure it warns *big* on failed rollbacks | |
332 | { | |
333 | my $schema = DBICTest->init_schema(); | |
334 | ||
335 | # something is really confusing Test::Warn here, no time to debug | |
336 | =begin | |
337 | warnings_exist ( | |
338 | sub { | |
339 | my $guard = $schema->txn_scope_guard; | |
340 | $schema->resultset ('Artist')->create ({ name => 'bohhoo'}); | |
341 | ||
342 | $schema->storage->disconnect; # this should freak out the guard rollback | |
343 | }, | |
344 | [ | |
345 | qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, | |
346 | qr/\*+ ROLLBACK FAILED\!\!\! \*+/, | |
347 | ], | |
348 | 'proper warnings generated on out-of-scope+rollback failure' | |
349 | ); | |
350 | =cut | |
351 | ||
352 | my @want = ( | |
353 | qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, | |
354 | qr/\*+ ROLLBACK FAILED\!\!\! \*+/, | |
355 | ); | |
356 | ||
357 | my @w; | |
358 | local $SIG{__WARN__} = sub { | |
359 | if (grep {$_[0] =~ $_} (@want)) { | |
360 | push @w, $_[0]; | |
361 | } | |
362 | else { | |
363 | warn $_[0]; | |
364 | } | |
365 | }; | |
366 | { | |
367 | my $guard = $schema->txn_scope_guard; | |
368 | $schema->resultset ('Artist')->create ({ name => 'bohhoo'}); | |
369 | ||
370 | $schema->storage->disconnect; # this should freak out the guard rollback | |
371 | } | |
372 | ||
373 | is (@w, 2, 'Both expected warnings found'); | |
374 | } | |
375 | ||
376 | done_testing; |
5 | 5 | use DBICTest; |
6 | 6 | |
7 | 7 | BEGIN { |
8 | require DBIx::Class; | |
8 | require DBIx::Class::Storage::DBI; | |
9 | 9 | plan skip_all => |
10 | 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version | |
11 | if not DBIx::Class->_sqlt_version_ok; | |
10 | 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version | |
11 | if not DBIx::Class::Storage::DBI->_sqlt_version_ok; | |
12 | 12 | } |
13 | 13 | |
14 | 14 | my $schema = DBICTest->init_schema (no_deploy => 1); |
0 | 0 | use strict; |
1 | use warnings; | |
1 | use warnings; | |
2 | 2 | |
3 | 3 | use Test::More; |
4 | use Test::Warn; | |
4 | 5 | use Test::Exception; |
5 | 6 | use lib qw(t/lib); |
6 | 7 | use DBICTest; |
7 | 8 | |
8 | 9 | my $schema = DBICTest->init_schema(); |
9 | ||
10 | plan tests => 20; | |
11 | 10 | |
12 | 11 | my $rs = $schema->resultset("CD")->search({}, { order_by => 'cdid' }); |
13 | 12 | |
31 | 30 | is($rs_year->next, 1999, "reset okay"); |
32 | 31 | |
33 | 32 | is($rs_year->first, 1999, "first okay"); |
33 | ||
34 | warnings_exist (sub { | |
35 | is($rs_year->single, 1999, "single okay"); | |
36 | }, qr/Query returned more than one row/, 'single warned'); | |
34 | 37 | |
35 | 38 | # test +select/+as for single column |
36 | 39 | my $psrs = $schema->resultset('CD')->search({}, |
93 | 96 | [ $rs->get_column ('cdid')->all ], |
94 | 97 | 'prefetch properly collapses amount of rows from get_column', |
95 | 98 | ); |
99 | ||
100 | done_testing; |
15 | 15 | plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test' |
16 | 16 | unless ($dsn); |
17 | 17 | |
18 | require DBIx::Class; | |
18 | require DBIx::Class::Storage::DBI; | |
19 | 19 | plan skip_all => |
20 | 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version | |
21 | if not DBIx::Class->_sqlt_version_ok; | |
20 | 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version | |
21 | if not DBIx::Class::Storage::DBI->_sqlt_version_ok; | |
22 | 22 | } |
23 | 23 | |
24 | 24 | my $version_table_name = 'dbix_class_schema_versions'; |
5 | 5 | use DBICTest; |
6 | 6 | |
7 | 7 | BEGIN { |
8 | require DBIx::Class; | |
8 | require DBIx::Class::Storage::DBI; | |
9 | 9 | plan skip_all => |
10 | 'Test needs SQL::Translator ' . DBIx::Class->_sqlt_minimum_version | |
11 | if not DBIx::Class->_sqlt_version_ok; | |
10 | 'Test needs SQL::Translator ' . DBIx::Class::Storage::DBI->_sqlt_minimum_version | |
11 | if not DBIx::Class::Storage::DBI->_sqlt_version_ok; | |
12 | 12 | } |
13 | 13 | |
14 | 14 | my $schema = DBICTest->init_schema(); |
94 | 94 | } |
95 | 95 | eval { Film->constrain_column(codirector => Untaint => 'date') }; |
96 | 96 | is $@, '', 'Can constrain with untaint'; |
97 | ||
97 | 98 | my $freeaa = |
98 | 99 | eval { Film->create({ title => "The Freaa", codirector => 'today' }) }; |
99 | TODO: { | |
100 | local $TODO = "no idea what this is supposed to do"; | |
101 | is $@, '', "Can create codirector"; | |
102 | is $freeaa && $freeaa->codirector, '2001-03-03', "Set the codirector"; | |
103 | } | |
100 | is $@, '', "Can create codirector"; | |
101 | is $freeaa && $freeaa->codirector, '2001-03-03', "Set the codirector"; | |
104 | 102 | } |
105 | 103 | |
106 | 104 | __DATA__ |
29 | 29 | }, |
30 | 30 | ); |
31 | 31 | __PACKAGE__->set_primary_key('artistid'); |
32 | __PACKAGE__->add_unique_constraint(artist => ['artistid']); # do not remove, part of a test | |
32 | 33 | |
33 | 34 | __PACKAGE__->mk_classdata('field_name_for', { |
34 | 35 | artistid => 'primary key', |
69 | 70 | |
70 | 71 | sub store_column { |
71 | 72 | my ($self, $name, $value) = @_; |
72 | $value = 'X '.$value if ($name eq 'name' && $value && $value =~ /store_column test/); | |
73 | $value = 'X '.$value if ($name eq 'name' && $value && $value =~ /(X )?store_column test/); | |
73 | 74 | $self->next::method($name, $value); |
74 | 75 | } |
75 | 76 |
0 | 0 | -- |
1 | 1 | -- Created by SQL::Translator::Producer::SQLite |
2 | -- Created on Tue Aug 25 12:34:34 2009 | |
2 | -- Created on Mon Sep 21 00:11:34 2009 | |
3 | 3 | -- |
4 | 4 | |
5 | 5 |
0 | use warnings; | |
1 | ||
2 | use Test::More; | |
3 | use Test::Exception; | |
4 | use lib qw(t/lib); | |
5 | use DBIC::SqlMakerTest; | |
6 | use DBICTest; | |
7 | ||
8 | my $schema = DBICTest->init_schema(); | |
9 | ||
10 | ||
11 | # a regular belongs_to prefetch | |
12 | my $cds = $schema->resultset('CD')->search ({}, { prefetch => 'artist' } ); | |
13 | ||
14 | my $nulls = { | |
15 | hashref => {}, | |
16 | arrayref => [], | |
17 | undef => undef, | |
18 | }; | |
19 | ||
20 | # make sure null-prefetches do not screw with the final sql: | |
21 | for my $type (keys %$nulls) { | |
22 | # is_same_sql_bind ( | |
23 | # $cds->search({}, { prefetch => { artist => $nulls->{$type} } })->as_query, | |
24 | # $cds->as_query, | |
25 | # "same sql with null $type prefetch" | |
26 | # ); | |
27 | } | |
28 | ||
29 | # make sure left join is carried only starting from the first has_many | |
30 | is_same_sql_bind ( | |
31 | $cds->search({}, { prefetch => { artist => { cds => 'artist' } } })->as_query, | |
32 | '( | |
33 | SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track, | |
34 | artist.artistid, artist.name, artist.rank, artist.charfield, | |
35 | cds.cdid, cds.artist, cds.title, cds.year, cds.genreid, cds.single_track, | |
36 | artist_2.artistid, artist_2.name, artist_2.rank, artist_2.charfield | |
37 | FROM cd me | |
38 | JOIN artist artist ON artist.artistid = me.artist | |
39 | LEFT JOIN cd cds ON cds.artist = artist.artistid | |
40 | LEFT JOIN artist artist_2 ON artist_2.artistid = cds.artist | |
41 | ORDER BY cds.artist, cds.year | |
42 | )', | |
43 | [], | |
44 | ); | |
45 | ||
46 | done_testing; |
56 | 56 | |
57 | 57 | |
58 | 58 | # test where conditions at the root of the related chain |
59 | my $artist_rs = $schema->resultset("Artist")->search({artistid => 11}); | |
60 | ||
59 | my $artist_rs = $schema->resultset("Artist")->search({artistid => 2}); | |
60 | my $artist = $artist_rs->next; | |
61 | $artist->create_related ('cds', $_) for ( | |
62 | { | |
63 | year => 1999, title => 'vague cd', genre => { name => 'vague genre' } | |
64 | }, | |
65 | { | |
66 | year => 1999, title => 'vague cd2', genre => { name => 'vague genre' } | |
67 | }, | |
68 | ); | |
61 | 69 | |
62 | 70 | $rs = $artist_rs->search_related('cds')->search_related('genre', |
63 | { 'genre.name' => 'foo' }, | |
71 | { 'genre.name' => 'vague genre' }, | |
64 | 72 | { prefetch => 'cds' }, |
65 | 73 | ); |
66 | is($rs->all, 0, 'prefetch without distinct (objects)'); | |
67 | is($rs->count, 0, 'prefetch without distinct (count)'); | |
68 | ||
74 | is($rs->all, 1, 'base without distinct (objects)'); | |
75 | is($rs->count, 1, 'base without distinct (count)'); | |
76 | # artist -> 2 cds -> 2 genres -> 2 cds for each genre = 4 | |
77 | is($rs->search_related('cds')->all, 4, 'prefetch without distinct (objects)'); | |
78 | is($rs->search_related('cds')->count, 4, 'prefetch without distinct (count)'); | |
69 | 79 | |
70 | 80 | |
71 | 81 | $rs = $artist_rs->search(undef, {distinct => 1}) |
72 | 82 | ->search_related('cds')->search_related('genre', |
73 | { 'genre.name' => 'foo' }, | |
83 | { 'genre.name' => 'vague genre' }, | |
74 | 84 | ); |
75 | is($rs->all, 0, 'distinct without prefetch (objects)'); | |
76 | is($rs->count, 0, 'distinct without prefetch (count)'); | |
77 | ||
85 | is($rs->all, 1, 'distinct without prefetch (objects)'); | |
86 | is($rs->count, 1, 'distinct without prefetch (count)'); | |
78 | 87 | |
79 | 88 | |
80 | 89 | $rs = $artist_rs->search({}, {distinct => 1}) |
81 | 90 | ->search_related('cds')->search_related('genre', |
82 | { 'genre.name' => 'foo' }, | |
91 | { 'genre.name' => 'vague genre' }, | |
83 | 92 | { prefetch => 'cds' }, |
84 | 93 | ); |
85 | is($rs->all, 0, 'distinct with prefetch (objects)'); | |
86 | is($rs->count, 0, 'distinct with prefetch (count)'); | |
94 | is($rs->all, 1, 'distinct with prefetch (objects)'); | |
95 | is($rs->count, 1, 'distinct with prefetch (count)'); | |
96 | # artist -> 2 cds -> 2 genres -> 2 cds for each genre + distinct = 2 | |
97 | is($rs->search_related('cds')->all, 2, 'prefetched distinct with prefetch (objects)'); | |
98 | is($rs->search_related('cds')->count, 2, 'prefetched distinct with prefetch (count)'); | |
87 | 99 | |
88 | 100 | |
89 | 101 |
0 | #!/usr/bin/perl -w | |
1 | ||
2 | 0 | use strict; |
3 | use warnings; | |
1 | use warnings; | |
4 | 2 | |
5 | 3 | use Test::More; |
6 | 4 | use lib qw(t/lib); |
0 | #!/usr/bin/perl -w | |
1 | ||
2 | 0 | use strict; |
3 | use warnings; | |
1 | use warnings; | |
4 | 2 | |
5 | 3 | use Test::More; |
6 | 4 | use lib qw(t/lib); |